Line data Source code
1 : /* Code for RTL transformations to satisfy insn constraints.
2 : Copyright (C) 2010-2026 Free Software Foundation, Inc.
3 : Contributed by Vladimir Makarov <vmakarov@redhat.com>.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it under
8 : the terms of the GNU General Public License as published by the Free
9 : Software Foundation; either version 3, or (at your option) any later
10 : version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 :
22 : /* This file contains code for 3 passes: constraint pass,
23 : inheritance/split pass, and pass for undoing failed inheritance and
24 : split.
25 :
26 : The major goal of constraint pass is to transform RTL to satisfy
27 : insn and address constraints by:
28 : o choosing insn alternatives;
29 : o generating *reload insns* (or reloads in brief) and *reload
30 : pseudos* which will get necessary hard registers later;
31 : o substituting pseudos with equivalent values and removing the
32 : instructions that initialized those pseudos.
33 :
34 : The constraint pass has biggest and most complicated code in LRA.
35 : There are a lot of important details like:
36 : o reuse of input reload pseudos to simplify reload pseudo
37 : allocations;
38 : o some heuristics to choose insn alternative to improve the
39 : inheritance;
40 : o early clobbers etc.
41 :
42 : The pass is mimicking former reload pass in alternative choosing
43 : because the reload pass is oriented to current machine description
44 : model. It might be changed if the machine description model is
45 : changed.
46 :
47 : There is special code for preventing all LRA and this pass cycling
48 : in case of bugs.
49 :
50 : On the first iteration of the pass we process every instruction and
51 : choose an alternative for each one. On subsequent iterations we try
52 : to avoid reprocessing instructions if we can be sure that the old
53 : choice is still valid.
54 :
55 : The inheritance/split pass is to transform code to achieve
56 : inheritance and live range splitting. It is done on backward
57 : traversal of EBBs.
58 :
59 : The inheritance optimization goal is to reuse values in hard
60 : registers. There is analogous optimization in old reload pass. The
61 : inheritance is achieved by following transformation:
62 :
63 : reload_p1 <- p reload_p1 <- p
64 : ... new_p <- reload_p1
65 : ... => ...
66 : reload_p2 <- p reload_p2 <- new_p
67 :
68 : where p is spilled and not changed between the insns. Reload_p1 is
69 : also called *original pseudo* and new_p is called *inheritance
70 : pseudo*.
71 :
72 : The subsequent assignment pass will try to assign the same (or
73 : another if it is not possible) hard register to new_p as to
74 : reload_p1 or reload_p2.
75 :
76 : If the assignment pass fails to assign a hard register to new_p,
77 : this file will undo the inheritance and restore the original code.
78 : This is because implementing the above sequence with a spilled
79 : new_p would make the code much worse. The inheritance is done in
80 : EBB scope. The above is just a simplified example to get an idea
81 : of the inheritance as the inheritance is also done for non-reload
82 : insns.
83 :
84 : Splitting (transformation) is also done in EBB scope on the same
85 : pass as the inheritance:
86 :
87 : r <- ... or ... <- r r <- ... or ... <- r
88 : ... s <- r (new insn -- save)
89 : ... =>
90 : ... r <- s (new insn -- restore)
91 : ... <- r ... <- r
92 :
93 : The *split pseudo* s is assigned to the hard register of the
94 : original pseudo or hard register r.
95 :
96 : Splitting is done:
97 : o In EBBs with high register pressure for global pseudos (living
98 : in at least 2 BBs) and assigned to hard registers when there
99 : are more than one reload needing the hard registers;
100 : o for pseudos needing save/restore code around calls.
101 :
102 : If the split pseudo still has the same hard register as the
103 : original pseudo after the subsequent assignment pass or the
104 : original pseudo was split, the opposite transformation is done on
105 : the same pass for undoing inheritance. */
106 :
107 : #undef REG_OK_STRICT
108 :
109 : #include "config.h"
110 : #include "system.h"
111 : #include "coretypes.h"
112 : #include "backend.h"
113 : #include "hooks.h"
114 : #include "target.h"
115 : #include "rtl.h"
116 : #include "tree.h"
117 : #include "stmt.h"
118 : #include "predict.h"
119 : #include "df.h"
120 : #include "memmodel.h"
121 : #include "tm_p.h"
122 : #include "expmed.h"
123 : #include "optabs.h"
124 : #include "regs.h"
125 : #include "ira.h"
126 : #include "recog.h"
127 : #include "output.h"
128 : #include "addresses.h"
129 : #include "expr.h"
130 : #include "cfgrtl.h"
131 : #include "rtl-error.h"
132 : #include "lra.h"
133 : #include "lra-int.h"
134 : #include "print-rtl.h"
135 : #include "function-abi.h"
136 : #include "rtl-iter.h"
137 : #include "hash-set.h"
138 :
139 : /* Value of LRA_CURR_RELOAD_NUM at the beginning of BB of the current
140 : insn. Remember that LRA_CURR_RELOAD_NUM is the number of emitted
141 : reload insns. */
142 : static int bb_reload_num;
143 :
144 : /* The current insn being processed and corresponding its single set
145 : (NULL otherwise), its data (basic block, the insn data, the insn
146 : static data, and the mode of each operand). */
147 : static rtx_insn *curr_insn;
148 : static rtx curr_insn_set;
149 : static basic_block curr_bb;
150 : static lra_insn_recog_data_t curr_id;
151 : static struct lra_static_insn_data *curr_static_id;
152 : static machine_mode curr_operand_mode[MAX_RECOG_OPERANDS];
153 : /* Mode of the register substituted by its equivalence with VOIDmode
154 : (e.g. constant) and whose subreg is given operand of the current
155 : insn. VOIDmode in all other cases. */
156 : static machine_mode original_subreg_reg_mode[MAX_RECOG_OPERANDS];
157 : /* The first call insn after curr_insn within the EBB during inherit_in_ebb
158 : or NULL outside of that function. */
159 : static rtx_insn *first_call_insn;
160 :
161 :
162 :
163 : /* Start numbers for new registers and insns at the current constraints
164 : pass start. */
165 : static int new_regno_start;
166 : static int new_insn_uid_start;
167 :
168 : /* If LOC is nonnull, strip any outer subreg from it. */
169 : static inline rtx *
170 229040418 : strip_subreg (rtx *loc)
171 : {
172 101324668 : return loc && GET_CODE (*loc) == SUBREG ? &SUBREG_REG (*loc) : loc;
173 : }
174 :
175 : /* Return hard regno of REGNO or if it is was not assigned to a hard
176 : register, use a hard register from its allocno class. */
177 : static int
178 72916 : get_try_hard_regno (int regno)
179 : {
180 72916 : int hard_regno;
181 72916 : enum reg_class rclass;
182 :
183 72916 : if ((hard_regno = regno) >= FIRST_PSEUDO_REGISTER)
184 72916 : hard_regno = lra_get_regno_hard_regno (regno);
185 72916 : if (hard_regno >= 0)
186 : return hard_regno;
187 39782 : rclass = lra_get_allocno_class (regno);
188 39782 : if (rclass == NO_REGS)
189 : return -1;
190 38220 : return ira_class_hard_regs[rclass][0];
191 : }
192 :
193 : /* Return the hard regno of X after removing its subreg. If X is not a
194 : register or a subreg of a register, return -1. If X is a pseudo, use its
195 : assignment. If X is a hard regno, return the final hard regno which will be
196 : after elimination. */
197 : static int
198 289490178 : get_hard_regno (rtx x)
199 : {
200 289490178 : rtx reg;
201 289490178 : int hard_regno;
202 :
203 289490178 : reg = x;
204 289490178 : if (SUBREG_P (x))
205 5184084 : reg = SUBREG_REG (x);
206 289490178 : if (! REG_P (reg))
207 : return -1;
208 200069608 : int regno = REGNO (reg);
209 200069608 : if (HARD_REGISTER_NUM_P (regno))
210 34930959 : hard_regno = lra_get_elimination_hard_regno (regno);
211 : else
212 165138649 : hard_regno = lra_get_regno_hard_regno (regno);
213 200069608 : if (hard_regno < 0)
214 : return -1;
215 182196005 : if (SUBREG_P (x))
216 4478294 : hard_regno += subreg_regno_offset (hard_regno, GET_MODE (reg),
217 4478294 : SUBREG_BYTE (x), GET_MODE (x));
218 : return hard_regno;
219 : }
220 :
221 : /* If REGNO is a hard register or has been allocated a hard register,
222 : return the class of that register. If REGNO is a reload pseudo
223 : created by the current constraints pass, return its allocno class.
224 : Return NO_REGS otherwise. */
225 : static enum reg_class
226 515730943 : get_reg_class (int regno)
227 : {
228 515730943 : int hard_regno;
229 :
230 515730943 : if (HARD_REGISTER_NUM_P (regno))
231 65007305 : hard_regno = lra_get_elimination_hard_regno (regno);
232 : else
233 450723638 : hard_regno = lra_get_regno_hard_regno (regno);
234 515730943 : if (hard_regno >= 0)
235 322579027 : return REGNO_REG_CLASS (hard_regno);
236 193151916 : if (regno >= new_regno_start)
237 62237895 : return lra_get_allocno_class (regno);
238 : return NO_REGS;
239 : }
240 :
241 : /* Return true if REG_CLASS has enough allocatable hard regs to keep value of
242 : REG_MODE. */
243 : static bool
244 18714963 : enough_allocatable_hard_regs_p (enum reg_class reg_class,
245 : enum machine_mode reg_mode)
246 : {
247 18714963 : int i, j, hard_regno, class_size, nregs;
248 :
249 37429926 : if (hard_reg_set_subset_p (reg_class_contents[reg_class], lra_no_alloc_regs))
250 : return false;
251 6351309 : class_size = ira_class_hard_regs_num[reg_class];
252 6351309 : for (i = 0; i < class_size; i++)
253 : {
254 6351309 : hard_regno = ira_class_hard_regs[reg_class][i];
255 6351309 : nregs = hard_regno_nregs (hard_regno, reg_mode);
256 6351309 : if (nregs == 1)
257 : return true;
258 255396 : for (j = 0; j < nregs; j++)
259 170264 : if (TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno + j)
260 170264 : || ! TEST_HARD_REG_BIT (reg_class_contents[reg_class],
261 : hard_regno + j))
262 : break;
263 85132 : if (j >= nregs)
264 : return true;
265 : }
266 : return false;
267 : }
268 :
269 : /* True if C is a non-empty register class that has too few registers
270 : to be safely used as a reload target class. */
271 : #define SMALL_REGISTER_CLASS_P(C) \
272 : (ira_class_hard_regs_num [(C)] == 1 \
273 : || (ira_class_hard_regs_num [(C)] >= 1 \
274 : && targetm.class_likely_spilled_p (C)))
275 :
276 : /* Return true if REG satisfies (or will satisfy) reg class constraint
277 : CL. Use elimination first if REG is a hard register. If REG is a
278 : reload pseudo created by this constraints pass, assume that it will
279 : be allocated a hard register from its allocno class, but allow that
280 : class to be narrowed to CL if it is currently a superset of CL and
281 : if either:
282 :
283 : - ALLOW_ALL_RELOAD_CLASS_CHANGES_P is true or
284 : - the instruction we're processing is not a reload move.
285 :
286 : If NEW_CLASS is nonnull, set *NEW_CLASS to the new allocno class of
287 : REGNO (reg), or NO_REGS if no change in its class was needed. */
288 : static bool
289 219537375 : in_class_p (rtx reg, enum reg_class cl, enum reg_class *new_class,
290 : bool allow_all_reload_class_changes_p = false)
291 : {
292 219537375 : enum reg_class rclass, common_class;
293 219537375 : machine_mode reg_mode;
294 219537375 : rtx src;
295 219537375 : int regno = REGNO (reg);
296 :
297 219537375 : if (new_class != NULL)
298 112814945 : *new_class = NO_REGS;
299 219537375 : if (regno < FIRST_PSEUDO_REGISTER)
300 : {
301 27579220 : rtx final_reg = reg;
302 27579220 : rtx *final_loc = &final_reg;
303 :
304 27579220 : lra_eliminate_reg_if_possible (final_loc);
305 27579220 : return TEST_HARD_REG_BIT (reg_class_contents[cl], REGNO (*final_loc));
306 : }
307 191958155 : reg_mode = GET_MODE (reg);
308 191958155 : rclass = get_reg_class (regno);
309 191958155 : src = curr_insn_set != NULL ? SET_SRC (curr_insn_set) : NULL;
310 191958155 : if (regno < new_regno_start
311 : /* Do not allow the constraints for reload instructions to
312 : influence the classes of new pseudos. These reloads are
313 : typically moves that have many alternatives, and restricting
314 : reload pseudos for one alternative may lead to situations
315 : where other reload pseudos are no longer allocatable. */
316 191958155 : || (!allow_all_reload_class_changes_p
317 15088330 : && INSN_UID (curr_insn) >= new_insn_uid_start
318 14556370 : && src != NULL
319 14556370 : && ((REG_P (src) || MEM_P (src))
320 1406381 : || (GET_CODE (src) == SUBREG
321 638727 : && (REG_P (SUBREG_REG (src)) || MEM_P (SUBREG_REG (src)))))))
322 : /* When we don't know what class will be used finally for reload
323 : pseudos, we use ALL_REGS. */
324 13788716 : return ((regno >= new_regno_start && rclass == ALL_REGS)
325 187029508 : || (rclass != NO_REGS && ira_class_subset_p[rclass][cl]
326 201734102 : && ! hard_reg_set_subset_p (reg_class_contents[cl],
327 : lra_no_alloc_regs)));
328 : else
329 : {
330 18714963 : common_class = ira_reg_class_subset[rclass][cl];
331 18714963 : if (new_class != NULL)
332 5256353 : *new_class = common_class;
333 18714963 : return (enough_allocatable_hard_regs_p (common_class, reg_mode)
334 : /* Do not permit reload insn operand matching (new_class == NULL
335 : case) if the new class is too small. */
336 18714963 : && (new_class != NULL || common_class == rclass
337 1010906 : || !SMALL_REGISTER_CLASS_P (common_class)));
338 : }
339 : }
340 :
341 : /* Return true if REGNO satisfies a memory constraint. */
342 : static bool
343 63947268 : in_mem_p (int regno)
344 : {
345 0 : return get_reg_class (regno) == NO_REGS;
346 : }
347 :
348 : /* Return true if ADDR is a valid memory address for mode MODE in address
349 : space AS, and check that each pseudo has the proper kind of hard
350 : reg. */
351 : static bool
352 35683137 : valid_address_p (machine_mode mode ATTRIBUTE_UNUSED,
353 : rtx addr, addr_space_t as)
354 : {
355 : #ifdef GO_IF_LEGITIMATE_ADDRESS
356 : lra_assert (ADDR_SPACE_GENERIC_P (as));
357 : GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
358 : return false;
359 :
360 : win:
361 : return true;
362 : #else
363 0 : return targetm.addr_space.legitimate_address_p (mode, addr, 0, as,
364 35683137 : ERROR_MARK);
365 : #endif
366 : }
367 :
368 : namespace {
369 : /* Temporarily eliminates registers in an address (for the lifetime of
370 : the object). */
371 : class address_eliminator {
372 : public:
373 : address_eliminator (struct address_info *ad);
374 : ~address_eliminator ();
375 :
376 : private:
377 : struct address_info *m_ad;
378 : rtx *m_base_loc;
379 : rtx m_base_reg;
380 : rtx *m_index_loc;
381 : rtx m_index_reg;
382 : };
383 : }
384 :
385 75028746 : address_eliminator::address_eliminator (struct address_info *ad)
386 75028746 : : m_ad (ad),
387 75028746 : m_base_loc (strip_subreg (ad->base_term)),
388 75028746 : m_base_reg (NULL_RTX),
389 75028746 : m_index_loc (strip_subreg (ad->index_term)),
390 75028746 : m_index_reg (NULL_RTX)
391 : {
392 75028746 : if (m_base_loc != NULL)
393 : {
394 62457554 : m_base_reg = *m_base_loc;
395 : /* If we have non-legitimate address which is decomposed not in
396 : the way we expected, don't do elimination here. In such case
397 : the address will be reloaded and elimination will be done in
398 : reload insn finally. */
399 62457554 : if (REG_P (m_base_reg))
400 62457554 : lra_eliminate_reg_if_possible (m_base_loc);
401 62457554 : if (m_ad->base_term2 != NULL)
402 0 : *m_ad->base_term2 = *m_ad->base_term;
403 : }
404 75028746 : if (m_index_loc != NULL)
405 : {
406 3720478 : m_index_reg = *m_index_loc;
407 3720478 : if (REG_P (m_index_reg))
408 3720478 : lra_eliminate_reg_if_possible (m_index_loc);
409 : }
410 75028746 : }
411 :
412 75028746 : address_eliminator::~address_eliminator ()
413 : {
414 75028746 : if (m_base_loc && *m_base_loc != m_base_reg)
415 : {
416 44025109 : *m_base_loc = m_base_reg;
417 44025109 : if (m_ad->base_term2 != NULL)
418 0 : *m_ad->base_term2 = *m_ad->base_term;
419 : }
420 75028746 : if (m_index_loc && *m_index_loc != m_index_reg)
421 0 : *m_index_loc = m_index_reg;
422 75028746 : }
423 :
424 : /* Return true if the eliminated form of AD is a legitimate target address.
425 : If OP is a MEM, AD is the address within OP, otherwise OP should be
426 : ignored. CONSTRAINT is one constraint that the operand may need
427 : to meet. */
428 : static bool
429 35660685 : valid_address_p (rtx op, struct address_info *ad,
430 : enum constraint_num constraint)
431 : {
432 35660685 : address_eliminator eliminator (ad);
433 :
434 : /* Allow a memory OP if it matches CONSTRAINT, even if CONSTRAINT is more
435 : forgiving than "m".
436 : Need to extract memory from op for special memory constraint,
437 : i.e. bcst_mem_operand in i386 backend. */
438 35660685 : if (MEM_P (extract_mem_from_operand (op))
439 : && insn_extra_relaxed_memory_constraint (constraint)
440 : && constraint_satisfied_p (op, constraint))
441 : return true;
442 :
443 35660685 : return valid_address_p (ad->mode, *ad->outer, ad->as);
444 35660685 : }
445 :
446 : /* For special_memory_operand, it could be false for MEM_P (op),
447 : i.e. bcst_mem_operand in i386 backend.
448 : Extract and return real memory operand or op. */
449 : rtx
450 624779470 : extract_mem_from_operand (rtx op)
451 : {
452 626451456 : for (rtx x = op;; x = XEXP (x, 0))
453 : {
454 626451456 : if (MEM_P (x))
455 : return x;
456 445120704 : if (GET_RTX_LENGTH (GET_CODE (x)) != 1
457 364920251 : || GET_RTX_FORMAT (GET_CODE (x))[0] != 'e')
458 : break;
459 : }
460 : return op;
461 : }
462 :
463 : /* Return true if the eliminated form of memory reference OP satisfies
464 : extra (special) memory constraint CONSTRAINT. */
465 : static bool
466 36936881 : satisfies_memory_constraint_p (rtx op, enum constraint_num constraint)
467 : {
468 36936881 : struct address_info ad;
469 36936881 : rtx mem = extract_mem_from_operand (op);
470 36936881 : if (!MEM_P (mem))
471 : return false;
472 :
473 35930546 : decompose_mem_address (&ad, mem);
474 35930546 : address_eliminator eliminator (&ad);
475 35930546 : return constraint_satisfied_p (op, constraint);
476 35930546 : }
477 :
478 : /* Return true if the eliminated form of address AD satisfies extra
479 : address constraint CONSTRAINT. */
480 : static bool
481 3437515 : satisfies_address_constraint_p (struct address_info *ad,
482 : enum constraint_num constraint)
483 : {
484 3437515 : address_eliminator eliminator (ad);
485 3437515 : return constraint_satisfied_p (*ad->outer, constraint);
486 3437515 : }
487 :
488 : /* Return true if the eliminated form of address OP satisfies extra
489 : address constraint CONSTRAINT. */
490 : static bool
491 1686750 : satisfies_address_constraint_p (rtx op, enum constraint_num constraint)
492 : {
493 1686750 : struct address_info ad;
494 :
495 1686750 : decompose_lea_address (&ad, &op);
496 1686750 : return satisfies_address_constraint_p (&ad, constraint);
497 : }
498 :
499 : /* Set of equivalences whose original targets have set up pointer flag. */
500 : static hash_set <rtx> *pointer_equiv_set;
501 :
502 : /* Add x to pointer_equiv_set. */
503 : void
504 1895736 : lra_pointer_equiv_set_add (rtx x)
505 : {
506 1895736 : pointer_equiv_set->add (x);
507 1895736 : }
508 :
509 : /* Return true if x is in pointer_equiv_set. */
510 : bool
511 9747531 : lra_pointer_equiv_set_in (rtx x)
512 : {
513 9747531 : return pointer_equiv_set->contains (x);
514 : }
515 :
516 : /* Initiate equivalences for LRA. As we keep original equivalences
517 : before any elimination, we need to make copies otherwise any change
518 : in insns might change the equivalences. */
519 : void
520 1480947 : lra_init_equiv (void)
521 : {
522 1480947 : ira_expand_reg_equiv ();
523 69860704 : for (int i = FIRST_PSEUDO_REGISTER; i < max_reg_num (); i++)
524 : {
525 68379757 : rtx res;
526 :
527 68379757 : if ((res = ira_reg_equiv[i].memory) != NULL_RTX)
528 3048162 : ira_reg_equiv[i].memory = copy_rtx (res);
529 68379757 : if ((res = ira_reg_equiv[i].invariant) != NULL_RTX)
530 872087 : ira_reg_equiv[i].invariant = copy_rtx (res);
531 : }
532 1480947 : pointer_equiv_set = new hash_set <rtx>;
533 1480947 : }
534 :
535 : /* Finish equivalence data for LRA. */
536 : void
537 1480947 : lra_finish_equiv (void)
538 : {
539 2961894 : delete pointer_equiv_set;
540 1480947 : }
541 :
542 : static rtx loc_equivalence_callback (rtx, const_rtx, void *);
543 :
544 : /* Update equivalence for REGNO. We need to this as the equivalence
545 : might contain other pseudos which are changed by their
546 : equivalences. */
547 : static void
548 203054431 : update_equiv (int regno)
549 : {
550 203054431 : rtx x;
551 :
552 203054431 : if ((x = ira_reg_equiv[regno].memory) != NULL_RTX)
553 9270449 : ira_reg_equiv[regno].memory
554 9270449 : = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
555 : NULL_RTX);
556 203054431 : if ((x = ira_reg_equiv[regno].invariant) != NULL_RTX)
557 2708901 : ira_reg_equiv[regno].invariant
558 2708901 : = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
559 : NULL_RTX);
560 203054431 : }
561 :
562 : /* If we have decided to substitute X with another value, return that
563 : value, otherwise return X. */
564 : static rtx
565 437542960 : get_equiv (rtx x)
566 : {
567 437542960 : int regno;
568 437542960 : rtx res;
569 :
570 296133507 : if (! REG_P (x) || (regno = REGNO (x)) < FIRST_PSEUDO_REGISTER
571 196837798 : || regno >= ira_reg_equiv_len
572 196837798 : || ! ira_reg_equiv[regno].defined_p
573 25634393 : || ! ira_reg_equiv[regno].profitable_p
574 463123423 : || lra_get_regno_hard_regno (regno) >= 0)
575 432636123 : return x;
576 4906837 : if ((res = ira_reg_equiv[regno].memory) != NULL_RTX)
577 : {
578 2197645 : if (targetm.cannot_substitute_mem_equiv_p (res))
579 : return x;
580 : return res;
581 : }
582 2709192 : if ((res = ira_reg_equiv[regno].constant) != NULL_RTX)
583 : return res;
584 1861920 : if ((res = ira_reg_equiv[regno].invariant) != NULL_RTX)
585 : return res;
586 0 : gcc_unreachable ();
587 : }
588 :
589 : /* If we have decided to substitute X with the equivalent value, return that
590 : value after elimination for INSN, otherwise return X. Add the result to
591 : pointer_equiv_set if X has set up pointer flag. */
592 : static rtx
593 246095355 : get_equiv_with_elimination (rtx x, rtx_insn *insn)
594 : {
595 246095355 : rtx res = get_equiv (x);
596 :
597 246095355 : if (x == res || CONSTANT_P (res))
598 : return res;
599 1489760 : res = lra_eliminate_regs_1 (insn, res, GET_MODE (res),
600 : false, false, 0, true);
601 1489760 : if (REG_POINTER (x))
602 1022317 : lra_pointer_equiv_set_add (res);
603 : return res;
604 : }
605 :
606 : /* Set up curr_operand_mode. */
607 : static void
608 106240335 : init_curr_operand_mode (void)
609 : {
610 106240335 : int nop = curr_static_id->n_operands;
611 331093498 : for (int i = 0; i < nop; i++)
612 : {
613 224853163 : machine_mode mode = GET_MODE (*curr_id->operand_loc[i]);
614 224853163 : if (mode == VOIDmode)
615 : {
616 : /* The .md mode for address operands is the mode of the
617 : addressed value rather than the mode of the address itself. */
618 43502742 : if (curr_id->icode >= 0 && curr_static_id->operand[i].is_address)
619 95 : mode = Pmode;
620 : else
621 43502647 : mode = curr_static_id->operand[i].mode;
622 : }
623 224853163 : curr_operand_mode[i] = mode;
624 : }
625 106240335 : }
626 :
627 :
628 :
629 : /* The page contains code to reuse input reloads. */
630 :
631 : /* Structure describes input reload of the current insns. */
632 : struct input_reload
633 : {
634 : /* True for input reload of matched operands. */
635 : bool match_p;
636 : /* True for input reload of inout earlyclobber operand. */
637 : bool early_clobber_p;
638 : /* Reloaded value. */
639 : rtx input;
640 : /* Reload pseudo used. */
641 : rtx reg;
642 : };
643 :
644 : /* The number of elements in the following array. */
645 : static int curr_insn_input_reloads_num;
646 : /* Array containing info about input reloads. It is used to find the
647 : same input reload and reuse the reload pseudo in this case. */
648 : static struct input_reload curr_insn_input_reloads[LRA_MAX_INSN_RELOADS];
649 :
650 : /* Initiate data concerning reuse of input reloads for the current
651 : insn. */
652 : static void
653 106240335 : init_curr_insn_input_reloads (void)
654 : {
655 106240335 : curr_insn_input_reloads_num = 0;
656 0 : }
657 :
658 : /* The canonical form of an rtx inside a MEM is not necessarily the same as the
659 : canonical form of the rtx outside the MEM. Fix this up in the case that
660 : we're reloading an address (and therefore pulling it outside a MEM). */
661 : static rtx
662 72 : canonicalize_reload_addr (rtx addr)
663 : {
664 72 : subrtx_var_iterator::array_type array;
665 246 : FOR_EACH_SUBRTX_VAR (iter, array, addr, NONCONST)
666 : {
667 174 : rtx x = *iter;
668 174 : if (GET_CODE (x) == MULT && CONST_INT_P (XEXP (x, 1)))
669 : {
670 14 : const HOST_WIDE_INT ci = INTVAL (XEXP (x, 1));
671 188 : const int pwr2 = exact_log2 (ci);
672 14 : if (pwr2 > 0)
673 : {
674 : /* Rewrite this to use a shift instead, which is canonical when
675 : outside of a MEM. */
676 14 : PUT_CODE (x, ASHIFT);
677 14 : XEXP (x, 1) = GEN_INT (pwr2);
678 : }
679 : }
680 : }
681 :
682 72 : return addr;
683 72 : }
684 :
685 : /* Return rtx accessing reload REG of RCLASS matching another reload reg in
686 : MODE. */
687 : static rtx
688 123896 : get_matching_reload_reg_subreg (machine_mode mode, rtx reg,
689 : enum reg_class rclass)
690 : {
691 123896 : int hard_regno = ira_class_hard_regs[rclass][0];
692 123896 : if (subreg_regno_offset (hard_regno,
693 123896 : GET_MODE (reg),
694 123896 : subreg_lowpart_offset (mode, GET_MODE (reg)),
695 : mode) == 0)
696 : /* For matching scalar int modes generate the right subreg byte offset for
697 : BE targets -- see call of reload.cc:operands_match_p in
698 : recog.cc:constrain_operands. */
699 123896 : return lowpart_subreg (mode, reg, GET_MODE (reg));
700 0 : int offset = (lra_constraint_offset (hard_regno, GET_MODE (reg))
701 0 : - lra_constraint_offset (hard_regno, mode)) * UNITS_PER_WORD;
702 0 : lra_assert (offset >= 0);
703 0 : return gen_rtx_SUBREG (mode, reg, offset);
704 : }
705 :
706 : /* Create a new pseudo using MODE, RCLASS, EXCLUDE_START_HARD_REGS, ORIGINAL or
707 : reuse an existing reload pseudo. Don't reuse an existing reload pseudo if
708 : IN_SUBREG_P is true and the reused pseudo should be wrapped up in a SUBREG.
709 : EARLY_CLOBBER_P is true for input reload of inout early clobber operand.
710 : The result pseudo is returned through RESULT_REG. Return TRUE if we created
711 : a new pseudo, FALSE if we reused an existing reload pseudo. Use TITLE to
712 : describe new registers for debug purposes. */
713 : static bool
714 3800862 : get_reload_reg (enum op_type type, machine_mode mode, rtx original,
715 : enum reg_class rclass, HARD_REG_SET *exclude_start_hard_regs,
716 : bool in_subreg_p, bool early_clobber_p,
717 : const char *title, rtx *result_reg)
718 : {
719 3800862 : int i, regno;
720 3800862 : enum reg_class new_class;
721 :
722 3800862 : if (type == OP_OUT)
723 : {
724 : /* Output reload registers tend to start out with a conservative
725 : choice of register class. Usually this is ALL_REGS, although
726 : a target might narrow it (for performance reasons) through
727 : targetm.preferred_reload_class. It's therefore quite common
728 : for a reload instruction to require a more restrictive class
729 : than the class that was originally assigned to the reload register.
730 :
731 : In these situations, it's more efficient to refine the choice
732 : of register class rather than create a second reload register.
733 : This also helps to avoid cycling for registers that are only
734 : used by reload instructions. */
735 964142 : if (REG_P (original)
736 708628 : && (int) REGNO (original) >= new_regno_start
737 7153 : && (INSN_UID (curr_insn) >= new_insn_uid_start
738 250 : || ira_former_scratch_p (REGNO (original)))
739 7153 : && in_class_p (original, rclass, &new_class, true)
740 964392 : && (exclude_start_hard_regs == nullptr
741 250 : || hard_reg_set_intersect_p (
742 964392 : ~lra_reg_info[REGNO (original)].exclude_start_hard_regs,
743 250 : ~*exclude_start_hard_regs)))
744 : {
745 250 : unsigned int regno = REGNO (original);
746 250 : if (lra_dump_file != NULL)
747 : {
748 0 : fprintf (lra_dump_file, " Reuse r%d for output ", regno);
749 0 : dump_value_slim (lra_dump_file, original, 1);
750 : }
751 500 : if (new_class != lra_get_allocno_class (regno))
752 250 : lra_change_class (regno, new_class, ", change to", false);
753 250 : if (lra_dump_file != NULL)
754 0 : fprintf (lra_dump_file, "\n");
755 250 : if (exclude_start_hard_regs)
756 250 : lra_reg_info[regno].exclude_start_hard_regs
757 250 : |= *exclude_start_hard_regs;
758 250 : *result_reg = original;
759 250 : return false;
760 : }
761 963892 : *result_reg
762 963892 : = lra_create_new_reg_with_unique_value (mode, original, rclass,
763 : exclude_start_hard_regs, title);
764 963892 : return true;
765 : }
766 :
767 2836720 : bool unique_p = early_clobber_p;
768 : /* Prevent reuse value of expression with side effects,
769 : e.g. volatile memory. */
770 2836720 : if (! side_effects_p (original))
771 3056290 : for (i = 0; i < curr_insn_input_reloads_num; i++)
772 : {
773 238137 : if (! curr_insn_input_reloads[i].match_p
774 99430 : && ! curr_insn_input_reloads[i].early_clobber_p
775 99429 : && rtx_equal_p (curr_insn_input_reloads[i].input, original)
776 246676 : && in_class_p (curr_insn_input_reloads[i].reg, rclass, &new_class))
777 : {
778 8526 : rtx reg = curr_insn_input_reloads[i].reg;
779 8526 : regno = REGNO (reg);
780 : /* If input is equal to original and both are VOIDmode,
781 : GET_MODE (reg) might be still different from mode.
782 : Ensure we don't return *result_reg with wrong mode. */
783 8526 : if (GET_MODE (reg) != mode)
784 : {
785 0 : if (in_subreg_p)
786 0 : continue;
787 0 : if (maybe_lt (GET_MODE_SIZE (GET_MODE (reg)),
788 0 : GET_MODE_SIZE (mode)))
789 0 : continue;
790 0 : reg = get_matching_reload_reg_subreg (mode, reg, new_class);
791 0 : if (reg == NULL_RTX || GET_CODE (reg) != SUBREG)
792 0 : continue;
793 : }
794 : /* If the existing reload and this have no start hard register in
795 : common, then skip. Otherwise update exclude_start_hard_regs. */
796 8526 : if (exclude_start_hard_regs
797 9787 : && ! hard_reg_set_empty_p (*exclude_start_hard_regs))
798 : {
799 1 : HARD_REG_SET r = lra_reg_info[regno].exclude_start_hard_regs
800 1 : | *exclude_start_hard_regs;
801 2 : if (hard_reg_set_empty_p (~r))
802 0 : continue;
803 : else
804 1 : lra_reg_info[regno].exclude_start_hard_regs = r;
805 : }
806 8526 : *result_reg = reg;
807 8526 : if (lra_dump_file != NULL)
808 : {
809 0 : fprintf (lra_dump_file, " Reuse r%d for reload ", regno);
810 0 : dump_value_slim (lra_dump_file, original, 1);
811 : }
812 17052 : if (new_class != lra_get_allocno_class (regno))
813 4407 : lra_change_class (regno, new_class, ", change to", false);
814 8526 : if (lra_dump_file != NULL)
815 0 : fprintf (lra_dump_file, "\n");
816 8526 : return false;
817 : }
818 : /* If we have an input reload with a different mode, make sure it
819 : will get a different hard reg. */
820 229611 : else if (REG_P (original)
821 180508 : && REG_P (curr_insn_input_reloads[i].input)
822 147460 : && REGNO (original) == REGNO (curr_insn_input_reloads[i].input)
823 229611 : && (GET_MODE (original)
824 2139 : != GET_MODE (curr_insn_input_reloads[i].input)))
825 : unique_p = true;
826 : }
827 5656388 : *result_reg = (unique_p
828 2828194 : ? lra_create_new_reg_with_unique_value
829 2828194 : : lra_create_new_reg) (mode, original, rclass,
830 : exclude_start_hard_regs, title);
831 2828194 : lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
832 2828194 : curr_insn_input_reloads[curr_insn_input_reloads_num].input = original;
833 2828194 : curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = false;
834 2828194 : curr_insn_input_reloads[curr_insn_input_reloads_num].early_clobber_p
835 2828194 : = early_clobber_p;
836 2828194 : curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = *result_reg;
837 2828194 : return true;
838 : }
839 :
840 :
841 : /* The page contains major code to choose the current insn alternative
842 : and generate reloads for it. */
843 :
844 : /* Return the offset from REGNO of the least significant register
845 : in (reg:MODE REGNO).
846 :
847 : This function is used to tell whether two registers satisfy
848 : a matching constraint. (reg:MODE1 REGNO1) matches (reg:MODE2 REGNO2) if:
849 :
850 : REGNO1 + lra_constraint_offset (REGNO1, MODE1)
851 : == REGNO2 + lra_constraint_offset (REGNO2, MODE2) */
852 : int
853 43635312 : lra_constraint_offset (int regno, machine_mode mode)
854 : {
855 43635312 : lra_assert (regno < FIRST_PSEUDO_REGISTER);
856 :
857 43635312 : scalar_int_mode int_mode;
858 43635312 : if (WORDS_BIG_ENDIAN
859 : && is_a <scalar_int_mode> (mode, &int_mode)
860 : && GET_MODE_SIZE (int_mode) > UNITS_PER_WORD)
861 : return hard_regno_nregs (regno, mode) - 1;
862 43635312 : return 0;
863 : }
864 :
/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
   if they are the same hard reg, and has special hacks for
   auto-increment and auto-decrement.  This is specifically intended for
   process_alt_operands to use in determining whether two operands
   match.  X is the operand whose number is the lower of the two.

   It is supposed that X is the output operand and Y is the input
   operand.  Y_HARD_REGNO is the final hard regno of register Y or
   register in subreg Y as we know it now.  Otherwise, it is a
   negative value.  */
static bool
operands_match_p (rtx x, rtx y, int y_hard_regno)
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return true;
  /* Fast path: both operands are hard registers (or subregs of hard
     registers).  Compare the hard register numbers they occupy,
     adjusted by the least-significant-word offset for each mode.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y)))))
    {
      int j;

      i = get_hard_regno (x);
      if (i < 0)
	goto slow;

      /* Y_HARD_REGNO was precomputed by the caller; a negative value
	 means Y's hard register is not known yet.  */
      if ((j = y_hard_regno) < 0)
	goto slow;

      i += lra_constraint_offset (i, GET_MODE (x));
      j += lra_constraint_offset (j, GET_MODE (y));

      return i == j;
    }

  /* If two operands must match, because they are really a single
     operand of an assembler insn, then two post-increments are invalid
     because the assembler insn would increment only once.  On the
     other hand, a post-increment matches ordinary indexing if the
     post-increment is the output operand.  */
  if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
    return operands_match_p (XEXP (x, 0), y, y_hard_regno);

  /* Two pre-increments are invalid because the assembler insn would
     increment only once.  On the other hand, a pre-increment matches
     ordinary indexing if the pre-increment is the input operand.  */
  if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
      || GET_CODE (y) == PRE_MODIFY)
    return operands_match_p (x, XEXP (y, 0), -1);

  /* Slow path: structural comparison of the two rtxes.  */
 slow:

  if (code == REG && REG_P (y))
    return REGNO (x) == REGNO (y);

  /* A REG matches a paradoxical-looking SUBREG wrapping the very same
     REG object (and vice versa).  */
  if (code == REG && GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y))
      && x == SUBREG_REG (y))
    return true;
  if (GET_CODE (y) == REG && code == SUBREG && REG_P (SUBREG_REG (x))
      && SUBREG_REG (x) == y)
    return true;

  /* Now we have disposed of all the cases in which different rtx
     codes can match.  */
  if (code != GET_CODE (y))
    return false;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return false;

  switch (code)
    {
    CASE_CONST_UNIQUE:
      /* Unique constants are shared; pointer inequality (checked at
	 entry) already proved they differ.  */
      return false;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
	return false;
      break;

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);
    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements fail
     to match, return false for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int val, j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return false;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return false;
	  break;

	case 'L':
	  if (XLOC (x, i) != XLOC (y, i))
	    return false;
	  break;

	case 'p':
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return false;
	  break;

	case 'e':
	  val = operands_match_p (XEXP (x, i), XEXP (y, i), -1);
	  if (val == 0)
	    return false;
	  break;

	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return false;
	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
	    {
	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j), -1);
	      if (val == 0)
		return false;
	    }
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's, except for
	     within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return true;
}
1015 :
/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.  The
   constant must not be a HIGH expression, MODE must have a known
   constant size, and the target must not veto the placement through
   the cannot_force_const_mem hook.  */
#define CONST_POOL_OK_P(MODE, X)		\
  ((MODE) != VOIDmode				\
   && CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && GET_MODE_SIZE (MODE).is_constant ()	\
   && !targetm.cannot_force_const_mem (MODE, X))
1024 :
1025 : /* If REG is a reload pseudo, try to make its class satisfying CL. */
1026 : static void
1027 3406362 : narrow_reload_pseudo_class (rtx reg, enum reg_class cl)
1028 : {
1029 3406362 : enum reg_class rclass;
1030 :
1031 : /* Do not make more accurate class from reloads generated. They are
1032 : mostly moves with a lot of constraints. Making more accurate
1033 : class may results in very narrow class and impossibility of find
1034 : registers for several reloads of one insn. */
1035 3406362 : if (INSN_UID (curr_insn) >= new_insn_uid_start)
1036 3406330 : return;
1037 3406250 : if (GET_CODE (reg) == SUBREG)
1038 167390 : reg = SUBREG_REG (reg);
1039 3406250 : if (! REG_P (reg) || (int) REGNO (reg) < new_regno_start)
1040 : return;
1041 32 : if (in_class_p (reg, cl, &rclass) && rclass != cl)
1042 13 : lra_change_class (REGNO (reg), rclass, " Change to", true);
1043 : }
1044 :
1045 : /* Searches X for any reference to a reg with the same value as REGNO,
1046 : returning the rtx of the reference found if any. Otherwise,
1047 : returns NULL_RTX. */
1048 : static rtx
1049 528112 : regno_val_use_in (unsigned int regno, rtx x)
1050 : {
1051 528112 : const char *fmt;
1052 528112 : int i, j;
1053 528112 : rtx tem;
1054 :
1055 528112 : if (REG_P (x) && lra_reg_info[REGNO (x)].val == lra_reg_info[regno].val)
1056 : return x;
1057 :
1058 527786 : fmt = GET_RTX_FORMAT (GET_CODE (x));
1059 1061931 : for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
1060 : {
1061 534145 : if (fmt[i] == 'e')
1062 : {
1063 7632 : if ((tem = regno_val_use_in (regno, XEXP (x, i))))
1064 : return tem;
1065 : }
1066 526513 : else if (fmt[i] == 'E')
1067 0 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1068 0 : if ((tem = regno_val_use_in (regno , XVECEXP (x, i, j))))
1069 : return tem;
1070 : }
1071 :
1072 : return NULL_RTX;
1073 : }
1074 :
1075 : /* Return true if all current insn non-output operands except INS (it
1076 : has a negaitve end marker) do not use pseudos with the same value
1077 : as REGNO. */
1078 : static bool
1079 2 : check_conflict_input_operands (int regno, signed char *ins)
1080 : {
1081 2 : int in;
1082 2 : int n_operands = curr_static_id->n_operands;
1083 :
1084 8 : for (int nop = 0; nop < n_operands; nop++)
1085 7 : if (! curr_static_id->operand[nop].is_operator
1086 7 : && curr_static_id->operand[nop].type != OP_OUT)
1087 : {
1088 5 : for (int i = 0; (in = ins[i]) >= 0; i++)
1089 4 : if (in == nop)
1090 : break;
1091 3 : if (in < 0
1092 3 : && regno_val_use_in (regno, *curr_id->operand_loc[nop]) != NULL_RTX)
1093 : return false;
1094 : }
1095 : return true;
1096 : }
1097 :
/* Generate reloads for matching OUT and INS (array of input operand numbers
   with end marker -1) with reg class GOAL_CLASS and EXCLUDE_START_HARD_REGS,
   considering output operands OUTS (similar array to INS) needing to be in
   different registers.  Add input and output reloads correspondingly to the
   lists *BEFORE and *AFTER.  OUT might be negative.  In this case we generate
   input reloads for matched input operands INS.  EARLY_CLOBBER_P is a flag
   that the output operand is early clobbered for chosen alternative.  */
static void
match_reload (signed char out, signed char *ins, signed char *outs,
	      enum reg_class goal_class, HARD_REG_SET *exclude_start_hard_regs,
	      rtx_insn **before, rtx_insn **after, bool early_clobber_p)
{
  bool out_conflict;
  int i, in;
  rtx new_in_reg, new_out_reg, reg;
  machine_mode inmode, outmode;
  rtx in_rtx = *curr_id->operand_loc[ins[0]];
  rtx out_rtx = out < 0 ? in_rtx : *curr_id->operand_loc[out];

  inmode = curr_operand_mode[ins[0]];
  outmode = out < 0 ? inmode : curr_operand_mode[out];
  push_to_sequence (*before);
  if (inmode != outmode)
    {
      /* The input and output operands have different modes; the reload
	 pseudo is created in the wider mode and the narrower side is
	 accessed through a matching subreg.
	 process_alt_operands has already checked that the mode sizes
	 are ordered.  */
      if (partial_subreg_p (outmode, inmode))
	{
	  /* Input mode is wider: reload pseudo gets INMODE, output
	     becomes a subreg of it.  */
	  bool asm_p = asm_noperands (PATTERN (curr_insn)) >= 0;
	  int hr;
	  HARD_REG_SET temp_hard_reg_set;

	  if (asm_p && (hr = get_hard_regno (out_rtx)) >= 0
	      && hard_regno_nregs (hr, inmode) > 1)
	    {
	      /* See gcc.c-torture/execute/20030222-1.c.
		 Consider the code for 32-bit (e.g. BE) target:
		 int i, v; long x; x = v; asm ("" : "=r" (i) : "0" (x));
		 We generate the following RTL with reload insns:
		 1. subreg:si(x:di, 0) = 0;
		 2. subreg:si(x:di, 4) = v:si;
		 3. t:di = x:di, dead x;
		 4. asm ("" : "=r" (subreg:si(t:di,4)) : "0" (t:di))
		 5. i:si = subreg:si(t:di,4);
		 If we assign hard reg of x to t, dead code elimination
		 will remove insn #2 and we will use uninitialized hard reg.
		 So exclude the hard reg of x for t.  We could ignore this
		 problem for non-empty asm using all x value but it is hard to
		 check that the asm are expanded into insn really using x
		 and setting r.  */
	      CLEAR_HARD_REG_SET (temp_hard_reg_set);
	      if (exclude_start_hard_regs != NULL)
		temp_hard_reg_set = *exclude_start_hard_regs;
	      SET_HARD_REG_BIT (temp_hard_reg_set, hr);
	      exclude_start_hard_regs = &temp_hard_reg_set;
	    }
	  reg = new_in_reg
	    = lra_create_new_reg_with_unique_value (inmode, in_rtx, goal_class,
						    exclude_start_hard_regs,
						    "");
	  new_out_reg = get_matching_reload_reg_subreg (outmode, reg, goal_class);
	  LRA_SUBREG_P (new_out_reg) = 1;
	  /* If the input reg is dying here, we can use the same hard
	     register for REG and IN_RTX.  We do it only for original
	     pseudos as reload pseudos can die although original
	     pseudos still live where reload pseudos dies.  */
	  if (REG_P (in_rtx) && (int) REGNO (in_rtx) < lra_new_regno_start
	      && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx))
	      && (!early_clobber_p
		  || check_conflict_input_operands(REGNO (in_rtx), ins)))
	    lra_assign_reg_val (REGNO (in_rtx), REGNO (reg));
	}
      else
	{
	  /* Output mode is at least as wide: reload pseudo gets
	     OUTMODE, input becomes a subreg of it.  */
	  reg = new_out_reg
	    = lra_create_new_reg_with_unique_value (outmode, out_rtx,
						    goal_class,
						    exclude_start_hard_regs,
						    "");
	  new_in_reg = get_matching_reload_reg_subreg (inmode, reg, goal_class);
	  /* NEW_IN_REG is non-paradoxical subreg.  We don't want
	     NEW_OUT_REG living above.  We add clobber clause for
	     this.  This is just a temporary clobber.  We can remove
	     it at the end of LRA work.  */
	  rtx_insn *clobber = emit_clobber (new_out_reg);
	  LRA_TEMP_CLOBBER_P (PATTERN (clobber)) = 1;
	  LRA_SUBREG_P (new_in_reg) = 1;
	  if (GET_CODE (in_rtx) == SUBREG)
	    {
	      rtx subreg_reg = SUBREG_REG (in_rtx);

	      /* If SUBREG_REG is dying here and sub-registers IN_RTX
		 and NEW_IN_REG are similar, we can use the same hard
		 register for REG and SUBREG_REG.  */
	      if (REG_P (subreg_reg)
		  && (int) REGNO (subreg_reg) < lra_new_regno_start
		  && GET_MODE (subreg_reg) == outmode
		  && known_eq (SUBREG_BYTE (in_rtx), SUBREG_BYTE (new_in_reg))
		  && find_regno_note (curr_insn, REG_DEAD, REGNO (subreg_reg))
		  && (! early_clobber_p
		      || check_conflict_input_operands (REGNO (subreg_reg),
							ins)))
		lra_assign_reg_val (REGNO (subreg_reg), REGNO (reg));
	    }
	}
    }
  else
    {
      /* Pseudos have values -- see comments for lra_reg_info.
	 Different pseudos with the same value do not conflict even if
	 they live in the same place.  When we create a pseudo we
	 assign value of original pseudo (if any) from which we
	 created the new pseudo.  If we create the pseudo from the
	 input pseudo, the new pseudo will have no conflict with the
	 input pseudo which is wrong when the input pseudo lives after
	 the insn and as the new pseudo value is changed by the insn
	 output.  Therefore we create the new pseudo from the output
	 except the case when we have single matched dying input
	 pseudo.

	 We cannot reuse the current output register because we might
	 have a situation like "a <- a op b", where the constraints
	 force the second input operand ("b") to match the output
	 operand ("a").  "b" must then be copied into a new register
	 so that it doesn't clobber the current value of "a".

	 We cannot use the same value if the output pseudo is
	 early clobbered or the input pseudo is mentioned in the
	 output, e.g. as an address part in memory, because
	 output reload will actually extend the pseudo liveness.
	 We don't care about eliminable hard regs here as we are
	 interesting only in pseudos.  */

      /* Matching input's register value is the same as one of the other
	 output operand.  Output operands in a parallel insn must be in
	 different registers.  */
      out_conflict = false;
      if (REG_P (in_rtx))
	{
	  for (i = 0; outs[i] >= 0; i++)
	    {
	      rtx other_out_rtx = *curr_id->operand_loc[outs[i]];
	      if (outs[i] != out && REG_P (other_out_rtx)
		  && (regno_val_use_in (REGNO (in_rtx), other_out_rtx)
		      != NULL_RTX))
		{
		  out_conflict = true;
		  break;
		}
	    }
	}

      /* Reuse the input pseudo's value only for a single dying,
	 non-early-clobbered matched input that is not mentioned in any
	 output; otherwise the reload pseudo gets a fresh unique value
	 derived from the output.  */
      new_in_reg = new_out_reg
	= (! early_clobber_p && ins[1] < 0 && REG_P (in_rtx)
	   && (int) REGNO (in_rtx) < lra_new_regno_start
	   && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx))
	   && (! early_clobber_p
	       || check_conflict_input_operands (REGNO (in_rtx), ins))
	   && (out < 0
	       || regno_val_use_in (REGNO (in_rtx), out_rtx) == NULL_RTX)
	   && !out_conflict
	   ? lra_create_new_reg (inmode, in_rtx, goal_class,
				 exclude_start_hard_regs, "")
	   : lra_create_new_reg_with_unique_value (outmode, out_rtx, goal_class,
						   exclude_start_hard_regs,
						   ""));
    }
  /* In operand can be got from transformations before processing insn
     constraints.  One example of such transformations is subreg
     reloading (see function simplify_operand_subreg).  The new
     pseudos created by the transformations might have inaccurate
     class (ALL_REGS) and we should make their classes more
     accurate.  */
  narrow_reload_pseudo_class (in_rtx, goal_class);
  lra_emit_move (copy_rtx (new_in_reg), in_rtx);
  *before = end_sequence ();
  /* Add the new pseudo to consider values of subsequent input reload
     pseudos.  */
  lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
  curr_insn_input_reloads[curr_insn_input_reloads_num].input = in_rtx;
  curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = true;
  curr_insn_input_reloads[curr_insn_input_reloads_num].early_clobber_p = false;
  curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = new_in_reg;
  /* Substitute the reload pseudo into every matched input operand,
     choosing the in- or out-mode copy according to the operand's
     mode.  */
  for (i = 0; (in = ins[i]) >= 0; i++)
    if (GET_MODE (*curr_id->operand_loc[in]) == VOIDmode
	|| GET_MODE (new_in_reg) == GET_MODE (*curr_id->operand_loc[in]))
      *curr_id->operand_loc[in] = new_in_reg;
    else
      {
	lra_assert
	  (GET_MODE (new_out_reg) == GET_MODE (*curr_id->operand_loc[in]));
	*curr_id->operand_loc[in] = new_out_reg;
      }
  lra_update_dups (curr_id, ins);
  if (out < 0)
    return;
  /* See a comment for the input operand above.  */
  narrow_reload_pseudo_class (out_rtx, goal_class);
  reg = SUBREG_P (out_rtx) ? SUBREG_REG (out_rtx) : out_rtx;
  /* Skip the output reload when the result is unused (REG_UNUSED note)
     or the destination was originally a scratch.  */
  if (find_reg_note (curr_insn, REG_UNUSED, reg) == NULL_RTX
      && (!REG_P (reg) || !ira_former_scratch_p (REGNO (reg))))
    {
      start_sequence ();
      /* If we had strict_low_part, use it also in reload to keep other
	 parts unchanged but do it only for regs as strict_low_part
	 has no sense for memory and probably there is no insn pattern
	 to match the reload insn in memory case.  */
      if (out >= 0 && curr_static_id->operand[out].strict_low && REG_P (reg))
	out_rtx = gen_rtx_STRICT_LOW_PART (VOIDmode, out_rtx);
      lra_emit_move (out_rtx, copy_rtx (new_out_reg));
      emit_insn (*after);
      *after = end_sequence ();
    }
  *curr_id->operand_loc[out] = new_out_reg;
  lra_update_dup (curr_id, out);
}
1314 :
1315 : /* Return register class which is union of all reg classes in insn
1316 : constraint alternative string starting with P. */
1317 : static enum reg_class
1318 0 : reg_class_from_constraints (const char *p)
1319 : {
1320 0 : int c, len;
1321 0 : enum reg_class op_class = NO_REGS;
1322 :
1323 0 : do
1324 0 : switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
1325 : {
1326 : case '#':
1327 : case ',':
1328 : return op_class;
1329 :
1330 0 : case 'g':
1331 0 : op_class = reg_class_subunion[op_class][GENERAL_REGS];
1332 0 : break;
1333 :
1334 0 : default:
1335 0 : enum constraint_num cn = lookup_constraint (p);
1336 0 : enum reg_class cl = reg_class_for_constraint (cn);
1337 0 : if (cl == NO_REGS)
1338 : {
1339 0 : if (insn_extra_address_constraint (cn))
1340 0 : op_class
1341 0 : = (reg_class_subunion
1342 0 : [op_class][base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
1343 0 : ADDRESS, SCRATCH)]);
1344 : break;
1345 : }
1346 :
1347 0 : op_class = reg_class_subunion[op_class][cl];
1348 0 : break;
1349 : }
1350 0 : while ((p += len), c);
1351 : return op_class;
1352 : }
1353 :
1354 : /* If OP is a register, return the class of the register as per
1355 : get_reg_class, otherwise return NO_REGS. */
1356 : static inline enum reg_class
1357 163255439 : get_op_class (rtx op)
1358 : {
1359 135574903 : return REG_P (op) ? get_reg_class (REGNO (op)) : NO_REGS;
1360 : }
1361 :
1362 : /* Return generated insn mem_pseudo:=val if TO_P or val:=mem_pseudo
1363 : otherwise. If modes of MEM_PSEUDO and VAL are different, use
1364 : SUBREG for VAL to make them equal. */
1365 : static rtx_insn *
1366 1339206 : emit_spill_move (bool to_p, rtx mem_pseudo, rtx val)
1367 : {
1368 1339206 : if (GET_MODE (mem_pseudo) != GET_MODE (val))
1369 : {
1370 : /* Usually size of mem_pseudo is greater than val size but in
1371 : rare cases it can be less as it can be defined by target
1372 : dependent macro HARD_REGNO_CALLER_SAVE_MODE. */
1373 3022 : if (! MEM_P (val))
1374 : {
1375 3022 : val = gen_lowpart_SUBREG (GET_MODE (mem_pseudo),
1376 : GET_CODE (val) == SUBREG
1377 : ? SUBREG_REG (val) : val);
1378 3022 : LRA_SUBREG_P (val) = 1;
1379 : }
1380 : else
1381 : {
1382 0 : mem_pseudo = gen_lowpart_SUBREG (GET_MODE (val), mem_pseudo);
1383 0 : LRA_SUBREG_P (mem_pseudo) = 1;
1384 : }
1385 : }
1386 1339206 : return to_p ? gen_move_insn (mem_pseudo, val)
1387 676174 : : gen_move_insn (val, mem_pseudo);
1388 : }
1389 :
/* Process a special case insn (register move), return true if we
   don't need to process it anymore.  INSN should be a single set
   insn.  Set up that RTL was changed through CHANGE_P and that hook
   TARGET_SECONDARY_MEMORY_NEEDED says to use secondary memory through
   SEC_MEM_P.  */
static bool
check_and_process_move (bool *change_p, bool *sec_mem_p ATTRIBUTE_UNUSED)
{
  int sregno, dregno;
  rtx dest, src, dreg, sreg, new_reg, scratch_reg;
  rtx_insn *before;
  enum reg_class dclass, sclass, secondary_class;
  secondary_reload_info sri;

  lra_assert (curr_insn_set != NULL_RTX);
  /* DREG/SREG are the destination/source with any SUBREG stripped.  */
  dreg = dest = SET_DEST (curr_insn_set);
  sreg = src = SET_SRC (curr_insn_set);
  if (GET_CODE (dest) == SUBREG)
    dreg = SUBREG_REG (dest);
  if (GET_CODE (src) == SUBREG)
    sreg = SUBREG_REG (src);
  if (! (REG_P (dreg) || MEM_P (dreg)) || ! (REG_P (sreg) || MEM_P (sreg)))
    return false;
  sclass = dclass = NO_REGS;
  if (REG_P (dreg))
    dclass = get_reg_class (REGNO (dreg));
  gcc_assert (dclass < LIM_REG_CLASSES && dclass >= NO_REGS);
  if (dclass == ALL_REGS)
    /* ALL_REGS is used for new pseudos created by transformations
       like reload of SUBREG_REG (see function
       simplify_operand_subreg).  We don't know their class yet.  We
       should figure out the class from processing the insn
       constraints not in this fast path function.  Even if ALL_REGS
       were a right class for the pseudo, secondary_... hooks usually
       are not defined for ALL_REGS.  */
    return false;
  if (REG_P (sreg))
    sclass = get_reg_class (REGNO (sreg));
  gcc_assert (sclass < LIM_REG_CLASSES && sclass >= NO_REGS);
  if (sclass == ALL_REGS)
    /* See comments above.  */
    return false;
  if (sclass == NO_REGS && dclass == NO_REGS)
    return false;
  /* Ask the target whether the move needs to go through memory; if so
     just record the fact and let the caller handle it.  */
  if (targetm.secondary_memory_needed (GET_MODE (src), sclass, dclass)
      && ((sclass != NO_REGS && dclass != NO_REGS)
	  || (GET_MODE (src)
	      != targetm.secondary_memory_needed_mode (GET_MODE (src)))))
    {
      *sec_mem_p = true;
      return false;
    }
  if (! REG_P (dreg) || ! REG_P (sreg))
    return false;
  sri.prev_sri = NULL;
  sri.icode = CODE_FOR_nothing;
  sri.extra_cost = 0;
  secondary_class = NO_REGS;
  /* Set up hard register for a reload pseudo for hook
     secondary_reload because some targets just ignore unassigned
     pseudos in the hook.  The temporary assignment is undone below.  */
  if (dclass != NO_REGS && lra_get_regno_hard_regno (REGNO (dreg)) < 0)
    {
      dregno = REGNO (dreg);
      reg_renumber[dregno] = ira_class_hard_regs[dclass][0];
    }
  else
    dregno = -1;
  if (sclass != NO_REGS && lra_get_regno_hard_regno (REGNO (sreg)) < 0)
    {
      sregno = REGNO (sreg);
      reg_renumber[sregno] = ira_class_hard_regs[sclass][0];
    }
  else
    sregno = -1;
  /* Query the hook for the output (store) direction first ...  */
  if (sclass != NO_REGS)
    secondary_class
      = (enum reg_class) targetm.secondary_reload (false, dest,
						   (reg_class_t) sclass,
						   GET_MODE (src), &sri);
  /* ... and for the input (load) direction when the output query gave
     nothing usable or both classes are known.  */
  if (sclass == NO_REGS
      || ((secondary_class != NO_REGS || sri.icode != CODE_FOR_nothing)
	  && dclass != NO_REGS))
    {
      enum reg_class old_sclass = secondary_class;
      secondary_reload_info old_sri = sri;

      sri.prev_sri = NULL;
      sri.icode = CODE_FOR_nothing;
      sri.extra_cost = 0;
      secondary_class
	= (enum reg_class) targetm.secondary_reload (true, src,
						     (reg_class_t) dclass,
						     GET_MODE (src), &sri);
      /* Check the target hook consistency.  */
      lra_assert
	((secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
	 || (old_sclass == NO_REGS && old_sri.icode == CODE_FOR_nothing)
	 || (secondary_class == old_sclass && sri.icode == old_sri.icode));
    }
  /* Undo the temporary hard register assignments made above.  */
  if (sregno >= 0)
    reg_renumber [sregno] = -1;
  if (dregno >= 0)
    reg_renumber [dregno] = -1;
  if (secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
    return false;
  *change_p = true;
  new_reg = NULL_RTX;
  if (secondary_class != NO_REGS)
    new_reg = lra_create_new_reg_with_unique_value (GET_MODE (src), NULL_RTX,
						    secondary_class, NULL,
						    "secondary");
  start_sequence ();
  if (sri.icode == CODE_FOR_nothing)
    lra_emit_move (new_reg, src);
  else
    {
      /* The hook supplied an insn pattern; it needs a scratch register
	 of the class implied by the pattern's third operand.  */
      enum reg_class scratch_class;

      scratch_class = (reg_class_from_constraints
		       (insn_data[sri.icode].operand[2].constraint));
      scratch_reg = (lra_create_new_reg_with_unique_value
		     (insn_data[sri.icode].operand[2].mode, NULL_RTX,
		      scratch_class, NULL, "scratch"));
      emit_insn (GEN_FCN (sri.icode) (new_reg != NULL_RTX ? new_reg : dest,
				      src, scratch_reg));
    }
  before = end_sequence ();
  lra_process_new_insns (curr_insn, before, NULL, "Inserting the move");
  if (new_reg != NULL_RTX)
    SET_SRC (curr_insn_set) = new_reg;
  else
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file, "Deleting move %u\n", INSN_UID (curr_insn));
	  dump_insn_slim (lra_dump_file, curr_insn);
	}
      lra_set_insn_deleted (curr_insn);
      return true;
    }
  return false;
}
1533 :
1534 : /* The following data describe the result of process_alt_operands.
1535 : The data are used in curr_insn_transform to generate reloads. */
1536 :
1537 : /* The chosen reg classes which should be used for the corresponding
1538 : operands. */
1539 : static enum reg_class goal_alt[MAX_RECOG_OPERANDS];
1540 : /* Hard registers which cannot be a start hard register for the corresponding
1541 : operands. */
1542 : static HARD_REG_SET goal_alt_exclude_start_hard_regs[MAX_RECOG_OPERANDS];
1543 : /* True if the operand should be the same as another operand and that
1544 : other operand does not need a reload. */
1545 : static bool goal_alt_match_win[MAX_RECOG_OPERANDS];
1546 : /* True if the operand does not need a reload. */
1547 : static bool goal_alt_win[MAX_RECOG_OPERANDS];
1548 : /* True if the operand can be offsetable memory. */
1549 : static bool goal_alt_offmemok[MAX_RECOG_OPERANDS];
1550 : /* The number of an operand to which given operand can be matched to. */
1551 : static int goal_alt_matches[MAX_RECOG_OPERANDS];
1552 : /* The number of elements in the following array. */
1553 : static int goal_alt_dont_inherit_ops_num;
1554 : /* Numbers of operands whose reload pseudos should not be inherited. */
1555 : static int goal_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
1556 : /* True if we should try only this alternative for the next constraint sub-pass
1557 : to speed up the sub-pass. */
1558 : static bool goal_reuse_alt_p;
1559 : /* True if the insn commutative operands should be swapped. */
1560 : static bool goal_alt_swapped;
1561 : /* The chosen insn alternative. */
1562 : static int goal_alt_number;
1563 : /* True if output reload of the stack pointer should be generated. */
1564 : static bool goal_alt_out_sp_reload_p;
1565 :
1566 : /* True if the corresponding operand is the result of an equivalence
1567 : substitution. */
1568 : static bool equiv_substition_p[MAX_RECOG_OPERANDS];
1569 :
1570 : /* The following five variables are used to choose the best insn
1571 : alternative. They reflect final characteristics of the best
1572 : alternative. */
1573 :
1574 : /* Number of necessary reloads and overall cost reflecting the
1575 : previous value and other unpleasantness of the best alternative. */
1576 : static int best_losers, best_overall;
/* Overall number of hard registers used for reloads.  For example, on
   some targets we need 2 general registers to reload DFmode and only
   one floating point register.  */
1580 : static int best_reload_nregs;
/* Overall number reflecting distances of previous reloadings of the
   same value.  The distances are counted from the current BB start.
   It is used to improve inheritance chances.  */
1584 : static int best_reload_sum;
1585 :
1586 : /* True if the current insn should have no correspondingly input or
1587 : output reloads. */
1588 : static bool no_input_reloads_p, no_output_reloads_p;
1589 :
1590 : /* True if we swapped the commutative operands in the current
1591 : insn. */
1592 : static int curr_swapped;
1593 :
/* If CHECK_ONLY_P is false, arrange for address element *LOC to be a
   register of class CL.  Add any input reloads to list BEFORE.  AFTER
   is nonnull if *LOC is an automodified value; handle that case by
   adding the required output reloads to list AFTER.  Return true if
   the RTL was changed.

   If CHECK_ONLY_P is true, check that the *LOC is a correct address
   register.  Return false if the address register is correct.  */
1602 : static bool
1603 35153980 : process_addr_reg (rtx *loc, bool check_only_p, rtx_insn **before, rtx_insn **after,
1604 : enum reg_class cl)
1605 : {
1606 35153980 : int regno;
1607 35153980 : enum reg_class rclass, new_class;
1608 35153980 : rtx reg;
1609 35153980 : rtx new_reg;
1610 35153980 : machine_mode mode;
1611 35153980 : bool subreg_p, before_p = false;
1612 :
1613 35153980 : subreg_p = GET_CODE (*loc) == SUBREG;
1614 35153980 : if (subreg_p)
1615 : {
1616 15596 : reg = SUBREG_REG (*loc);
1617 15596 : mode = GET_MODE (reg);
1618 :
1619 : /* For mode with size bigger than ptr_mode, there unlikely to be "mov"
1620 : between two registers with different classes, but there normally will
1621 : be "mov" which transfers element of vector register into the general
1622 : register, and this normally will be a subreg which should be reloaded
1623 : as a whole. This is particularly likely to be triggered when
1624 : -fno-split-wide-types specified. */
1625 15596 : if (!REG_P (reg)
1626 15596 : || in_class_p (reg, cl, &new_class)
1627 17902 : || known_le (GET_MODE_SIZE (mode), GET_MODE_SIZE (ptr_mode)))
1628 15596 : loc = &SUBREG_REG (*loc);
1629 : }
1630 :
1631 35153980 : reg = *loc;
1632 35153980 : mode = GET_MODE (reg);
1633 35153980 : if (! REG_P (reg))
1634 : {
1635 0 : if (check_only_p)
1636 : return true;
1637 : /* Always reload memory in an address even if the target supports
1638 : such addresses. */
1639 0 : new_reg = lra_create_new_reg_with_unique_value (mode, reg, cl, NULL,
1640 : "address");
1641 0 : before_p = true;
1642 : }
1643 : else
1644 : {
1645 35153980 : regno = REGNO (reg);
1646 35153980 : rclass = get_reg_class (regno);
1647 35153980 : if (! check_only_p
1648 35153980 : && (*loc = get_equiv_with_elimination (reg, curr_insn)) != reg)
1649 : {
1650 110657 : if (lra_dump_file != NULL)
1651 : {
1652 0 : fprintf (lra_dump_file,
1653 : "Changing pseudo %d in address of insn %u on equiv ",
1654 0 : REGNO (reg), INSN_UID (curr_insn));
1655 0 : dump_value_slim (lra_dump_file, *loc, 1);
1656 0 : fprintf (lra_dump_file, "\n");
1657 : }
1658 110657 : rtx new_equiv = copy_rtx (*loc);
1659 110657 : if (lra_pointer_equiv_set_in (*loc))
1660 105581 : lra_pointer_equiv_set_add (new_equiv);
1661 110657 : *loc = new_equiv;
1662 : }
1663 35153980 : if (*loc != reg || ! in_class_p (reg, cl, &new_class))
1664 : {
1665 476967 : if (check_only_p)
1666 : return true;
1667 476967 : reg = *loc;
1668 476967 : if (get_reload_reg (after == NULL ? OP_IN : OP_INOUT,
1669 : mode, reg, cl, NULL,
1670 : subreg_p, false, "address", &new_reg))
1671 : before_p = true;
1672 : }
1673 34677013 : else if (new_class != NO_REGS && rclass != new_class)
1674 : {
1675 466006 : if (check_only_p)
1676 : return true;
1677 466006 : lra_change_class (regno, new_class, " Change to", true);
1678 466006 : return false;
1679 : }
1680 : else
1681 : return false;
1682 : }
1683 0 : if (before_p)
1684 : {
1685 469702 : push_to_sequence (*before);
1686 469702 : lra_emit_move (new_reg, reg);
1687 469702 : *before = end_sequence ();
1688 : }
1689 476967 : *loc = new_reg;
1690 476967 : if (after != NULL)
1691 : {
1692 0 : start_sequence ();
1693 0 : lra_emit_move (before_p ? copy_rtx (reg) : reg, new_reg);
1694 0 : emit_insn (*after);
1695 0 : *after = end_sequence ();
1696 : }
1697 : return true;
1698 : }
1699 :
1700 : /* Insert move insn in simplify_operand_subreg. BEFORE returns
1701 : the insn to be inserted before curr insn. AFTER returns the
1702 : the insn to be inserted after curr insn. ORIGREG and NEWREG
1703 : are the original reg and new reg for reload. */
1704 : static void
1705 458 : insert_move_for_subreg (rtx_insn **before, rtx_insn **after, rtx origreg,
1706 : rtx newreg)
1707 : {
1708 458 : if (before)
1709 : {
1710 458 : push_to_sequence (*before);
1711 458 : lra_emit_move (newreg, origreg);
1712 458 : *before = end_sequence ();
1713 : }
1714 458 : if (after)
1715 : {
1716 0 : start_sequence ();
1717 0 : lra_emit_move (origreg, newreg);
1718 0 : emit_insn (*after);
1719 0 : *after = end_sequence ();
1720 : }
1721 458 : }
1722 :
1723 : static bool valid_address_p (machine_mode mode, rtx addr, addr_space_t as);
1724 : static bool process_address (int, bool, rtx_insn **, rtx_insn **);
1725 :
/* Make reloads for subreg in operand NOP with internal subreg mode
   REG_MODE, add new reloads for further processing.  Return true if
   any change was done.  */
static bool
simplify_operand_subreg (int nop, machine_mode reg_mode)
{
  int hard_regno, inner_hard_regno;
  rtx_insn *before, *after;
  machine_mode mode, innermode;
  rtx reg, new_reg;
  rtx operand = *curr_id->operand_loc[nop];
  enum reg_class regclass;
  enum op_type type;

  before = after = NULL;

  if (GET_CODE (operand) != SUBREG)
    return false;

  mode = GET_MODE (operand);
  reg = SUBREG_REG (operand);
  innermode = GET_MODE (reg);
  type = curr_static_id->operand[nop].type;
  if (MEM_P (reg))
    {
      /* Subreg of memory: try to remove the subreg by folding it into
	 the memory reference, reloading the memory when that is not
	 possible or profitable.  */
      const bool addr_was_valid
	= valid_address_p (innermode, XEXP (reg, 0), MEM_ADDR_SPACE (reg));
      alter_subreg (curr_id->operand_loc[nop], false);
      rtx subst = *curr_id->operand_loc[nop];
      lra_assert (MEM_P (subst));
      const bool addr_is_valid = valid_address_p (GET_MODE (subst),
						  XEXP (subst, 0),
						  MEM_ADDR_SPACE (subst));
      if (!addr_was_valid
	  || addr_is_valid
	  || ((get_constraint_type (lookup_constraint
				    (curr_static_id->operand[nop].constraint))
	       != CT_SPECIAL_MEMORY)
	      /* We still can reload address and if the address is
		 valid, we can remove subreg without reloading its
		 inner memory.  */
	      && valid_address_p (GET_MODE (subst),
				  regno_reg_rtx
				  [ira_class_hard_regs
				   [base_reg_class (GET_MODE (subst),
						    MEM_ADDR_SPACE (subst),
						    ADDRESS, SCRATCH)][0]],
				  MEM_ADDR_SPACE (subst))))
	{
	  /* If we change the address for a paradoxical subreg of memory, the
	     new address might violate the necessary alignment or the access
	     might be slow; take this into consideration.  We need not worry
	     about accesses beyond allocated memory for paradoxical memory
	     subregs as we don't substitute such equiv memory (see processing
	     equivalences in function lra_constraints) and because for spilled
	     pseudos we allocate stack memory enough for the biggest
	     corresponding paradoxical subreg.

	     However, do not blindly simplify a (subreg (mem ...)) for
	     WORD_REGISTER_OPERATIONS targets as this may lead to loading junk
	     data into a register when the inner is narrower than outer or
	     missing important data from memory when the inner is wider than
	     outer.  This rule only applies to modes that are no wider than
	     a word.

	     If valid memory becomes invalid after subreg elimination
	     and the address might be different we still have to reload
	     the memory.  */
	  if ((! addr_was_valid
	       || addr_is_valid
	       || known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (innermode)))
	      && !(maybe_ne (GET_MODE_PRECISION (mode),
			     GET_MODE_PRECISION (innermode))
		   && known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD)
		   && known_le (GET_MODE_SIZE (innermode), UNITS_PER_WORD)
		   && WORD_REGISTER_OPERATIONS)
	      && (!(MEM_ALIGN (subst) < GET_MODE_ALIGNMENT (mode)
		    && targetm.slow_unaligned_access (mode, MEM_ALIGN (subst)))
		  || (MEM_ALIGN (reg) < GET_MODE_ALIGNMENT (innermode)
		      && targetm.slow_unaligned_access (innermode,
							MEM_ALIGN (reg)))))
	    return true;

	  *curr_id->operand_loc[nop] = operand;

	  /* But if the address was not valid, we cannot reload the MEM without
	     reloading the address first.  */
	  if (!addr_was_valid)
	    process_address (nop, false, &before, &after);

	  /* INNERMODE is fast, MODE slow.  Reload the mem in INNERMODE.  */
	  enum reg_class rclass
	    = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
	  if (get_reload_reg (curr_static_id->operand[nop].type, innermode,
			      reg, rclass, NULL,
			      true, false, "slow/invalid mem", &new_reg))
	    {
	      bool insert_before, insert_after;
	      bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	      insert_before = (type != OP_OUT
			       || partial_subreg_p (mode, innermode));
	      insert_after = type != OP_IN;
	      insert_move_for_subreg (insert_before ? &before : NULL,
				      insert_after ? &after : NULL,
				      reg, new_reg);
	    }
	  SUBREG_REG (operand) = new_reg;

	  /* Convert to MODE.  */
	  reg = operand;
	  rclass
	    = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
	  if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
			      rclass, NULL,
			      true, false, "slow/invalid mem", &new_reg))
	    {
	      bool insert_before, insert_after;
	      bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	      insert_before = type != OP_OUT;
	      insert_after = type != OP_IN;
	      insert_move_for_subreg (insert_before ? &before : NULL,
				      insert_after ? &after : NULL,
				      reg, new_reg);
	    }
	  *curr_id->operand_loc[nop] = new_reg;
	  lra_process_new_insns (curr_insn, before, after,
				 "Inserting slow/invalid mem reload");
	  return true;
	}

      /* If the address was valid and became invalid, prefer to reload
	 the memory.  Typical case is when the index scale should
	 correspond to the memory.  */
      *curr_id->operand_loc[nop] = operand;
      /* Do not return false here as the MEM_P (reg) will be processed
	 later in this function.  */
    }
  else if (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER)
    {
      /* A subreg of a hard register can always be simplified
	 directly.  */
      alter_subreg (curr_id->operand_loc[nop], false);
      return true;
    }
  else if (CONSTANT_P (reg))
    {
      /* Try to simplify subreg of constant.  It is usually a result of
	 equivalence substitution.  */
      if (innermode == VOIDmode
	  && (innermode = original_subreg_reg_mode[nop]) == VOIDmode)
	innermode = curr_static_id->operand[nop].mode;
      if ((new_reg = simplify_subreg (mode, reg, innermode,
				      SUBREG_BYTE (operand))) != NULL_RTX)
	{
	  *curr_id->operand_loc[nop] = new_reg;
	  return true;
	}
    }
  /* Put constant into memory when we have mixed modes.  It generates
     better code in most cases as it does not need a secondary
     reload memory.  It also prevents LRA looping when LRA is using
     secondary reload memory again and again.  */
  if (CONSTANT_P (reg) && CONST_POOL_OK_P (reg_mode, reg)
      && SCALAR_INT_MODE_P (reg_mode) != SCALAR_INT_MODE_P (mode))
    {
      SUBREG_REG (operand) = force_const_mem (reg_mode, reg);
      alter_subreg (curr_id->operand_loc[nop], false);
      return true;
    }
  /* Check whether the frame pointer subreg would be simplifiable once
     elimination is finished; temporarily pretending reload_completed
     lets simplify_subreg_regno accept the frame pointer.  */
  auto fp_subreg_can_be_simplified_after_reload_p = [] (machine_mode innermode,
							poly_uint64 offset,
							machine_mode mode) {
    reload_completed = 1;
    bool res = simplify_subreg_regno (FRAME_POINTER_REGNUM,
				      innermode,
				      offset, mode) >= 0;
    reload_completed = 0;
    return res;
  };
  /* Force a reload of the SUBREG_REG if this is a constant or PLUS or
     if there may be a problem accessing OPERAND in the outer
     mode.  */
  if ((REG_P (reg)
       && REGNO (reg) >= FIRST_PSEUDO_REGISTER
       && (hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
       /* Don't reload paradoxical subregs because we could be looping
	  having repeatedly final regno out of hard regs range.  */
       && (hard_regno_nregs (hard_regno, innermode)
	   >= hard_regno_nregs (hard_regno, mode))
       && simplify_subreg_regno (hard_regno, innermode,
				 SUBREG_BYTE (operand), mode) < 0
       /* Exclude reloading of frame pointer in subreg if frame pointer cannot
	  be simplified here only because the reload is not finished yet.  */
       && (hard_regno != FRAME_POINTER_REGNUM
	   || !fp_subreg_can_be_simplified_after_reload_p (innermode,
							   SUBREG_BYTE (operand),
							   mode))
       /* Don't reload subreg for matching reload.  It is actually
	  valid subreg in LRA.  */
       && ! LRA_SUBREG_P (operand))
      || CONSTANT_P (reg) || GET_CODE (reg) == PLUS || MEM_P (reg))
    {
      enum reg_class rclass;

      if (REG_P (reg))
	/* There is a big probability that we will get the same class
	   for the new pseudo and we will get the same insn which
	   means infinite looping.  So spill the new pseudo.  */
	rclass = NO_REGS;
      else
	/* The class will be defined later in curr_insn_transform.  */
	rclass
	  = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);

      if (get_reload_reg (curr_static_id->operand[nop].type, reg_mode, reg,
			  rclass, NULL,
			  true, false, "subreg reg", &new_reg))
	{
	  bool insert_before, insert_after;
	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	  insert_before = (type != OP_OUT
			   || read_modify_subreg_p (operand));
	  insert_after = (type != OP_IN);
	  insert_move_for_subreg (insert_before ? &before : NULL,
				  insert_after ? &after : NULL,
				  reg, new_reg);
	}
      SUBREG_REG (operand) = new_reg;
      lra_process_new_insns (curr_insn, before, after,
			     "Inserting subreg reload");
      return true;
    }
  /* Force a reload for a paradoxical subreg.  For paradoxical subreg,
     IRA allocates hardreg to the inner pseudo reg according to its mode
     instead of the outermode, so the size of the hardreg may not be enough
     to contain the outermode operand, in that case we may need to insert
     reload for the reg.  For the following two types of paradoxical subreg,
     we need to insert reload:
     1. If the op_type is OP_IN, and the hardreg could not be paired with
	other hardreg to contain the outermode operand
	(checked by in_hard_reg_set_p), we need to insert the reload.
     2. If the op_type is OP_OUT or OP_INOUT.

     Here is a paradoxical subreg example showing how the reload is generated:

	 (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
	    (subreg:TI (reg:DI 107 [ __comp ]) 0)) {*movti_internal_rex64}

     In IRA, reg107 is allocated to a DImode hardreg.  We use x86-64 as example
     here, if reg107 is assigned to hardreg R15, because R15 is the last
     hardreg, compiler cannot find another hardreg to pair with R15 to
     contain TImode data.  So we insert a TImode reload reg180 for it.
     After reload is inserted:

	 (insn 283 0 0 (set (subreg:DI (reg:TI 180 [orig:107 __comp ] [107]) 0)
	    (reg:DI 107 [ __comp ])) -1
	 (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
	    (subreg:TI (reg:TI 180 [orig:107 __comp ] [107]) 0)) {*movti_internal_rex64}

     Two reload hard registers will be allocated to reg180 to save TImode data
     in LRA_assign.

     For LRA pseudos this should normally be handled by the biggest_mode
     mechanism.  However, it's possible for new uses of an LRA pseudo
     to be introduced after we've allocated it, such as when undoing
     inheritance, and the allocated register might not then be appropriate
     for the new uses.  */
  else if (REG_P (reg)
	   && REGNO (reg) >= FIRST_PSEUDO_REGISTER
	   && paradoxical_subreg_p (operand)
	   && (inner_hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
	   && ((hard_regno
		= simplify_subreg_regno (inner_hard_regno, innermode,
					 SUBREG_BYTE (operand), mode)) < 0
	       || ((hard_regno_nregs (inner_hard_regno, innermode)
		    < hard_regno_nregs (hard_regno, mode))
		   && (regclass = lra_get_allocno_class (REGNO (reg)))
		   && (type != OP_IN
		       || !in_hard_reg_set_p (reg_class_contents[regclass],
					      mode, hard_regno)
		       || overlaps_hard_reg_set_p (lra_no_alloc_regs,
						   mode, hard_regno)))))
    {
      /* The class will be defined later in curr_insn_transform.  */
      enum reg_class rclass
	= (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);

      if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
			  rclass, NULL,
			  true, false, "paradoxical subreg", &new_reg))
	{
	  rtx subreg;
	  bool insert_before, insert_after;

	  PUT_MODE (new_reg, mode);
	  subreg = gen_lowpart_SUBREG (innermode, new_reg);
	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	  insert_before = (type != OP_OUT);
	  insert_after = (type != OP_IN);
	  insert_move_for_subreg (insert_before ? &before : NULL,
				  insert_after ? &after : NULL,
				  reg, subreg);
	}
      SUBREG_REG (operand) = new_reg;
      lra_process_new_insns (curr_insn, before, after,
			     "Inserting paradoxical subreg reload");
      return true;
    }
  return false;
}
2039 :
2040 : /* Return TRUE if X refers for a hard register from SET. */
2041 : static bool
2042 407654 : uses_hard_regs_p (rtx x, HARD_REG_SET set)
2043 : {
2044 407654 : int i, j, x_hard_regno;
2045 407654 : machine_mode mode;
2046 407654 : const char *fmt;
2047 407654 : enum rtx_code code;
2048 :
2049 407654 : if (x == NULL_RTX)
2050 : return false;
2051 407654 : code = GET_CODE (x);
2052 407654 : mode = GET_MODE (x);
2053 :
2054 407654 : if (code == SUBREG)
2055 : {
2056 : /* For all SUBREGs we want to check whether the full multi-register
2057 : overlaps the set. For normal SUBREGs this means 'get_hard_regno' of
2058 : the inner register, for paradoxical SUBREGs this means the
2059 : 'get_hard_regno' of the full SUBREG and for complete SUBREGs either is
2060 : fine. Use the wider mode for all cases. */
2061 2701 : rtx subreg = SUBREG_REG (x);
2062 2701 : mode = wider_subreg_mode (x);
2063 2701 : if (mode == GET_MODE (subreg))
2064 : {
2065 1665 : x = subreg;
2066 1665 : code = GET_CODE (x);
2067 : }
2068 : }
2069 :
2070 407654 : if (REG_P (x) || SUBREG_P (x))
2071 : {
2072 266293 : x_hard_regno = get_hard_regno (x);
2073 266293 : return (x_hard_regno >= 0
2074 266293 : && overlaps_hard_reg_set_p (set, mode, x_hard_regno));
2075 : }
2076 141361 : fmt = GET_RTX_FORMAT (code);
2077 366728 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2078 : {
2079 227369 : if (fmt[i] == 'e')
2080 : {
2081 109972 : if (uses_hard_regs_p (XEXP (x, i), set))
2082 : return true;
2083 : }
2084 117397 : else if (fmt[i] == 'E')
2085 : {
2086 4398 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2087 3980 : if (uses_hard_regs_p (XVECEXP (x, i, j), set))
2088 : return true;
2089 : }
2090 : }
2091 : return false;
2092 : }
2093 :
2094 : /* Return true if OP is a spilled pseudo. */
2095 : static inline bool
2096 80942302 : spilled_pseudo_p (rtx op)
2097 : {
2098 80942302 : return (REG_P (op)
2099 80942302 : && REGNO (op) >= FIRST_PSEUDO_REGISTER && in_mem_p (REGNO (op)));
2100 : }
2101 :
2102 : /* Return true if X is a general constant. */
2103 : static inline bool
2104 7851057 : general_constant_p (rtx x)
2105 : {
2106 7851057 : return CONSTANT_P (x) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (x));
2107 : }
2108 :
2109 : static bool
2110 24895363 : reg_in_class_p (rtx reg, enum reg_class cl)
2111 : {
2112 24895363 : if (cl == NO_REGS)
2113 1108787 : return get_reg_class (REGNO (reg)) == NO_REGS;
2114 23786576 : return in_class_p (reg, cl, NULL);
2115 : }
2116 :
2117 : /* Return true if SET of RCLASS contains no hard regs which can be
2118 : used in MODE. */
2119 : static bool
2120 3851004 : prohibited_class_reg_set_mode_p (enum reg_class rclass,
2121 : HARD_REG_SET &set,
2122 : machine_mode mode)
2123 : {
2124 3851004 : HARD_REG_SET temp;
2125 :
2126 7702008 : lra_assert (hard_reg_set_subset_p (reg_class_contents[rclass], set));
2127 3851004 : temp = set & ~lra_no_alloc_regs;
2128 3851004 : return (hard_reg_set_subset_p
2129 3851004 : (temp, ira_prohibited_class_mode_regs[rclass][mode]));
2130 : }
2131 :
2132 :
/* Used to check validity info about small class input operands.  It
   should be incremented at the start of processing an insn
   alternative.  */
static unsigned int curr_small_class_check = 0;
2137 :
2138 : /* Update number of used inputs of class OP_CLASS for operand NOP
2139 : of alternative NALT. Return true if we have more such class operands
2140 : than the number of available regs. */
2141 : static bool
2142 394826349 : update_and_check_small_class_inputs (int nop, int nalt,
2143 : enum reg_class op_class)
2144 : {
2145 394826349 : static unsigned int small_class_check[LIM_REG_CLASSES];
2146 394826349 : static int small_class_input_nums[LIM_REG_CLASSES];
2147 :
2148 391925677 : if (SMALL_REGISTER_CLASS_P (op_class)
2149 : /* We are interesting in classes became small because of fixing
2150 : some hard regs, e.g. by an user through GCC options. */
2151 3008235 : && hard_reg_set_intersect_p (reg_class_contents[op_class],
2152 3008235 : ira_no_alloc_regs)
2153 394826388 : && (curr_static_id->operand[nop].type != OP_OUT
2154 33 : || TEST_BIT (curr_static_id->operand[nop].early_clobber_alts, nalt)))
2155 : {
2156 6 : if (small_class_check[op_class] == curr_small_class_check)
2157 0 : small_class_input_nums[op_class]++;
2158 : else
2159 : {
2160 6 : small_class_check[op_class] = curr_small_class_check;
2161 6 : small_class_input_nums[op_class] = 1;
2162 : }
2163 6 : if (small_class_input_nums[op_class] > ira_class_hard_regs_num[op_class])
2164 : return true;
2165 : }
2166 : return false;
2167 : }
2168 :
2169 : /* Print operand constraints for alternative ALT_NUMBER of the current
2170 : insn. */
2171 : static void
2172 4590 : print_curr_insn_alt (int alt_number)
2173 : {
2174 15917 : for (int i = 0; i < curr_static_id->n_operands; i++)
2175 : {
2176 11327 : const char *p = (curr_static_id->operand_alternative
2177 11327 : [alt_number * curr_static_id->n_operands + i].constraint);
2178 11327 : if (*p == '\0')
2179 220 : continue;
2180 11107 : fprintf (lra_dump_file, " (%d) ", i);
2181 39435 : for (; *p != '\0' && *p != ',' && *p != '#'; p++)
2182 17221 : fputc (*p, lra_dump_file);
2183 : }
2184 4590 : }
2185 :
2186 : /* Major function to choose the current insn alternative and what
2187 : operands should be reloaded and how. If ONLY_ALTERNATIVE is not
2188 : negative we should consider only this alternative. Return false if
2189 : we cannot choose the alternative or find how to reload the
2190 : operands. */
2191 : static bool
2192 89962036 : process_alt_operands (int only_alternative)
2193 : {
2194 89962036 : bool ok_p = false;
2195 89962036 : int nop, overall, nalt;
2196 89962036 : int n_alternatives = curr_static_id->n_alternatives;
2197 89962036 : int n_operands = curr_static_id->n_operands;
2198 : /* LOSERS counts the operands that don't fit this alternative and
2199 : would require loading. */
2200 89962036 : int losers;
2201 89962036 : int addr_losers;
2202 : /* REJECT is a count of how undesirable this alternative says it is
2203 : if any reloading is required. If the alternative matches exactly
2204 : then REJECT is ignored, but otherwise it gets this much counted
2205 : against it in addition to the reloading needed. */
2206 89962036 : int reject;
2207 : /* This is defined by '!' or '?' alternative constraint and added to
2208 : reject. But in some cases it can be ignored. */
2209 89962036 : int static_reject;
2210 89962036 : int op_reject;
2211 : /* The number of elements in the following array. */
2212 89962036 : int early_clobbered_regs_num;
2213 : /* Numbers of operands which are early clobber registers. */
2214 89962036 : int early_clobbered_nops[MAX_RECOG_OPERANDS];
2215 89962036 : enum reg_class curr_alt[MAX_RECOG_OPERANDS];
2216 89962036 : enum reg_class all_this_alternative;
2217 89962036 : int all_used_nregs, all_reload_nregs;
2218 89962036 : HARD_REG_SET curr_alt_set[MAX_RECOG_OPERANDS];
2219 89962036 : HARD_REG_SET curr_alt_exclude_start_hard_regs[MAX_RECOG_OPERANDS];
2220 89962036 : bool curr_alt_match_win[MAX_RECOG_OPERANDS];
2221 89962036 : bool curr_alt_win[MAX_RECOG_OPERANDS];
2222 89962036 : bool curr_alt_offmemok[MAX_RECOG_OPERANDS];
2223 89962036 : int curr_alt_matches[MAX_RECOG_OPERANDS];
2224 : /* The number of elements in the following array. */
2225 89962036 : int curr_alt_dont_inherit_ops_num;
2226 : /* Numbers of operands whose reload pseudos should not be inherited. */
2227 89962036 : int curr_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
2228 89962036 : bool curr_reuse_alt_p;
2229 : /* True if output stack pointer reload should be generated for the current
2230 : alternative. */
2231 89962036 : bool curr_alt_out_sp_reload_p;
2232 89962036 : bool curr_alt_class_change_p;
2233 89962036 : rtx op;
2234 : /* The register when the operand is a subreg of register, otherwise the
2235 : operand itself. */
2236 89962036 : rtx no_subreg_reg_operand[MAX_RECOG_OPERANDS];
2237 : /* The register if the operand is a register or subreg of register,
2238 : otherwise NULL. */
2239 89962036 : rtx operand_reg[MAX_RECOG_OPERANDS];
2240 89962036 : int hard_regno[MAX_RECOG_OPERANDS];
2241 89962036 : machine_mode biggest_mode[MAX_RECOG_OPERANDS];
2242 89962036 : int reload_nregs, reload_sum;
2243 89962036 : bool costly_p;
2244 89962036 : enum reg_class cl;
2245 89962036 : const HARD_REG_SET *cl_filter;
2246 89962036 : HARD_REG_SET hard_reg_constraint;
2247 :
2248 : /* Calculate some data common for all alternatives to speed up the
2249 : function. */
2250 299065767 : for (nop = 0; nop < n_operands; nop++)
2251 : {
2252 209103731 : rtx reg;
2253 :
2254 209103731 : op = no_subreg_reg_operand[nop] = *curr_id->operand_loc[nop];
2255 : /* The real hard regno of the operand after the allocation. */
2256 209103731 : hard_regno[nop] = get_hard_regno (op);
2257 :
2258 209103731 : operand_reg[nop] = reg = op;
2259 209103731 : biggest_mode[nop] = GET_MODE (op);
2260 209103731 : if (GET_CODE (op) == SUBREG)
2261 : {
2262 4120974 : biggest_mode[nop] = wider_subreg_mode (op);
2263 4120974 : operand_reg[nop] = reg = SUBREG_REG (op);
2264 : }
2265 209103731 : if (! REG_P (reg))
2266 88374251 : operand_reg[nop] = NULL_RTX;
2267 120729480 : else if (REGNO (reg) >= FIRST_PSEUDO_REGISTER
2268 141696396 : || ((int) REGNO (reg)
2269 20966916 : == lra_get_elimination_hard_regno (REGNO (reg))))
2270 117792451 : no_subreg_reg_operand[nop] = reg;
2271 : else
2272 2937029 : operand_reg[nop] = no_subreg_reg_operand[nop]
2273 : /* Just use natural mode for elimination result. It should
2274 : be enough for extra constraints hooks. */
2275 2937029 : = regno_reg_rtx[hard_regno[nop]];
2276 : }
2277 :
2278 : /* The constraints are made of several alternatives. Each operand's
2279 : constraint looks like foo,bar,... with commas separating the
2280 : alternatives. The first alternatives for all operands go
2281 : together, the second alternatives go together, etc.
2282 :
2283 : First loop over alternatives. */
2284 89962036 : alternative_mask preferred = curr_id->preferred_alternatives;
2285 89962036 : if (only_alternative >= 0)
2286 970741 : preferred &= ALTERNATIVE_BIT (only_alternative);
2287 :
2288 89962036 : bool prefer_memory_p = false;
2289 89962138 : repeat:
2290 369787337 : for (nalt = 0; nalt < n_alternatives; nalt++)
2291 : {
2292 : /* Loop over operands for one constraint alternative. */
2293 356033523 : if (!TEST_BIT (preferred, nalt))
2294 98463398 : continue;
2295 :
2296 257570125 : if (lra_dump_file != NULL)
2297 : {
2298 3403 : fprintf (lra_dump_file, " Considering alt=%d of insn %d: ",
2299 3403 : nalt, INSN_UID (curr_insn));
2300 3403 : print_curr_insn_alt (nalt);
2301 3403 : fprintf (lra_dump_file, "\n");
2302 : }
2303 :
2304 257570125 : bool matching_early_clobber[MAX_RECOG_OPERANDS];
2305 257570125 : curr_small_class_check++;
2306 257570125 : overall = losers = addr_losers = 0;
2307 257570125 : static_reject = reject = reload_nregs = reload_sum = 0;
2308 854178488 : for (nop = 0; nop < n_operands; nop++)
2309 : {
2310 596608363 : int inc = (curr_static_id
2311 596608363 : ->operand_alternative[nalt * n_operands + nop].reject);
2312 596608363 : if (lra_dump_file != NULL && inc != 0)
2313 53 : fprintf (lra_dump_file,
2314 : " Staticly defined alt reject+=%d\n", inc);
2315 596608363 : static_reject += inc;
2316 596608363 : matching_early_clobber[nop] = 0;
2317 : }
2318 : reject += static_reject;
2319 : early_clobbered_regs_num = 0;
2320 : curr_alt_out_sp_reload_p = false;
2321 : curr_reuse_alt_p = true;
2322 : curr_alt_class_change_p = false;
2323 : all_this_alternative = NO_REGS;
2324 : all_used_nregs = all_reload_nregs = 0;
2325 666983117 : for (nop = 0; nop < n_operands; nop++)
2326 : {
2327 531879422 : const char *p;
2328 531879422 : char *end;
2329 531879422 : int len, c, m, i, opalt_num, this_alternative_matches;
2330 531879422 : bool win, did_match, offmemok, early_clobber_p;
2331 : /* false => this operand can be reloaded somehow for this
2332 : alternative. */
2333 531879422 : bool badop;
2334 : /* true => this operand can be reloaded if the alternative
2335 : allows regs. */
2336 531879422 : bool winreg;
2337 : /* True if a constant forced into memory would be OK for
2338 : this operand. */
2339 531879422 : bool constmemok;
2340 531879422 : enum reg_class this_alternative, this_costly_alternative;
2341 531879422 : HARD_REG_SET this_alternative_set, this_costly_alternative_set;
2342 531879422 : HARD_REG_SET this_alternative_exclude_start_hard_regs;
2343 531879422 : bool this_alternative_match_win, this_alternative_win;
2344 531879422 : bool this_alternative_offmemok;
2345 531879422 : bool scratch_p;
2346 531879422 : machine_mode mode;
2347 531879422 : enum constraint_num cn;
2348 531879422 : bool class_change_p = false;
2349 :
2350 531879422 : opalt_num = nalt * n_operands + nop;
2351 531879422 : if (curr_static_id->operand_alternative[opalt_num].anything_ok)
2352 : {
2353 : /* Fast track for no constraints at all. */
2354 14586643 : curr_alt[nop] = NO_REGS;
2355 14586643 : CLEAR_HARD_REG_SET (curr_alt_set[nop]);
2356 14586643 : curr_alt_win[nop] = true;
2357 14586643 : curr_alt_match_win[nop] = false;
2358 14586643 : curr_alt_offmemok[nop] = false;
2359 14586643 : curr_alt_matches[nop] = -1;
2360 14586643 : continue;
2361 : }
2362 :
2363 517292779 : op = no_subreg_reg_operand[nop];
2364 517292779 : mode = curr_operand_mode[nop];
2365 :
2366 517292779 : win = did_match = winreg = offmemok = constmemok = false;
2367 517292779 : badop = true;
2368 :
2369 517292779 : early_clobber_p = false;
2370 517292779 : p = curr_static_id->operand_alternative[opalt_num].constraint;
2371 :
2372 517292779 : this_costly_alternative = this_alternative = NO_REGS;
2373 : /* We update set of possible hard regs besides its class
2374 : because reg class might be inaccurate. For example,
2375 : union of LO_REGS (l), HI_REGS(h), and STACK_REG(k) in ARM
2376 : is translated in HI_REGS because classes are merged by
2377 : pairs and there is no accurate intermediate class. */
2378 2069171116 : CLEAR_HARD_REG_SET (this_alternative_set);
2379 1551878337 : CLEAR_HARD_REG_SET (this_costly_alternative_set);
2380 517292779 : CLEAR_HARD_REG_SET (this_alternative_exclude_start_hard_regs);
2381 517292779 : this_alternative_win = false;
2382 517292779 : this_alternative_match_win = false;
2383 517292779 : this_alternative_offmemok = false;
2384 517292779 : this_alternative_matches = -1;
2385 :
2386 : /* An empty constraint should be excluded by the fast
2387 : track. */
2388 517292779 : lra_assert (*p != 0 && *p != ',');
2389 :
2390 : op_reject = 0;
2391 : /* Scan this alternative's specs for this operand; set WIN
2392 : if the operand fits any letter in this alternative.
2393 : Otherwise, clear BADOP if this operand could fit some
2394 : letter after reloads, or set WINREG if this operand could
2395 : fit after reloads provided the constraint allows some
2396 : registers. */
2397 : costly_p = false;
2398 1313413106 : do
2399 : {
2400 1313413106 : switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
2401 : {
2402 : case '\0':
2403 : len = 0;
2404 : break;
2405 494373986 : case ',':
2406 494373986 : c = '\0';
2407 494373986 : break;
2408 :
2409 177154 : case '&':
2410 177154 : early_clobber_p = true;
2411 177154 : break;
2412 :
2413 21138 : case '$':
2414 21138 : op_reject += LRA_MAX_REJECT;
2415 21138 : break;
2416 0 : case '^':
2417 0 : op_reject += LRA_LOSER_COST_FACTOR;
2418 0 : break;
2419 :
2420 0 : case '#':
2421 : /* Ignore rest of this alternative. */
2422 0 : c = '\0';
2423 0 : break;
2424 :
2425 56409715 : case '0': case '1': case '2': case '3': case '4':
2426 56409715 : case '5': case '6': case '7': case '8': case '9':
2427 56409715 : {
2428 56409715 : int m_hregno;
2429 56409715 : bool match_p;
2430 :
2431 56409715 : m = strtoul (p, &end, 10);
2432 56409715 : p = end;
2433 56409715 : len = 0;
2434 56409715 : lra_assert (nop > m);
2435 :
2436 : /* Reject matches if we don't know which operand is
2437 : bigger. This situation would arguably be a bug in
2438 : an .md pattern, but could also occur in a user asm. */
2439 169229145 : if (!ordered_p (GET_MODE_SIZE (biggest_mode[m]),
2440 56409715 : GET_MODE_SIZE (biggest_mode[nop])))
2441 : break;
2442 :
2443 : /* Don't match wrong asm insn operands for proper
2444 : diagnostic later. */
2445 56409715 : if (INSN_CODE (curr_insn) < 0
2446 33072 : && (curr_operand_mode[m] == BLKmode
2447 33071 : || curr_operand_mode[nop] == BLKmode)
2448 1 : && curr_operand_mode[m] != curr_operand_mode[nop])
2449 : break;
2450 :
2451 56409714 : m_hregno = get_hard_regno (*curr_id->operand_loc[m]);
2452 : /* We are supposed to match a previous operand.
2453 : If we do, we win if that one did. If we do
2454 : not, count both of the operands as losers.
2455 : (This is too conservative, since most of the
2456 : time only a single reload insn will be needed
2457 : to make the two operands win. As a result,
2458 : this alternative may be rejected when it is
2459 : actually desirable.) */
2460 56409714 : match_p = false;
2461 56409714 : if (operands_match_p (*curr_id->operand_loc[nop],
2462 56409714 : *curr_id->operand_loc[m], m_hregno))
2463 : {
2464 : /* We should reject matching of an early
2465 : clobber operand if the matching operand is
2466 : not dying in the insn. */
2467 14885122 : if (!TEST_BIT (curr_static_id->operand[m]
2468 : .early_clobber_alts, nalt)
2469 18812 : || operand_reg[nop] == NULL_RTX
2470 14903934 : || (find_regno_note (curr_insn, REG_DEAD,
2471 : REGNO (op))
2472 4648 : || REGNO (op) == REGNO (operand_reg[m])))
2473 14885122 : match_p = true;
2474 : }
2475 14885122 : if (match_p)
2476 : {
2477 : /* If we are matching a non-offsettable
2478 : address where an offsettable address was
2479 : expected, then we must reject this
2480 : combination, because we can't reload
2481 : it. */
2482 14885122 : if (curr_alt_offmemok[m]
2483 1484 : && MEM_P (*curr_id->operand_loc[m])
2484 0 : && curr_alt[m] == NO_REGS && ! curr_alt_win[m])
2485 0 : continue;
2486 : }
2487 : else
2488 : {
2489 : /* If the operands do not match and one
2490 : operand is INOUT, we can not match them.
2491 : Try other possibilities, e.g. other
2492 : alternatives or commutative operand
2493 : exchange. */
2494 41524592 : if (curr_static_id->operand[nop].type == OP_INOUT
2495 41524592 : || curr_static_id->operand[m].type == OP_INOUT)
2496 : break;
2497 : /* Operands don't match. For asm if the operands
2498 : are different user defined explicit hard
2499 : registers, then we cannot make them match
2500 : when one is early clobber operand. */
2501 41524164 : if ((REG_P (*curr_id->operand_loc[nop])
2502 26034143 : || SUBREG_P (*curr_id->operand_loc[nop]))
2503 16086869 : && (REG_P (*curr_id->operand_loc[m])
2504 201623 : || SUBREG_P (*curr_id->operand_loc[m]))
2505 15993968 : && INSN_CODE (curr_insn) < 0)
2506 : {
2507 590 : rtx nop_reg = *curr_id->operand_loc[nop];
2508 590 : if (SUBREG_P (nop_reg))
2509 0 : nop_reg = SUBREG_REG (nop_reg);
2510 590 : rtx m_reg = *curr_id->operand_loc[m];
2511 590 : if (SUBREG_P (m_reg))
2512 0 : m_reg = SUBREG_REG (m_reg);
2513 :
2514 590 : if (REG_P (nop_reg)
2515 590 : && HARD_REGISTER_P (nop_reg)
2516 0 : && REG_USERVAR_P (nop_reg)
2517 0 : && REG_P (m_reg)
2518 0 : && HARD_REGISTER_P (m_reg)
2519 590 : && REG_USERVAR_P (m_reg))
2520 : {
2521 : int i;
2522 :
2523 0 : for (i = 0; i < early_clobbered_regs_num; i++)
2524 0 : if (m == early_clobbered_nops[i])
2525 : break;
2526 0 : if (i < early_clobbered_regs_num
2527 0 : || early_clobber_p)
2528 : break;
2529 : }
2530 : }
2531 : /* Both operands must allow a reload register,
2532 : otherwise we cannot make them match. */
2533 41524164 : if (curr_alt[m] == NO_REGS)
2534 : break;
2535 : /* Retroactively mark the operand we had to
2536 : match as a loser, if it wasn't already and
2537 : it wasn't matched to a register constraint
2538 : (e.g it might be matched by memory). */
2539 41499343 : if (curr_alt_win[m]
2540 40655794 : && (operand_reg[m] == NULL_RTX
2541 40151252 : || hard_regno[m] < 0))
2542 : {
2543 1263913 : if (lra_dump_file != NULL)
2544 9 : fprintf
2545 9 : (lra_dump_file,
2546 : " %d Matched operand reload: "
2547 : "losers++\n", m);
2548 1263913 : losers++;
2549 1263913 : reload_nregs
2550 1263913 : += (ira_reg_class_max_nregs[curr_alt[m]]
2551 1263913 : [GET_MODE (*curr_id->operand_loc[m])]);
2552 : }
2553 :
2554 : /* Prefer matching earlyclobber alternative as
2555 : it results in less hard regs required for
2556 : the insn than a non-matching earlyclobber
2557 : alternative. */
2558 41499343 : if (TEST_BIT (curr_static_id->operand[m]
2559 : .early_clobber_alts, nalt))
2560 : {
2561 18110 : if (lra_dump_file != NULL)
2562 0 : fprintf
2563 0 : (lra_dump_file,
2564 : " %d Matching earlyclobber alt:"
2565 : " reject--\n",
2566 : nop);
2567 18110 : if (!matching_early_clobber[m])
2568 : {
2569 18110 : reject--;
2570 18110 : matching_early_clobber[m] = 1;
2571 : }
2572 : }
2573 : /* Otherwise we prefer no matching
2574 : alternatives because it gives more freedom
2575 : in RA. */
2576 41481233 : else if (operand_reg[nop] == NULL_RTX
2577 41481233 : || (find_regno_note (curr_insn, REG_DEAD,
2578 16061030 : REGNO (operand_reg[nop]))
2579 : == NULL_RTX))
2580 : {
2581 36504921 : if (lra_dump_file != NULL)
2582 912 : fprintf
2583 912 : (lra_dump_file,
2584 : " %d Matching alt: reject+=2\n",
2585 : nop);
2586 36504921 : reject += 2;
2587 : }
2588 : }
2589 : /* If we have to reload this operand and some
2590 : previous operand also had to match the same
2591 : thing as this operand, we don't know how to do
2592 : that. */
2593 56384465 : if (!match_p || !curr_alt_win[m])
2594 : {
2595 87154234 : for (i = 0; i < nop; i++)
2596 45547686 : if (curr_alt_matches[i] == m)
2597 : break;
2598 41606549 : if (i < nop)
2599 : break;
2600 : }
2601 : else
2602 : did_match = true;
2603 :
2604 56384464 : this_alternative_matches = m;
2605 : /* This can be fixed with reloads if the operand
2606 : we are supposed to match can be fixed with
2607 : reloads. */
2608 56384464 : badop = false;
2609 56384464 : this_alternative = curr_alt[m];
2610 56384464 : this_alternative_set = curr_alt_set[m];
2611 56384464 : this_alternative_exclude_start_hard_regs
2612 56384464 : = curr_alt_exclude_start_hard_regs[m];
2613 56384464 : winreg = this_alternative != NO_REGS;
2614 56384464 : break;
2615 : }
2616 :
2617 11731646 : case 'g':
2618 11731646 : if (MEM_P (op)
2619 7851057 : || general_constant_p (op)
2620 16305609 : || spilled_pseudo_p (op))
2621 : win = true;
2622 11731646 : if (REG_P (op) && prefer_memory_p)
2623 : {
2624 11731646 : badop = false;
2625 11731646 : offmemok = true;
2626 : }
2627 11731646 : cl = GENERAL_REGS;
2628 11731646 : cl_filter = nullptr;
2629 11731646 : goto reg;
2630 :
2631 1140 : case '{':
2632 1140 : {
2633 1140 : int regno = decode_hard_reg_constraint (p);
2634 1140 : gcc_assert (regno >= 0);
2635 1140 : cl = NO_REGS;
2636 1140 : int nregs = hard_regno_nregs (regno, mode);
2637 2280 : for (int i = 0; i < nregs; ++i)
2638 1140 : cl = reg_class_superunion[cl][REGNO_REG_CLASS (regno + i)];
2639 1140 : CLEAR_HARD_REG_SET (hard_reg_constraint);
2640 1140 : SET_HARD_REG_BIT (hard_reg_constraint, regno);
2641 1140 : cl_filter = &hard_reg_constraint;
2642 1140 : goto reg;
2643 : }
2644 :
2645 727779534 : default:
2646 727779534 : cn = lookup_constraint (p);
2647 727779534 : switch (get_constraint_type (cn))
2648 : {
2649 481565813 : case CT_REGISTER:
2650 481565813 : cl = reg_class_for_constraint (cn);
2651 354435510 : if (cl != NO_REGS)
2652 : {
2653 344983547 : cl_filter = get_register_filter (cn);
2654 344983547 : goto reg;
2655 : }
2656 : break;
2657 :
2658 2082824 : case CT_CONST_INT:
2659 2082824 : if (CONST_INT_P (op)
2660 2082824 : && insn_const_int_ok_for_constraint (INTVAL (op), cn))
2661 : win = true;
2662 : break;
2663 :
2664 110947746 : case CT_MEMORY:
2665 110947746 : case CT_RELAXED_MEMORY:
2666 110947746 : if (MEM_P (op)
2667 110947746 : && satisfies_memory_constraint_p (op, cn))
2668 : win = true;
2669 75325637 : else if (spilled_pseudo_p (op))
2670 45106811 : win = true;
2671 :
2672 : /* If we didn't already win, we can reload constants
2673 : via force_const_mem or put the pseudo value into
2674 : memory, or make other memory by reloading the
2675 : address like for 'o'. */
2676 116069008 : if (CONST_POOL_OK_P (mode, op)
2677 105826326 : || MEM_P (op) || REG_P (op)
2678 : /* We can restore the equiv insn by a
2679 : reload. */
2680 111521087 : || equiv_substition_p[nop])
2681 110912968 : badop = false;
2682 : constmemok = true;
2683 : offmemok = true;
2684 : break;
2685 :
2686 1686769 : case CT_ADDRESS:
2687 : /* An asm operand with an address constraint
2688 : that doesn't satisfy address_operand has
2689 : is_address cleared, so that we don't try to
2690 : make a non-address fit. */
2691 1686769 : if (!curr_static_id->operand[nop].is_address)
2692 : break;
2693 : /* If we didn't already win, we can reload the address
2694 : into a base register. */
2695 1686750 : if (satisfies_address_constraint_p (op, cn))
2696 1686750 : win = true;
2697 1686750 : cl = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2698 : ADDRESS, SCRATCH);
2699 1686750 : cl_filter = nullptr;
2700 1686750 : badop = false;
2701 1686750 : goto reg;
2702 :
2703 130280318 : case CT_FIXED_FORM:
2704 130280318 : if (constraint_satisfied_p (op, cn))
2705 1313413106 : win = true;
2706 : break;
2707 :
2708 1216064 : case CT_SPECIAL_MEMORY:
2709 1216064 : if (satisfies_memory_constraint_p (op, cn))
2710 : win = true;
2711 1042702 : else if (spilled_pseudo_p (op))
2712 : {
2713 1313413106 : curr_reuse_alt_p = false;
2714 1313413106 : win = true;
2715 : }
2716 : break;
2717 : }
2718 : break;
2719 :
2720 358403083 : reg:
2721 358403083 : if (mode == BLKmode)
2722 : break;
2723 358403065 : this_alternative = reg_class_subunion[this_alternative][cl];
2724 358403065 : if (hard_reg_set_subset_p (this_alternative_set,
2725 358403065 : reg_class_contents[cl]))
2726 358399440 : this_alternative_exclude_start_hard_regs
2727 358399440 : = ira_exclude_class_mode_regs[cl][mode];
2728 3625 : else if (!hard_reg_set_subset_p (reg_class_contents[cl],
2729 : this_alternative_set))
2730 3624 : this_alternative_exclude_start_hard_regs
2731 1075212819 : |= ira_exclude_class_mode_regs[cl][mode];
2732 358403065 : this_alternative_set |= reg_class_contents[cl];
2733 358403065 : if (cl_filter)
2734 2280 : this_alternative_exclude_start_hard_regs |= ~*cl_filter;
2735 358403065 : if (costly_p)
2736 : {
2737 21170024 : this_costly_alternative
2738 21170024 : = reg_class_subunion[this_costly_alternative][cl];
2739 21170024 : this_costly_alternative_set |= reg_class_contents[cl];
2740 : }
2741 358403065 : winreg = true;
2742 358403065 : if (REG_P (op))
2743 : {
2744 228259824 : rtx orig_op = *curr_id->operand_loc[nop];
2745 6579225 : if (GET_CODE (orig_op) == SUBREG && HARD_REGISTER_P (op)
2746 228259914 : && !targetm.hard_regno_mode_ok (REGNO (op),
2747 90 : GET_MODE(orig_op)))
2748 : break;
2749 :
2750 228259824 : tree decl;
2751 :
2752 228259824 : if (hard_regno[nop] >= 0
2753 192425124 : && in_hard_reg_set_p (this_alternative_set,
2754 : mode, hard_regno[nop])
2755 174372223 : && (!cl_filter
2756 590 : || TEST_HARD_REG_BIT (*cl_filter,
2757 : hard_regno[nop]))
2758 402632041 : && ((REG_ATTRS (op) && (decl = REG_EXPR (op)) != NULL
2759 97374623 : && VAR_P (decl) && DECL_HARD_REGISTER (decl))
2760 174368960 : || !(TEST_HARD_REG_BIT
2761 174368960 : (this_alternative_exclude_start_hard_regs,
2762 : hard_regno[nop]))))
2763 : win = true;
2764 53887637 : else if (hard_regno[nop] < 0 && !prefer_memory_p)
2765 : {
2766 35834564 : if (in_class_p (op, this_alternative, NULL))
2767 : win = true;
2768 26950216 : else if (in_class_p (op, this_alternative, NULL, true))
2769 : {
2770 1313413106 : class_change_p = true;
2771 1313413106 : win = true;
2772 : }
2773 : }
2774 : }
2775 : break;
2776 : }
2777 1313413106 : if (c != ' ' && c != '\t')
2778 1313413106 : costly_p = c == '*';
2779 : }
2780 1313413106 : while ((p += len), c);
2781 :
2782 1034585558 : scratch_p = (operand_reg[nop] != NULL_RTX
2783 517292779 : && ira_former_scratch_p (REGNO (operand_reg[nop])));
2784 : /* Record which operands fit this alternative. */
2785 517292779 : if (win)
2786 : {
2787 278665505 : if (early_clobber_p
2788 278522282 : || curr_static_id->operand[nop].type != OP_OUT)
2789 : {
2790 121894227 : if (winreg)
2791 100759363 : all_used_nregs
2792 100759363 : += ira_reg_class_min_nregs[this_alternative][mode];
2793 121894227 : all_this_alternative
2794 121894227 : = (reg_class_subunion
2795 121894227 : [all_this_alternative][this_alternative]);
2796 : }
2797 278665505 : this_alternative_win = true;
2798 278665505 : if (class_change_p)
2799 : {
2800 270958 : curr_alt_class_change_p = true;
2801 270958 : if (lra_dump_file != NULL)
2802 10 : fprintf (lra_dump_file,
2803 : " %d Narrowing class: reject+=3\n",
2804 : nop);
2805 270958 : reject += 3;
2806 : }
2807 278665505 : if (operand_reg[nop] != NULL_RTX)
2808 : {
2809 194430796 : if (hard_regno[nop] >= 0)
2810 : {
2811 174315984 : if (in_hard_reg_set_p (this_costly_alternative_set,
2812 : mode, hard_regno[nop]))
2813 : {
2814 779078 : if (lra_dump_file != NULL)
2815 21 : fprintf (lra_dump_file,
2816 : " %d Costly set: reject++\n",
2817 : nop);
2818 779078 : reject++;
2819 : }
2820 : }
2821 : else
2822 : {
2823 : /* Prefer won reg to spilled pseudo under other
 2824 :                        equal conditions for possible inheritance.  */
2825 20114812 : if (! scratch_p)
2826 : {
2827 20110215 : if (lra_dump_file != NULL)
2828 59 : fprintf
2829 59 : (lra_dump_file,
2830 : " %d Non pseudo reload: reject++\n",
2831 : nop);
2832 20110215 : reject++;
2833 : }
2834 20114812 : if (in_class_p (operand_reg[nop],
2835 : this_costly_alternative, NULL, true))
2836 : {
2837 134016 : if (lra_dump_file != NULL)
2838 0 : fprintf
2839 0 : (lra_dump_file,
2840 : " %d Non pseudo costly reload:"
2841 : " reject++\n",
2842 : nop);
2843 134016 : reject++;
2844 : }
2845 : }
2846 : /* We simulate the behavior of old reload here.
2847 : Although scratches need hard registers and it
2848 : might result in spilling other pseudos, no reload
2849 : insns are generated for the scratches. So it
2850 : might cost something but probably less than old
2851 : reload pass believes. */
2852 194430796 : if (scratch_p)
2853 : {
2854 116909 : if (lra_dump_file != NULL)
2855 6 : fprintf (lra_dump_file,
2856 : " %d Scratch win: reject+=2\n",
2857 : nop);
2858 116909 : reject += 2;
2859 : }
2860 : }
2861 : }
2862 238627274 : else if (did_match)
2863 : this_alternative_match_win = true;
2864 : else
2865 : {
2866 223849358 : if (prefer_memory_p && offmemok)
2867 : {
2868 0 : winreg = false;
2869 0 : this_alternative = NO_REGS;
2870 : }
2871 :
2872 223849358 : int const_to_mem = 0;
2873 223849358 : bool no_regs_p;
2874 :
2875 223849358 : reject += op_reject;
2876 : /* Mark output reload of the stack pointer. */
2877 223849358 : if (op == stack_pointer_rtx
2878 56812 : && curr_static_id->operand[nop].type != OP_IN)
2879 223849358 : curr_alt_out_sp_reload_p = true;
2880 :
2881 : /* If this alternative asks for a specific reg class, see if there
2882 : is at least one allocatable register in that class. */
2883 223849358 : no_regs_p
2884 390310292 : = (this_alternative == NO_REGS
2885 223849358 : || (hard_reg_set_subset_p
2886 332921890 : (reg_class_contents[this_alternative],
2887 : lra_no_alloc_regs)));
2888 :
2889 : /* For asms, verify that the class for this alternative is possible
2890 : for the mode that is specified. */
2891 166460934 : if (!no_regs_p && INSN_CODE (curr_insn) < 0)
2892 : {
2893 : int i;
2894 68994 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2895 68992 : if (targetm.hard_regno_mode_ok (i, mode)
2896 68992 : && in_hard_reg_set_p (reg_class_contents[this_alternative],
2897 : mode, i))
2898 : break;
2899 20166 : if (i == FIRST_PSEUDO_REGISTER)
2900 223849358 : winreg = false;
2901 : }
2902 :
2903 : /* If this operand accepts a register, and if the
2904 : register class has at least one allocatable register,
2905 : then this operand can be reloaded. */
2906 223849358 : if (winreg && !no_regs_p)
2907 : badop = false;
2908 :
2909 57388426 : if (badop)
2910 : {
2911 48019516 : if (lra_dump_file != NULL)
2912 606 : fprintf (lra_dump_file,
2913 : " Bad operand -- refuse\n");
2914 122466430 : goto fail;
2915 : }
2916 :
2917 175829842 : if (this_alternative != NO_REGS)
2918 : {
2919 166460933 : HARD_REG_SET available_regs
2920 166460933 : = (reg_class_contents[this_alternative]
2921 166460933 : & ~((ira_prohibited_class_mode_regs
2922 166460933 : [this_alternative][mode])
2923 166460933 : | lra_no_alloc_regs));
2924 332921866 : if (!hard_reg_set_empty_p (available_regs))
2925 : {
2926 166459498 : if (early_clobber_p
2927 166425567 : || curr_static_id->operand[nop].type != OP_OUT)
2928 : {
2929 87285428 : all_reload_nregs
2930 87285428 : += ira_reg_class_min_nregs[this_alternative][mode];
2931 87285428 : all_this_alternative
2932 87285428 : = (reg_class_subunion
2933 87285428 : [all_this_alternative][this_alternative]);
2934 : }
2935 : }
2936 : else
2937 : {
2938 : /* There are no hard regs holding a value of given
2939 : mode. */
2940 1435 : if (offmemok)
2941 : {
2942 171 : this_alternative = NO_REGS;
2943 171 : if (lra_dump_file != NULL)
2944 0 : fprintf (lra_dump_file,
2945 : " %d Using memory because of"
2946 : " a bad mode: reject+=2\n",
2947 : nop);
2948 171 : reject += 2;
2949 : }
2950 : else
2951 : {
2952 1264 : if (lra_dump_file != NULL)
2953 0 : fprintf (lra_dump_file,
2954 : " Wrong mode -- refuse\n");
2955 1264 : goto fail;
2956 : }
2957 : }
2958 : }
2959 :
2960 : /* If not assigned pseudo has a class which a subset of
2961 : required reg class, it is a less costly alternative
2962 : as the pseudo still can get a hard reg of necessary
2963 : class. */
2964 166459669 : if (! no_regs_p && REG_P (op) && hard_regno[nop] < 0
2965 21552676 : && (cl = get_reg_class (REGNO (op))) != NO_REGS
2966 178917664 : && ira_class_subset_p[this_alternative][cl])
2967 : {
2968 1059 : if (lra_dump_file != NULL)
2969 0 : fprintf
2970 0 : (lra_dump_file,
2971 : " %d Super set class reg: reject-=3\n", nop);
2972 1059 : reject -= 3;
2973 : }
2974 :
2975 175828578 : this_alternative_offmemok = offmemok;
2976 175828578 : if (this_costly_alternative != NO_REGS)
2977 : {
2978 19001832 : if (lra_dump_file != NULL)
2979 25 : fprintf (lra_dump_file,
2980 : " %d Costly loser: reject++\n", nop);
2981 19001832 : reject++;
2982 : }
2983 : /* If the operand is dying, has a matching constraint,
2984 : and satisfies constraints of the matched operand
2985 : which failed to satisfy the own constraints, most probably
2986 : the reload for this operand will be gone. */
2987 175828578 : if (this_alternative_matches >= 0
2988 41589198 : && !curr_alt_win[this_alternative_matches]
2989 950279 : && REG_P (op)
2990 694703 : && find_regno_note (curr_insn, REG_DEAD, REGNO (op))
2991 176544530 : && (hard_regno[nop] >= 0
2992 376107 : ? in_hard_reg_set_p (this_alternative_set,
2993 : mode, hard_regno[nop])
2994 36262 : : in_class_p (op, this_alternative, NULL)))
2995 : {
2996 226273 : if (lra_dump_file != NULL)
2997 1 : fprintf
2998 1 : (lra_dump_file,
2999 : " %d Dying matched operand reload: reject++\n",
3000 : nop);
3001 226273 : reject++;
3002 : }
3003 : else
3004 : {
3005 : /* Strict_low_part requires to reload the register
3006 : not the sub-register. In this case we should
3007 : check that a final reload hard reg can hold the
3008 : value mode. */
3009 175602305 : if (curr_static_id->operand[nop].strict_low
3010 113 : && REG_P (op)
3011 106 : && hard_regno[nop] < 0
3012 80 : && GET_CODE (*curr_id->operand_loc[nop]) == SUBREG
3013 80 : && ira_class_hard_regs_num[this_alternative] > 0
3014 175602385 : && (!targetm.hard_regno_mode_ok
3015 80 : (ira_class_hard_regs[this_alternative][0],
3016 80 : GET_MODE (*curr_id->operand_loc[nop]))))
3017 : {
3018 0 : if (lra_dump_file != NULL)
3019 0 : fprintf
3020 0 : (lra_dump_file,
3021 : " Strict low subreg reload -- refuse\n");
3022 0 : goto fail;
3023 : }
3024 175602305 : if (lra_dump_file != NULL)
3025 2177 : fprintf
3026 2177 : (lra_dump_file,
3027 : " %d Operand reload: losers++\n", nop);
3028 175602305 : losers++;
3029 : }
3030 175828578 : if (operand_reg[nop] != NULL_RTX
3031 : /* Output operands and matched input operands are
3032 : not inherited. The following conditions do not
3033 : exactly describe the previous statement but they
3034 : are pretty close. */
3035 63077015 : && curr_static_id->operand[nop].type != OP_OUT
3036 27804242 : && (this_alternative_matches < 0
3037 16151728 : || curr_static_id->operand[nop].type != OP_IN))
3038 : {
3039 11652514 : int last_reload = (lra_reg_info[ORIGINAL_REGNO
3040 11652514 : (operand_reg[nop])]
3041 11652514 : .last_reload);
3042 :
3043 : /* The value of reload_sum has sense only if we
3044 : process insns in their order. It happens only on
3045 : the first constraints sub-pass when we do most of
3046 : reload work. */
3047 11652514 : if (lra_constraint_iter == 1 && last_reload > bb_reload_num)
3048 2530792 : reload_sum += last_reload - bb_reload_num;
3049 : }
3050 : /* If this is a constant that is reloaded into the
3051 : desired class by copying it to memory first, count
3052 : that as another reload. This is consistent with
3053 : other code and is required to avoid choosing another
3054 : alternative when the constant is moved into memory.
3055 : Note that the test here is precisely the same as in
3056 : the code below that calls force_const_mem. */
3057 226156141 : if (CONST_POOL_OK_P (mode, op)
3058 226156206 : && ((targetm.preferred_reload_class
3059 50327628 : (op, this_alternative) == NO_REGS)
3060 48774667 : || no_input_reloads_p))
3061 : {
3062 1552961 : const_to_mem = 1;
3063 1552961 : if (! no_regs_p)
3064 : {
3065 707281 : if (lra_dump_file != NULL)
3066 0 : fprintf
3067 0 : (lra_dump_file,
3068 : " %d Constant reload through memory: "
3069 : "losers++\n", nop);
3070 707281 : losers++;
3071 : }
3072 : }
3073 :
3074 : /* Alternative loses if it requires a type of reload not
3075 : permitted for this insn. We can always reload
3076 : objects with a REG_UNUSED note. */
3077 175828578 : if ((curr_static_id->operand[nop].type != OP_IN
3078 85311059 : && no_output_reloads_p
3079 0 : && ! find_reg_note (curr_insn, REG_UNUSED, op)
3080 0 : && ! scratch_p)
3081 175828578 : || (curr_static_id->operand[nop].type != OP_OUT
3082 90517728 : && no_input_reloads_p && ! const_to_mem)
3083 351657156 : || (this_alternative_matches >= 0
3084 41589198 : && (no_input_reloads_p
3085 41589198 : || (no_output_reloads_p
3086 0 : && (curr_static_id->operand
3087 0 : [this_alternative_matches].type != OP_IN)
3088 0 : && ! find_reg_note (curr_insn, REG_UNUSED,
3089 : no_subreg_reg_operand
3090 0 : [this_alternative_matches])
3091 0 : && ! scratch_p))))
3092 : {
3093 0 : if (lra_dump_file != NULL)
3094 0 : fprintf
3095 0 : (lra_dump_file,
3096 : " No input/output reload -- refuse\n");
3097 0 : goto fail;
3098 : }
3099 :
3100 : /* Alternative loses if it required class pseudo cannot
3101 : hold value of required mode. Such insns can be
3102 : described by insn definitions with mode iterators. */
3103 175828578 : if (GET_MODE (*curr_id->operand_loc[nop]) != VOIDmode
3104 126911167 : && ! hard_reg_set_empty_p (this_alternative_set)
3105 : /* It is common practice for constraints to use a
3106 : class which does not have actually enough regs to
3107 : hold the value (e.g. x86 AREG for mode requiring
3108 : more one general reg). Therefore we have 2
3109 : conditions to check that the reload pseudo cannot
3110 : hold the mode value. */
3111 118287463 : && (!targetm.hard_regno_mode_ok
3112 118287463 : (ira_class_hard_regs[this_alternative][0],
3113 : GET_MODE (*curr_id->operand_loc[nop])))
3114 : /* The above condition is not enough as the first
3115 : reg in ira_class_hard_regs can be not aligned for
3116 : multi-words mode values. */
3117 175828578 : && (prohibited_class_reg_set_mode_p
3118 0 : (this_alternative, this_alternative_set,
3119 0 : GET_MODE (*curr_id->operand_loc[nop]))))
3120 : {
3121 0 : if (lra_dump_file != NULL)
3122 0 : fprintf (lra_dump_file,
3123 : " reload pseudo for op %d "
3124 : "cannot hold the mode value -- refuse\n",
3125 : nop);
3126 0 : goto fail;
3127 : }
3128 :
3129 : /* Check strong discouragement of reload of non-constant
3130 : into class THIS_ALTERNATIVE. */
3131 125500950 : if (! CONSTANT_P (op) && ! no_regs_p
3132 292806299 : && (targetm.preferred_reload_class
3133 116977721 : (op, this_alternative) == NO_REGS
3134 108508863 : || (curr_static_id->operand[nop].type == OP_OUT
3135 74759861 : && (targetm.preferred_output_reload_class
3136 74759861 : (op, this_alternative) == NO_REGS))))
3137 : {
3138 13005931 : if (offmemok && REG_P (op))
3139 : {
3140 792383 : if (lra_dump_file != NULL)
3141 0 : fprintf
3142 0 : (lra_dump_file,
3143 : " %d Spill pseudo into memory: reject+=3\n",
3144 : nop);
3145 792383 : reject += 3;
3146 : }
3147 : else
3148 : {
3149 12213548 : if (lra_dump_file != NULL)
3150 0 : fprintf
3151 0 : (lra_dump_file,
3152 : " %d Non-prefered reload: reject+=%d\n",
3153 : nop, LRA_MAX_REJECT);
3154 12213548 : reject += LRA_MAX_REJECT;
3155 : }
3156 : }
3157 :
3158 175828578 : if (! (MEM_P (op) && offmemok)
3159 175828506 : && ! (const_to_mem && constmemok))
3160 : {
3161 : /* We prefer to reload pseudos over reloading other
3162 : things, since such reloads may be able to be
3163 : eliminated later. So bump REJECT in other cases.
3164 : Don't do this in the case where we are forcing a
3165 : constant into memory and it will then win since
3166 : we don't want to have a different alternative
3167 : match then. */
3168 174865594 : if (! (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER))
3169 : {
3170 124865669 : if (lra_dump_file != NULL)
3171 1648 : fprintf
3172 1648 : (lra_dump_file,
3173 : " %d Non-pseudo reload: reject+=2\n",
3174 : nop);
3175 124865669 : reject += 2;
3176 : }
3177 :
3178 174865594 : if (! no_regs_p)
3179 166342409 : reload_nregs
3180 166342409 : += ira_reg_class_max_nregs[this_alternative][mode];
3181 :
3182 174865594 : if (SMALL_REGISTER_CLASS_P (this_alternative))
3183 : {
3184 884575 : if (lra_dump_file != NULL)
3185 45 : fprintf
3186 45 : (lra_dump_file,
3187 : " %d Small class reload: reject+=%d\n",
3188 : nop, LRA_LOSER_COST_FACTOR / 2);
3189 884575 : reject += LRA_LOSER_COST_FACTOR / 2;
3190 : }
3191 : }
3192 :
3193 : /* We are trying to spill pseudo into memory. It is
3194 : usually more costly than moving to a hard register
 3195 :                  although it might take the same number of
3196 : reloads.
3197 :
3198 : Non-pseudo spill may happen also. Suppose a target allows both
3199 : register and memory in the operand constraint alternatives,
 3200 :                  then it's typical that an eliminable register has a substitution
3201 : of "base + offset" which can either be reloaded by a simple
3202 : "new_reg <= base + offset" which will match the register
3203 : constraint, or a similar reg addition followed by further spill
3204 : to and reload from memory which will match the memory
3205 : constraint, but this memory spill will be much more costly
3206 : usually.
3207 :
3208 : Code below increases the reject for both pseudo and non-pseudo
3209 : spill. */
3210 175828578 : if (no_regs_p
3211 9368909 : && !(MEM_P (op) && offmemok)
3212 9368865 : && !(REG_P (op) && hard_regno[nop] < 0))
3213 : {
3214 8252750 : if (lra_dump_file != NULL)
3215 13 : fprintf
3216 20 : (lra_dump_file,
3217 : " %d Spill %spseudo into memory: reject+=3\n",
3218 : nop, REG_P (op) ? "" : "Non-");
3219 8252750 : reject += 3;
3220 8252750 : if (VECTOR_MODE_P (mode))
3221 : {
3222 : /* Spilling vectors into memory is usually more
3223 : costly as they contain big values. */
3224 366508 : if (lra_dump_file != NULL)
3225 0 : fprintf
3226 0 : (lra_dump_file,
3227 : " %d Spill vector pseudo: reject+=2\n",
3228 : nop);
3229 366508 : reject += 2;
3230 : }
3231 : }
3232 :
3233 : /* When we use an operand requiring memory in given
3234 : alternative, the insn should write *and* read the
3235 : value to/from memory it is costly in comparison with
3236 : an insn alternative which does not use memory
3237 : (e.g. register or immediate operand). We exclude
3238 : memory operand for such case as we can satisfy the
3239 : memory constraints by reloading address. */
3240 9368909 : if (no_regs_p && offmemok && !MEM_P (op))
3241 : {
3242 9368713 : if (lra_dump_file != NULL)
3243 27 : fprintf
3244 27 : (lra_dump_file,
3245 : " Using memory insn operand %d: reject+=3\n",
3246 : nop);
3247 9368713 : reject += 3;
3248 : }
3249 :
3250 : /* If reload requires moving value through secondary
3251 : memory, it will need one more insn at least. */
3252 175828578 : if (this_alternative != NO_REGS
3253 166459498 : && REG_P (op) && (cl = get_reg_class (REGNO (op))) != NO_REGS
3254 211924267 : && ((curr_static_id->operand[nop].type != OP_OUT
3255 20477830 : && targetm.secondary_memory_needed (mode, cl,
3256 : this_alternative))
3257 32844337 : || (curr_static_id->operand[nop].type != OP_IN
3258 15617976 : && (targetm.secondary_memory_needed
3259 15617976 : (mode, this_alternative, cl)))))
3260 : {
3261 10805531 : if (lra_dump_file != NULL)
3262 16 : fprintf
3263 16 : (lra_dump_file,
3264 : " %d Secondary memory reload needed: "
3265 : "losers++\n", nop);
3266 10805531 : losers++;
3267 : }
3268 :
3269 175828578 : if (MEM_P (op) && offmemok)
3270 72 : addr_losers++;
3271 : else
3272 : {
3273 : /* Input reloads can be inherited more often than
3274 : output reloads can be removed, so penalize output
3275 : reloads. */
3276 175828506 : if (!REG_P (op) || curr_static_id->operand[nop].type != OP_IN)
3277 : {
3278 148024466 : if (lra_dump_file != NULL)
3279 1722 : fprintf
3280 1722 : (lra_dump_file,
3281 : " %d Non input pseudo reload: reject++\n",
3282 : nop);
3283 148024466 : reject++;
3284 : }
3285 :
3286 175828506 : if (curr_static_id->operand[nop].type == OP_INOUT)
3287 : {
3288 209 : if (lra_dump_file != NULL)
3289 0 : fprintf
3290 0 : (lra_dump_file,
3291 : " %d Input/Output reload: reject+=%d\n",
3292 : nop, LRA_LOSER_COST_FACTOR);
3293 209 : reject += LRA_LOSER_COST_FACTOR;
3294 : }
3295 : }
3296 : }
3297 :
3298 469271999 : if (early_clobber_p && ! scratch_p)
3299 : {
3300 165870 : if (lra_dump_file != NULL)
3301 4 : fprintf (lra_dump_file,
3302 : " %d Early clobber: reject++\n", nop);
3303 165870 : reject++;
3304 : }
3305 : /* ??? We check early clobbers after processing all operands
3306 : (see loop below) and there we update the costs more.
3307 : Should we update the cost (may be approximately) here
3308 : because of early clobber register reloads or it is a rare
3309 : or non-important thing to be worth to do it. */
3310 938543998 : overall = (losers * LRA_LOSER_COST_FACTOR + reject
3311 469271999 : - (addr_losers == losers ? static_reject : 0));
3312 469271999 : if ((best_losers == 0 || losers != 0) && best_overall < overall)
3313 : {
3314 74445650 : if (lra_dump_file != NULL)
3315 1036 : fprintf (lra_dump_file,
3316 : " overall=%d,losers=%d -- refuse\n",
3317 : overall, losers);
3318 74445650 : goto fail;
3319 : }
3320 :
3321 394826349 : if (update_and_check_small_class_inputs (nop, nalt,
3322 : this_alternative))
3323 : {
3324 0 : if (lra_dump_file != NULL)
3325 0 : fprintf (lra_dump_file,
3326 : " not enough small class regs -- refuse\n");
3327 0 : goto fail;
3328 : }
3329 394826349 : curr_alt[nop] = this_alternative;
3330 394826349 : curr_alt_set[nop] = this_alternative_set;
3331 394826349 : curr_alt_exclude_start_hard_regs[nop]
3332 394826349 : = this_alternative_exclude_start_hard_regs;
3333 394826349 : curr_alt_win[nop] = this_alternative_win;
3334 394826349 : curr_alt_match_win[nop] = this_alternative_match_win;
3335 394826349 : curr_alt_offmemok[nop] = this_alternative_offmemok;
3336 394826349 : curr_alt_matches[nop] = this_alternative_matches;
3337 :
3338 394826349 : if (this_alternative_matches >= 0
3339 394826349 : && !did_match && !this_alternative_win)
3340 13233895 : curr_alt_win[this_alternative_matches] = false;
3341 :
3342 394826349 : if (early_clobber_p && operand_reg[nop] != NULL_RTX)
3343 170096 : early_clobbered_nops[early_clobbered_regs_num++] = nop;
3344 : }
3345 :
3346 135103695 : if (curr_insn_set != NULL_RTX
3347 : /* Allow just two operands or three operands where the third
3348 : is a clobber. */
3349 131250927 : && (n_operands == 2
3350 28771257 : || (n_operands == 3
3351 26646933 : && GET_CODE (PATTERN (curr_insn)) == PARALLEL
3352 22614549 : && XVECLEN (PATTERN (curr_insn), 0) == 2
3353 22560976 : && GET_CODE (XVECEXP (PATTERN (curr_insn), 0, 1))
3354 : == CLOBBER))
3355 : /* Prevent processing non-move insns. */
3356 124957152 : && (GET_CODE (SET_SRC (curr_insn_set)) == SUBREG
3357 123162445 : || SET_SRC (curr_insn_set) == no_subreg_reg_operand[1])
3358 226559432 : && ((! curr_alt_win[0] && ! curr_alt_win[1]
3359 6035120 : && REG_P (no_subreg_reg_operand[0])
3360 2951769 : && REG_P (no_subreg_reg_operand[1])
3361 1220011 : && (reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
3362 1013958 : || reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0])))
3363 90929789 : || (! curr_alt_win[0] && curr_alt_win[1]
3364 27209989 : && REG_P (no_subreg_reg_operand[1])
3365 : /* Check that we reload memory not the memory
3366 : address. */
3367 15613918 : && ! (curr_alt_offmemok[0]
3368 386717 : && MEM_P (no_subreg_reg_operand[0]))
3369 15613918 : && reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0]))
3370 76708673 : || (curr_alt_win[0] && ! curr_alt_win[1]
3371 9514225 : && REG_P (no_subreg_reg_operand[0])
3372 : /* Check that we reload memory not the memory
3373 : address. */
3374 7047478 : && ! (curr_alt_offmemok[1]
3375 1018673 : && MEM_P (no_subreg_reg_operand[1]))
3376 7047476 : && reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
3377 6167479 : && (! CONST_POOL_OK_P (curr_operand_mode[1],
3378 : no_subreg_reg_operand[1])
3379 2264519 : || (targetm.preferred_reload_class
3380 2264519 : (no_subreg_reg_operand[1],
3381 : (enum reg_class) curr_alt[1]) != NO_REGS))
3382 : /* If it is a result of recent elimination in move
3383 : insn we can transform it into an add still by
3384 : using this alternative. */
3385 6124245 : && GET_CODE (no_subreg_reg_operand[1]) != PLUS
3386 : /* Likewise if the source has been replaced with an
3387 : equivalent value. This only happens once -- the reload
3388 : will use the equivalent value instead of the register it
3389 : replaces -- so there should be no danger of cycling. */
3390 5645346 : && !equiv_substition_p[1])))
3391 : {
3392 : /* We have a move insn and a new reload insn will be similar
3393 : to the current insn. We should avoid such situation as
3394 : it results in LRA cycling. */
3395 20363498 : if (lra_dump_file != NULL)
3396 239 : fprintf (lra_dump_file,
3397 : " Cycle danger: overall += LRA_MAX_REJECT\n");
3398 20363498 : overall += LRA_MAX_REJECT;
3399 : }
3400 135103695 : if (all_this_alternative != NO_REGS
3401 115664344 : && !SMALL_REGISTER_CLASS_P (all_this_alternative)
3402 114794402 : && all_used_nregs != 0 && all_reload_nregs != 0
3403 135103695 : && (all_used_nregs + all_reload_nregs + 1
3404 3999137 : >= ira_class_hard_regs_num[all_this_alternative]))
3405 : {
3406 366 : if (lra_dump_file != NULL)
3407 0 : fprintf
3408 0 : (lra_dump_file,
3409 : " Register starvation: overall += LRA_MAX_REJECT"
3410 : "(class=%s,avail=%d,used=%d,reload=%d)\n",
3411 : reg_class_names[all_this_alternative],
3412 : ira_class_hard_regs_num[all_this_alternative],
3413 : all_used_nregs, all_reload_nregs);
3414 366 : overall += LRA_MAX_REJECT;
3415 366 : if (!prefer_memory_p && INSN_CODE (curr_insn) < 0)
3416 : {
3417 : /* asm can permit memory and reg and can be not enough regs for
3418 : asm -- try now memory: */
3419 102 : prefer_memory_p = true;
3420 102 : if (lra_dump_file != NULL)
3421 0 : fprintf
3422 0 : (lra_dump_file,
3423 : " Trying now memory for operands\n");
3424 102 : goto repeat;
3425 : }
3426 : }
3427 135270205 : ok_p = true;
3428 : curr_alt_dont_inherit_ops_num = 0;
3429 135270205 : for (nop = 0; nop < early_clobbered_regs_num; nop++)
3430 : {
3431 166613 : int i, j, clobbered_hard_regno, first_conflict_j, last_conflict_j;
3432 166613 : HARD_REG_SET temp_set;
3433 :
3434 166613 : i = early_clobbered_nops[nop];
3435 166613 : if ((! curr_alt_win[i] && ! curr_alt_match_win[i])
3436 125251 : || hard_regno[i] < 0)
3437 165942 : continue;
3438 123415 : lra_assert (operand_reg[i] != NULL_RTX);
3439 : clobbered_hard_regno = hard_regno[i];
3440 123415 : CLEAR_HARD_REG_SET (temp_set);
3441 123415 : add_to_hard_reg_set (&temp_set, GET_MODE (*curr_id->operand_loc[i]),
3442 : clobbered_hard_regno);
3443 123415 : first_conflict_j = last_conflict_j = -1;
3444 623057 : for (j = 0; j < n_operands; j++)
3445 499643 : if (j == i
3446 : /* We don't want process insides of match_operator and
3447 : match_parallel because otherwise we would process
3448 : their operands once again generating a wrong
3449 : code. */
3450 376228 : || curr_static_id->operand[j].is_operator)
3451 125556 : continue;
3452 374087 : else if ((curr_alt_matches[j] == i && curr_alt_match_win[j])
3453 355653 : || (curr_alt_matches[i] == j && curr_alt_match_win[i]))
3454 18434 : continue;
3455 : /* If we don't reload j-th operand, check conflicts. */
3456 123444 : else if ((curr_alt_win[j] || curr_alt_match_win[j])
3457 417146 : && uses_hard_regs_p (*curr_id->operand_loc[j], temp_set))
3458 : {
3459 1145 : if (first_conflict_j < 0)
3460 671 : first_conflict_j = j;
3461 1145 : last_conflict_j = j;
3462 : /* Both the earlyclobber operand and conflicting operand
3463 : cannot both be user defined hard registers for asm.
3464 : Let curr_insn_transform diagnose it. */
3465 1145 : if (HARD_REGISTER_P (operand_reg[i])
3466 1 : && REG_USERVAR_P (operand_reg[i])
3467 1 : && operand_reg[j] != NULL_RTX
3468 1 : && HARD_REGISTER_P (operand_reg[j])
3469 1 : && REG_USERVAR_P (operand_reg[j])
3470 1146 : && INSN_CODE (curr_insn) < 0)
3471 1 : return false;
3472 : }
3473 123414 : if (last_conflict_j < 0)
3474 122744 : continue;
3475 :
3476 : /* If an earlyclobber operand conflicts with another non-matching
3477 : operand (ie, they have been assigned the same hard register),
3478 : then it is better to reload the other operand, as there may
3479 : exist yet another operand with a matching constraint associated
3480 : with the earlyclobber operand. However, if one of the operands
3481 : is an explicit use of a hard register, then we must reload the
3482 : other non-hard register operand. */
3483 670 : if (HARD_REGISTER_P (operand_reg[i])
3484 670 : || (first_conflict_j == last_conflict_j
3485 196 : && operand_reg[last_conflict_j] != NULL_RTX
3486 60 : && !curr_alt_match_win[last_conflict_j]
3487 60 : && !HARD_REGISTER_P (operand_reg[last_conflict_j])))
3488 : {
3489 60 : curr_alt_win[last_conflict_j] = false;
3490 60 : curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++]
3491 60 : = last_conflict_j;
3492 60 : losers++;
3493 60 : if (lra_dump_file != NULL)
3494 0 : fprintf
3495 0 : (lra_dump_file,
3496 : " %d Conflict early clobber reload: losers++\n",
3497 : i);
3498 : }
3499 : else
3500 : {
3501 : /* We need to reload early clobbered register and the
3502 : matched registers. */
3503 3044 : for (j = 0; j < n_operands; j++)
3504 2434 : if (curr_alt_matches[j] == i)
3505 : {
3506 2 : curr_alt_match_win[j] = false;
3507 2 : losers++;
3508 2 : if (lra_dump_file != NULL)
3509 0 : fprintf
3510 0 : (lra_dump_file,
3511 : " %d Matching conflict early clobber "
3512 : "reloads: losers++\n",
3513 : j);
3514 2 : overall += LRA_LOSER_COST_FACTOR;
3515 : }
3516 610 : if (! curr_alt_match_win[i])
3517 610 : curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++] = i;
3518 : else
3519 : {
3520 : /* Remember pseudos used for match reloads are never
3521 : inherited. */
3522 0 : lra_assert (curr_alt_matches[i] >= 0);
3523 0 : curr_alt_win[curr_alt_matches[i]] = false;
3524 : }
3525 610 : curr_alt_win[i] = curr_alt_match_win[i] = false;
3526 610 : losers++;
3527 610 : if (lra_dump_file != NULL)
3528 0 : fprintf
3529 0 : (lra_dump_file,
3530 : " %d Matched conflict early clobber reloads: "
3531 : "losers++\n",
3532 : i);
3533 : }
3534 : /* Early clobber was already reflected in REJECT. */
3535 670 : if (!matching_early_clobber[i])
3536 : {
3537 670 : lra_assert (reject > 0);
3538 670 : reject--;
3539 670 : matching_early_clobber[i] = 1;
3540 : }
3541 670 : overall += LRA_LOSER_COST_FACTOR - 1;
3542 : }
3543 135103592 : if (lra_dump_file != NULL)
3544 1761 : fprintf (lra_dump_file, " overall=%d,losers=%d,rld_nregs=%d\n",
3545 : overall, losers, reload_nregs);
3546 :
3547 : /* If this alternative can be made to work by reloading, and it
3548 : needs less reloading than the others checked so far, record
3549 : it as the chosen goal for reloading. */
3550 135103592 : if ((best_losers != 0 && losers == 0)
3551 59884606 : || (((best_losers == 0 && losers == 0)
3552 58882619 : || (best_losers != 0 && losers != 0))
3553 59884606 : && (best_overall > overall
3554 15560268 : || (best_overall == overall
3555 : /* If the cost of the reloads is the same,
3556 : prefer alternative which requires minimal
3557 : number of reload regs. */
3558 11590999 : && (reload_nregs < best_reload_nregs
3559 11488254 : || (reload_nregs == best_reload_nregs
3560 11445122 : && (best_reload_sum < reload_sum
3561 11424612 : || (best_reload_sum == reload_sum
3562 11400722 : && nalt < goal_alt_number))))))))
3563 : {
3564 389349233 : for (nop = 0; nop < n_operands; nop++)
3565 : {
3566 269435056 : goal_alt_win[nop] = curr_alt_win[nop];
3567 269435056 : goal_alt_match_win[nop] = curr_alt_match_win[nop];
3568 269435056 : goal_alt_matches[nop] = curr_alt_matches[nop];
3569 269435056 : goal_alt[nop] = curr_alt[nop];
3570 269435056 : goal_alt_exclude_start_hard_regs[nop]
3571 269435056 : = curr_alt_exclude_start_hard_regs[nop];
3572 269435056 : goal_alt_offmemok[nop] = curr_alt_offmemok[nop];
3573 : }
3574 119914177 : goal_alt_dont_inherit_ops_num = curr_alt_dont_inherit_ops_num;
3575 119914177 : goal_reuse_alt_p = curr_reuse_alt_p;
3576 119914834 : for (nop = 0; nop < curr_alt_dont_inherit_ops_num; nop++)
3577 657 : goal_alt_dont_inherit_ops[nop] = curr_alt_dont_inherit_ops[nop];
3578 119914177 : goal_alt_swapped = curr_swapped;
3579 119914177 : goal_alt_out_sp_reload_p = curr_alt_out_sp_reload_p;
3580 119914177 : best_overall = overall;
3581 119914177 : best_losers = losers;
3582 119914177 : best_reload_nregs = reload_nregs;
3583 119914177 : best_reload_sum = reload_sum;
3584 119914177 : goal_alt_number = nalt;
3585 : }
3586 135103592 : if (losers == 0 && !curr_alt_class_change_p)
3587 : /* Everything is satisfied. Do not process alternatives
3588 : anymore. */
3589 : break;
3590 58895371 : fail:
3591 181361801 : ;
3592 : }
3593 : return ok_p;
3594 : }
3595 :
/* Make reload base reg from address AD.  Reload the base into a new
   pseudo of the natural base register class and return the rebuilt
   inner address "new base + displacement".  Return NULL_RTX if the
   rebuilt address would still be invalid or if the move of the old
   base into the new pseudo cannot be recognized (any insns emitted
   here are deleted in that case).  */
static rtx
base_to_reg (struct address_info *ad)
{
  enum reg_class cl;
  int code = -1;
  rtx new_inner = NULL_RTX;
  rtx new_reg = NULL_RTX;
  rtx_insn *insn;
  /* Remember the current last insn so that a failed attempt can
     delete everything emitted below.  */
  rtx_insn *last_insn = get_last_insn();

  /* The caller guarantees a simple displacement shape.  */
  lra_assert (ad->disp == ad->disp_term);
  cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
                       get_index_code (ad));
  new_reg = lra_create_new_reg (GET_MODE (*ad->base), NULL_RTX, cl, NULL,
				"base");
  /* Build "new_reg + disp" (or just "new_reg + 0" when there is no
     displacement term).  */
  new_inner = simplify_gen_binary (PLUS, GET_MODE (new_reg), new_reg,
                                   ad->disp_term == NULL
                                   ? const0_rtx
                                   : *ad->disp_term);
  /* Validate the new address shape before emitting the base load.  */
  if (!valid_address_p (ad->mode, new_inner, ad->as))
    return NULL_RTX;
  insn = emit_insn (gen_rtx_SET (new_reg, *ad->base));
  code = recog_memoized (insn);
  if (code < 0)
    {
      /* The target cannot load the old base this way -- undo.  */
      delete_insns_since (last_insn);
      return NULL_RTX;
    }

  return new_inner;
}
3628 :
3629 : /* Make reload base reg + DISP from address AD. Return the new pseudo. */
3630 : static rtx
3631 39 : base_plus_disp_to_reg (struct address_info *ad, rtx disp)
3632 : {
3633 39 : enum reg_class cl;
3634 39 : rtx new_reg;
3635 :
3636 39 : lra_assert (ad->base == ad->base_term);
3637 39 : cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
3638 : get_index_code (ad));
3639 39 : new_reg = lra_create_new_reg (GET_MODE (*ad->base_term), NULL_RTX, cl, NULL,
3640 : "base + disp");
3641 39 : lra_emit_add (new_reg, *ad->base_term, disp);
3642 39 : return new_reg;
3643 : }
3644 :
3645 : /* Make reload of index part of address AD. Return the new
3646 : pseudo. */
3647 : static rtx
3648 0 : index_part_to_reg (struct address_info *ad, enum reg_class index_class)
3649 : {
3650 0 : rtx new_reg;
3651 :
3652 0 : new_reg = lra_create_new_reg (GET_MODE (*ad->index), NULL_RTX,
3653 : index_class, NULL, "index term");
3654 0 : expand_mult (GET_MODE (*ad->index), *ad->index_term,
3655 : GEN_INT (get_index_scale (ad)), new_reg, 1);
3656 0 : return new_reg;
3657 : }
3658 :
3659 : /* Return true if we can add a displacement to address AD, even if that
3660 : makes the address invalid. The fix-up code requires any new address
3661 : to be the sum of the BASE_TERM, INDEX and DISP_TERM fields. */
3662 : static bool
3663 19103 : can_add_disp_p (struct address_info *ad)
3664 : {
3665 19103 : return (!ad->autoinc_p
3666 19103 : && ad->segment == NULL
3667 19103 : && ad->base == ad->base_term
3668 38206 : && ad->disp == ad->disp_term);
3669 : }
3670 :
/* Make equiv substitution in address AD.  Replace the base and/or
   index registers with their equivalences (after elimination) when
   profitable, folding any constant offsets of the equivalences into
   the address displacement.  Return true if a substitution was
   made.  */
static bool
equiv_address_substitution (struct address_info *ad)
{
  rtx base_reg, new_base_reg, index_reg, new_index_reg, *base_term, *index_term;
  poly_int64 disp;
  HOST_WIDE_INT scale;
  bool change_p;

  /* Look through subregs to the underlying base register, if any.  */
  base_term = strip_subreg (ad->base_term);
  if (base_term == NULL)
    base_reg = new_base_reg = NULL_RTX;
  else
    {
      base_reg = *base_term;
      new_base_reg = get_equiv_with_elimination (base_reg, curr_insn);
    }
  index_term = strip_subreg (ad->index_term);
  if (index_term == NULL)
    index_reg = new_index_reg = NULL_RTX;
  else
    {
      index_reg = *index_term;
      new_index_reg = get_equiv_with_elimination (index_reg, curr_insn);
    }
  /* Nothing to substitute -- common fast path.  */
  if (base_reg == new_base_reg && index_reg == new_index_reg)
    return false;
  /* DISP accumulates constant offsets folded out of reg+offset
     equivalences; applied to the address at the end.  */
  disp = 0;
  change_p = false;
  if (lra_dump_file != NULL)
    {
      fprintf (lra_dump_file, "Changing address in insn %d ",
	       INSN_UID (curr_insn));
      dump_value_slim (lra_dump_file, *ad->outer, 1);
    }
  if (base_reg != new_base_reg)
    {
      poly_int64 offset;
      if (REG_P (new_base_reg))
	{
	  /* Plain register equivalence: substitute directly.  */
	  *base_term = new_base_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_base_reg) == PLUS
	       && REG_P (XEXP (new_base_reg, 0))
	       && poly_int_rtx_p (XEXP (new_base_reg, 1), &offset)
	       && can_add_disp_p (ad))
	{
	  /* "reg + offset" equivalence: take the register and fold
	     the offset into the displacement.  */
	  disp += offset;
	  *base_term = XEXP (new_base_reg, 0);
	  change_p = true;
	}
      /* Keep a duplicated base term (e.g. in pre/post-modify) in
	 sync.  */
      if (ad->base_term2 != NULL)
	*ad->base_term2 = *ad->base_term;
    }
  if (index_reg != new_index_reg)
    {
      poly_int64 offset;
      if (REG_P (new_index_reg))
	{
	  *index_term = new_index_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_index_reg) == PLUS
	       && REG_P (XEXP (new_index_reg, 0))
	       && poly_int_rtx_p (XEXP (new_index_reg, 1), &offset)
	       && can_add_disp_p (ad)
	       /* A zero scale means the offset cannot be folded.  */
	       && (scale = get_index_scale (ad)))
	{
	  /* The index is multiplied by SCALE, so scale the folded
	     offset accordingly.  */
	  disp += offset * scale;
	  *index_term = XEXP (new_index_reg, 0);
	  change_p = true;
	}
    }
  if (maybe_ne (disp, 0))
    {
      /* Apply the accumulated offset either to the existing
	 displacement or by wrapping the inner address in a PLUS.  */
      if (ad->disp != NULL)
	*ad->disp = plus_constant (GET_MODE (*ad->inner), *ad->disp, disp);
      else
	{
	  *ad->inner = plus_constant (GET_MODE (*ad->inner), *ad->inner, disp);
	  update_address (ad);
	}
      change_p = true;
    }
  if (lra_dump_file != NULL)
    {
      if (! change_p)
	fprintf (lra_dump_file, " -- no change\n");
      else
	{
	  fprintf (lra_dump_file, " on equiv ");
	  dump_value_slim (lra_dump_file, *ad->outer, 1);
	  fprintf (lra_dump_file, "\n");
	}
    }
  return change_p;
}
3770 :
/* Skip all modifiers and whitespaces in constraint STR and return the
   result.  */
static const char *
skip_constraint_modifiers (const char *str)
{
  /* Advance past every modifier character ('+', '&', '=', '*', '$',
     '^', '%', '?', '!') as well as blanks and tabs.  */
  while (*str == '+' || *str == '&' || *str == '=' || *str == '*'
	 || *str == ' ' || *str == '\t' || *str == '$' || *str == '^'
	 || *str == '%' || *str == '?' || *str == '!')
    str++;
  return str;
}
3785 :
3786 : /* Takes a string of 0 or more comma-separated constraints. When more
3787 : than one constraint is present, evaluate whether they all correspond
3788 : to a single, repeated constraint (e.g. "r,r") or whether we have
3789 : more than one distinct constraints (e.g. "r,m"). */
3790 : static bool
3791 162870157 : constraint_unique (const char *cstr)
3792 : {
3793 162870157 : enum constraint_num ca, cb;
3794 162870157 : ca = CONSTRAINT__UNKNOWN;
3795 322046553 : for (;;)
3796 : {
3797 322046553 : cstr = skip_constraint_modifiers (cstr);
3798 322046553 : if (*cstr == '\0' || *cstr == ',')
3799 : cb = CONSTRAINT_X;
3800 : else
3801 : {
3802 322046553 : cb = lookup_constraint (cstr);
3803 322046553 : if (cb == CONSTRAINT__UNKNOWN)
3804 : return false;
3805 309935442 : cstr += CONSTRAINT_LEN (cstr[0], cstr);
3806 : }
3807 : /* Handle the first iteration of the loop. */
3808 309935442 : if (ca == CONSTRAINT__UNKNOWN)
3809 : ca = cb;
3810 : /* Handle the general case of comparing ca with subsequent
3811 : constraints. */
3812 159045020 : else if (ca != cb)
3813 : return false;
3814 166663468 : if (*cstr == '\0')
3815 : return true;
3816 159176396 : if (*cstr == ',')
3817 87775288 : cstr += 1;
3818 : }
3819 : }
3820 :
3821 : /* Major function to make reloads for an address in operand NOP or
3822 : check its correctness (If CHECK_ONLY_P is true). The supported
3823 : cases are:
3824 :
3825 : 1) an address that existed before LRA started, at which point it
3826 : must have been valid. These addresses are subject to elimination
3827 : and may have become invalid due to the elimination offset being out
3828 : of range.
3829 :
3830 : 2) an address created by forcing a constant to memory
3831 : (force_const_to_mem). The initial form of these addresses might
3832 : not be valid, and it is this function's job to make them valid.
3833 :
3834 : 3) a frame address formed from a register and a (possibly zero)
3835 : constant offset. As above, these addresses might not be valid and
3836 : this function must make them so.
3837 :
3838 : Add reloads to the lists *BEFORE and *AFTER. We might need to add
3839 : reloads to *AFTER because of inc/dec, {pre, post} modify in the
3840 : address. Return true for any RTL change.
3841 :
3842 : The function is a helper function which does not produce all
3843 : transformations (when CHECK_ONLY_P is false) which can be
3844 : necessary. It does just basic steps. To do all necessary
3845 : transformations use function process_address. */
3846 : static bool
3847 176439694 : process_address_1 (int nop, bool check_only_p,
3848 : rtx_insn **before, rtx_insn **after)
3849 : {
3850 176439694 : struct address_info ad;
3851 176439694 : rtx new_reg;
3852 176439694 : HOST_WIDE_INT scale;
3853 176439694 : rtx op = *curr_id->operand_loc[nop];
3854 176439694 : rtx mem = extract_mem_from_operand (op);
3855 176439694 : const char *constraint;
3856 176439694 : enum constraint_num cn;
3857 176439694 : bool change_p = false;
3858 :
3859 176439694 : if (MEM_P (mem)
3860 37747820 : && GET_MODE (mem) == BLKmode
3861 25539 : && GET_CODE (XEXP (mem, 0)) == SCRATCH)
3862 : return false;
3863 :
3864 176439694 : constraint
3865 176439694 : = skip_constraint_modifiers (curr_static_id->operand[nop].constraint);
3866 176439694 : if (IN_RANGE (constraint[0], '0', '9'))
3867 : {
3868 13748735 : char *end;
3869 13748735 : unsigned long dup = strtoul (constraint, &end, 10);
3870 13748735 : constraint
3871 13748735 : = skip_constraint_modifiers (curr_static_id->operand[dup].constraint);
3872 : }
3873 188258448 : cn = lookup_constraint (*constraint == '\0' ? "X" : constraint);
3874 : /* If we have several alternatives or/and several constraints in an
3875 : alternative and we can not say at this stage what constraint will be used,
3876 : use unknown constraint. The exception is an address constraint. If
3877 : operand has one address constraint, probably all others constraints are
3878 : address ones. */
3879 164620940 : if (constraint[0] != '\0' && get_constraint_type (cn) != CT_ADDRESS
3880 339309851 : && !constraint_unique (constraint))
3881 : cn = CONSTRAINT__UNKNOWN;
3882 21056609 : if (insn_extra_address_constraint (cn)
3883 : /* When we find an asm operand with an address constraint that
3884 : doesn't satisfy address_operand to begin with, we clear
3885 : is_address, so that we don't try to make a non-address fit.
3886 : If the asm statement got this far, it's because other
3887 : constraints are available, and we'll use them, disregarding
3888 : the unsatisfiable address ones. */
3889 21056609 : && curr_static_id->operand[nop].is_address)
3890 1750764 : decompose_lea_address (&ad, curr_id->operand_loc[nop]);
3891 : /* Do not attempt to decompose arbitrary addresses generated by combine
3892 : for asm operands with loose constraints, e.g 'X'.
3893 : Need to extract memory from op for special memory constraint,
3894 : i.e. bcst_mem_operand in i386 backend. */
3895 174688930 : else if (MEM_P (mem)
3896 174689128 : && !(INSN_CODE (curr_insn) < 0
3897 19399 : && get_constraint_type (cn) == CT_FIXED_FORM
3898 198 : && constraint_satisfied_p (op, cn)))
3899 37747622 : decompose_mem_address (&ad, mem);
3900 136941308 : else if (GET_CODE (op) == SUBREG
3901 3588756 : && MEM_P (SUBREG_REG (op)))
3902 0 : decompose_mem_address (&ad, SUBREG_REG (op));
3903 : else
3904 : return false;
3905 : /* If INDEX_REG_CLASS is assigned to base_term already and isn't to
3906 : index_term, swap them so to avoid assigning INDEX_REG_CLASS to both
3907 : when INDEX_REG_CLASS is a single register class. */
3908 39498386 : enum reg_class index_cl = index_reg_class (curr_insn);
3909 39498386 : if (ad.base_term != NULL
3910 33260794 : && ad.index_term != NULL
3911 1521025 : && ira_class_hard_regs_num[index_cl] == 1
3912 0 : && REG_P (*ad.base_term)
3913 0 : && REG_P (*ad.index_term)
3914 0 : && in_class_p (*ad.base_term, index_cl, NULL)
3915 39498386 : && ! in_class_p (*ad.index_term, index_cl, NULL))
3916 : {
3917 0 : std::swap (ad.base, ad.index);
3918 0 : std::swap (ad.base_term, ad.index_term);
3919 : }
3920 39498386 : if (! check_only_p)
3921 39491463 : change_p = equiv_address_substitution (&ad);
3922 39498386 : if (ad.base_term != NULL
3923 72759180 : && (process_addr_reg
3924 66521588 : (ad.base_term, check_only_p, before,
3925 33260794 : (ad.autoinc_p
3926 4165494 : && !(REG_P (*ad.base_term)
3927 2082747 : && find_regno_note (curr_insn, REG_DEAD,
3928 : REGNO (*ad.base_term)) != NULL_RTX)
3929 : ? after : NULL),
3930 33260794 : base_reg_class (ad.mode, ad.as, ad.base_outer_code,
3931 : get_index_code (&ad), curr_insn))))
3932 : {
3933 436157 : change_p = true;
3934 436157 : if (ad.base_term2 != NULL)
3935 0 : *ad.base_term2 = *ad.base_term;
3936 : }
3937 39498386 : if (ad.index_term != NULL
3938 39498386 : && process_addr_reg (ad.index_term, check_only_p,
3939 : before, NULL, index_cl))
3940 : change_p = true;
3941 :
3942 : /* Target hooks sometimes don't treat extra-constraint addresses as
3943 : legitimate address_operands, so handle them specially. */
3944 39498386 : if (insn_extra_address_constraint (cn)
3945 39498386 : && satisfies_address_constraint_p (&ad, cn))
3946 : return change_p;
3947 :
3948 37747629 : if (check_only_p)
3949 : return change_p;
3950 :
3951 : /* There are three cases where the shape of *AD.INNER may now be invalid:
3952 :
3953 : 1) the original address was valid, but either elimination or
3954 : equiv_address_substitution was applied and that made
3955 : the address invalid.
3956 :
3957 : 2) the address is an invalid symbolic address created by
3958 : force_const_to_mem.
3959 :
3960 : 3) the address is a frame address with an invalid offset.
3961 :
3962 : 4) the address is a frame address with an invalid base.
3963 :
3964 : All these cases involve a non-autoinc address, so there is no
3965 : point revalidating other types. */
3966 37741317 : if (ad.autoinc_p || valid_address_p (op, &ad, cn))
3967 37740873 : return change_p;
3968 :
3969 : /* Any index existed before LRA started, so we can assume that the
3970 : presence and shape of the index is valid. */
3971 444 : push_to_sequence (*before);
3972 444 : lra_assert (ad.disp == ad.disp_term);
3973 444 : if (ad.base == NULL)
3974 : {
3975 352 : if (ad.index == NULL)
3976 : {
3977 352 : rtx_insn *insn;
3978 352 : rtx_insn *last = get_last_insn ();
3979 352 : int code = -1;
3980 352 : enum reg_class cl = base_reg_class (ad.mode, ad.as,
3981 : SCRATCH, SCRATCH,
3982 : curr_insn);
3983 352 : rtx addr = *ad.inner;
3984 :
3985 695 : new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, NULL, "addr");
3986 352 : if (HAVE_lo_sum)
3987 : {
3988 : /* addr => lo_sum (new_base, addr), case (2) above. */
3989 : insn = emit_insn (gen_rtx_SET
3990 : (new_reg,
3991 : gen_rtx_HIGH (Pmode, copy_rtx (addr))));
3992 : code = recog_memoized (insn);
3993 : if (code >= 0)
3994 : {
3995 : *ad.inner = gen_rtx_LO_SUM (Pmode, new_reg, addr);
3996 : if (!valid_address_p (op, &ad, cn))
3997 : {
3998 : /* Try to put lo_sum into register. */
3999 : insn = emit_insn (gen_rtx_SET
4000 : (new_reg,
4001 : gen_rtx_LO_SUM (Pmode, new_reg, addr)));
4002 : code = recog_memoized (insn);
4003 : if (code >= 0)
4004 : {
4005 : *ad.inner = new_reg;
4006 : if (!valid_address_p (op, &ad, cn))
4007 : {
4008 : *ad.inner = addr;
4009 : code = -1;
4010 : }
4011 : }
4012 :
4013 : }
4014 : }
4015 : if (code < 0)
4016 : delete_insns_since (last);
4017 : }
4018 :
4019 352 : if (code < 0)
4020 : {
4021 : /* addr => new_base, case (2) above. */
4022 352 : lra_emit_move (new_reg, addr);
4023 :
4024 704 : for (insn = last == NULL_RTX ? get_insns () : NEXT_INSN (last);
4025 704 : insn != NULL_RTX;
4026 352 : insn = NEXT_INSN (insn))
4027 352 : if (recog_memoized (insn) < 0)
4028 : break;
4029 352 : if (insn != NULL_RTX)
4030 : {
4031 : /* Do nothing if we cannot generate right insns.
4032 : This is analogous to reload pass behavior. */
4033 0 : delete_insns_since (last);
4034 0 : end_sequence ();
4035 0 : return false;
4036 : }
4037 352 : *ad.inner = new_reg;
4038 : }
4039 : }
4040 : else
4041 : {
4042 : /* index * scale + disp => new base + index * scale,
4043 : case (1) above. */
4044 0 : enum reg_class cl = base_reg_class (ad.mode, ad.as, PLUS,
4045 0 : GET_CODE (*ad.index),
4046 : curr_insn);
4047 :
4048 0 : lra_assert (index_cl != NO_REGS);
4049 0 : new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, NULL, "disp");
4050 0 : lra_emit_move (new_reg, *ad.disp);
4051 0 : *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
4052 0 : new_reg, *ad.index);
4053 : }
4054 : }
4055 92 : else if (ad.index == NULL)
4056 : {
4057 53 : int regno;
4058 53 : enum reg_class cl;
4059 53 : rtx set;
4060 53 : rtx_insn *insns, *last_insn;
4061 :
4062 53 : cl = base_reg_class (ad.mode, ad.as, ad.base_outer_code,
4063 : get_index_code (&ad), curr_insn);
4064 :
4065 53 : if (REG_P (*ad.base_term)
4066 53 : && ira_class_subset_p[get_reg_class (REGNO (*ad.base_term))][cl])
4067 : /* It seems base reg is already in the base reg class and changing it
4068 : does not make a progress. So reload the whole inner address. */
4069 53 : goto reload_inner_addr;
4070 :
4071 : /* Try to reload base into register only if the base is invalid
4072 : for the address but with valid offset, case (4) above. */
4073 0 : start_sequence ();
4074 0 : new_reg = base_to_reg (&ad);
4075 :
4076 : /* base + disp => new base, cases (1) and (3) above. */
4077 : /* Another option would be to reload the displacement into an
4078 : index register. However, postreload has code to optimize
4079 : address reloads that have the same base and different
4080 : displacements, so reloading into an index register would
4081 : not necessarily be a win. */
4082 0 : if (new_reg == NULL_RTX)
4083 : {
4084 : /* See if the target can split the displacement into a
4085 : legitimate new displacement from a local anchor. */
4086 0 : gcc_assert (ad.disp == ad.disp_term);
4087 0 : poly_int64 orig_offset;
4088 0 : rtx offset1, offset2;
4089 0 : if (poly_int_rtx_p (*ad.disp, &orig_offset)
4090 0 : && targetm.legitimize_address_displacement (&offset1, &offset2,
4091 : orig_offset,
4092 : ad.mode))
4093 : {
4094 0 : new_reg = base_plus_disp_to_reg (&ad, offset1);
4095 0 : new_reg = gen_rtx_PLUS (GET_MODE (new_reg), new_reg, offset2);
4096 : }
4097 : else
4098 0 : new_reg = base_plus_disp_to_reg (&ad, *ad.disp);
4099 : }
4100 0 : insns = get_insns ();
4101 0 : last_insn = get_last_insn ();
4102 : /* If we generated at least two insns, try last insn source as
4103 : an address. If we succeed, we generate one less insn. */
4104 0 : if (REG_P (new_reg)
4105 0 : && last_insn != insns
4106 0 : && (set = single_set (last_insn)) != NULL_RTX
4107 0 : && GET_CODE (SET_SRC (set)) == PLUS
4108 0 : && REG_P (XEXP (SET_SRC (set), 0))
4109 0 : && CONSTANT_P (XEXP (SET_SRC (set), 1)))
4110 : {
4111 0 : *ad.inner = SET_SRC (set);
4112 0 : if (valid_address_p (op, &ad, cn))
4113 : {
4114 0 : *ad.base_term = XEXP (SET_SRC (set), 0);
4115 0 : *ad.disp_term = XEXP (SET_SRC (set), 1);
4116 0 : regno = REGNO (*ad.base_term);
4117 0 : if (regno >= FIRST_PSEUDO_REGISTER
4118 0 : && cl != lra_get_allocno_class (regno))
4119 0 : lra_change_class (regno, cl, " Change to", true);
4120 0 : new_reg = SET_SRC (set);
4121 0 : delete_insns_since (PREV_INSN (last_insn));
4122 : }
4123 : }
4124 0 : end_sequence ();
4125 0 : emit_insn (insns);
4126 0 : *ad.inner = new_reg;
4127 : }
4128 39 : else if (ad.disp_term != NULL)
4129 : {
4130 : /* base + scale * index + disp => new base + scale * index,
4131 : case (1) above. */
4132 39 : gcc_assert (ad.disp == ad.disp_term);
4133 39 : new_reg = base_plus_disp_to_reg (&ad, *ad.disp);
4134 39 : *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
4135 39 : new_reg, *ad.index);
4136 : }
4137 0 : else if ((scale = get_index_scale (&ad)) == 1)
4138 : {
4139 : /* The last transformation to one reg will be made in
4140 : curr_insn_transform function. */
4141 0 : end_sequence ();
4142 0 : return false;
4143 : }
4144 0 : else if (scale != 0)
4145 : {
4146 : /* base + scale * index => base + new_reg,
4147 : case (1) above.
4148 : Index part of address may become invalid. For example, we
4149 : changed pseudo on the equivalent memory and a subreg of the
4150 : pseudo onto the memory of different mode for which the scale is
    4151                 :          prohibited.  */
4152 0 : new_reg = index_part_to_reg (&ad, index_cl);
4153 0 : *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
4154 0 : *ad.base_term, new_reg);
4155 : }
4156 : else
4157 : {
4158 53 : enum reg_class cl;
4159 53 : rtx addr;
4160 0 : reload_inner_addr:
4161 53 : cl = base_reg_class (ad.mode, ad.as, SCRATCH, SCRATCH, curr_insn);
4162 53 : addr = *ad.inner;
4163 53 : new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, NULL, "addr");
4164 : /* addr => new_base. */
4165 53 : lra_emit_move (new_reg, addr);
4166 53 : *ad.inner = new_reg;
4167 : }
4168 444 : *before = end_sequence ();
4169 444 : return true;
4170 : }
4171 :
    4172                 : /* If CHECK_ONLY_P is false, do address reloads while they are necessary.
4173 : Use process_address_1 as a helper function. Return true for any
4174 : RTL changes.
4175 :
4176 : If CHECK_ONLY_P is true, just check address correctness. Return
    4177                 :    false if the address is correct.  */
4178 : static bool
4179 175952369 : process_address (int nop, bool check_only_p,
4180 : rtx_insn **before, rtx_insn **after)
4181 : {
4182 175952369 : bool res = false;
4183 : /* Use enough iterations to process all address parts: */
4184 176439694 : for (int i = 0; i < 10; i++)
4185 : {
4186 176439694 : if (!process_address_1 (nop, check_only_p, before, after))
4187 : {
4188 : return res;
4189 : }
4190 : else
4191 : {
4192 487325 : if (check_only_p)
4193 : return true;
4194 487325 : res = true;
4195 : }
4196 : }
4197 0 : fatal_insn ("unable to reload address in ", curr_insn);
4198 : }
4199 :
4200 : /* Override the generic address_reload_context in order to
4201 : control the creation of reload pseudos. */
4202 : class lra_autoinc_reload_context : public address_reload_context
4203 : {
4204 : machine_mode mode;
4205 : enum reg_class rclass;
4206 :
4207 : public:
4208 0 : lra_autoinc_reload_context (machine_mode mode, enum reg_class new_rclass)
4209 0 : : mode (mode), rclass (new_rclass) {}
4210 :
4211 0 : rtx get_reload_reg () const override final
4212 : {
4213 0 : return lra_create_new_reg (mode, NULL_RTX, rclass, NULL, "INC/DEC result");
4214 : }
4215 : };
4216 :
4217 : /* Emit insns to reload VALUE into a new register. VALUE is an
4218 : auto-increment or auto-decrement RTX whose operand is a register or
4219 : memory location; so reloading involves incrementing that location.
4220 :
4221 : INC_AMOUNT is the number to increment or decrement by (always
4222 : positive and ignored for POST_MODIFY/PRE_MODIFY).
4223 :
4224 : Return a pseudo containing the result. */
4225 : static rtx
4226 0 : emit_inc (enum reg_class new_rclass, rtx value, poly_int64 inc_amount)
4227 : {
4228 0 : lra_autoinc_reload_context context (GET_MODE (value), new_rclass);
4229 0 : return context.emit_autoinc (value, inc_amount);
4230 : }
4231 :
4232 : /* Return true if the current move insn does not need processing as we
4233 : already know that it satisfies its constraints. */
4234 : static bool
4235 100899690 : simple_move_p (void)
4236 : {
4237 100899690 : rtx dest, src;
4238 100899690 : enum reg_class dclass, sclass;
4239 :
4240 100899690 : lra_assert (curr_insn_set != NULL_RTX);
4241 100899690 : dest = SET_DEST (curr_insn_set);
4242 100899690 : src = SET_SRC (curr_insn_set);
4243 :
4244 : /* If the instruction has multiple sets we need to process it even if it
4245 : is single_set. This can happen if one or more of the SETs are dead.
4246 : See PR73650. */
4247 100899690 : if (multiple_sets (curr_insn))
4248 : return false;
4249 :
4250 100711492 : return ((dclass = get_op_class (dest)) != NO_REGS
4251 21157292 : && (sclass = get_op_class (src)) != NO_REGS
4252 : /* The backend guarantees that register moves of cost 2
4253 : never need reloads. */
4254 89822649 : && targetm.register_move_cost (GET_MODE (src), sclass, dclass) == 2);
4255 : }
4256 :
4257 : /* Swap operands NOP and NOP + 1. */
4258 : static inline void
4259 21540627 : swap_operands (int nop)
4260 : {
4261 21540627 : std::swap (curr_operand_mode[nop], curr_operand_mode[nop + 1]);
4262 21540627 : std::swap (original_subreg_reg_mode[nop], original_subreg_reg_mode[nop + 1]);
4263 21540627 : std::swap (*curr_id->operand_loc[nop], *curr_id->operand_loc[nop + 1]);
4264 21540627 : std::swap (equiv_substition_p[nop], equiv_substition_p[nop + 1]);
4265 : /* Swap the duplicates too. */
4266 21540627 : lra_update_dup (curr_id, nop);
4267 21540627 : lra_update_dup (curr_id, nop + 1);
4268 21540627 : }
4269 :
4270 : /* Return TRUE if X is a (subreg of) reg and there are no hard regs of X class
4271 : which can contain value of MODE. */
4272 34 : static bool invalid_mode_reg_p (enum machine_mode mode, rtx x)
4273 : {
4274 34 : if (SUBREG_P (x))
4275 2 : x = SUBREG_REG (x);
4276 34 : if (! REG_P (x))
4277 : return false;
4278 34 : enum reg_class rclass = get_reg_class (REGNO (x));
4279 34 : return (!hard_reg_set_empty_p (reg_class_contents[rclass])
4280 34 : && hard_reg_set_subset_p
4281 34 : (reg_class_contents[rclass],
4282 34 : ira_prohibited_class_mode_regs[rclass][mode]));
4283 : }
4284 :
4285 : /* Return TRUE if regno is referenced in more than one non-debug insn. */
4286 : static bool
4287 2901250 : multiple_insn_refs_p (int regno)
4288 : {
4289 2901250 : unsigned int uid;
4290 2901250 : bitmap_iterator bi;
4291 2901250 : int nrefs = 0;
4292 6982815 : EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
4293 : {
4294 6977185 : if (!NONDEBUG_INSN_P (lra_insn_recog_data[uid]->insn))
4295 1180315 : continue;
4296 5796870 : if (nrefs == 1)
4297 : return true;
4298 2901250 : nrefs++;
4299 : }
4300 : return false;
4301 : }
4302 :
4303 : /* Mark insns starting with FIRST as postponed for processing their
4304 : constraints. See comments for lra_postponed_insns. */
4305 : static void
4306 92128 : postpone_insns (rtx_insn *first)
4307 : {
4308 104679 : for (auto insn = first; insn != NULL_RTX; insn = NEXT_INSN (insn))
4309 : {
4310 12551 : bitmap_set_bit (&lra_postponed_insns, INSN_UID (insn));
4311 12551 : if (lra_dump_file != NULL)
4312 : {
4313 7 : fprintf (lra_dump_file, " Postponing constraint processing: ");
4314 7 : dump_insn_slim (lra_dump_file, insn);
4315 : }
4316 : }
4317 92128 : }
4318 :
4319 : /* Main entry point of the constraint code: search the body of the
4320 : current insn to choose the best alternative. It is mimicking insn
4321 : alternative cost calculation model of former reload pass. That is
4322 : because machine descriptions were written to use this model. This
4323 : model can be changed in future. Make commutative operand exchange
4324 : if it is chosen.
4325 :
4326 : if CHECK_ONLY_P is false, do RTL changes to satisfy the
4327 : constraints. Return true if any change happened during function
4328 : call.
4329 :
4330 : If CHECK_ONLY_P is true then don't do any transformation. Just
4331 : check that the insn satisfies all constraints. If the insn does
4332 : not satisfy any constraint, return true. */
4333 : static bool
4334 106260892 : curr_insn_transform (bool check_only_p)
4335 : {
4336 106260892 : int i, j, k;
4337 106260892 : int n_operands;
4338 106260892 : int n_alternatives;
4339 106260892 : int n_outputs;
4340 106260892 : int commutative;
4341 106260892 : signed char goal_alt_matched[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
4342 106260892 : signed char match_inputs[MAX_RECOG_OPERANDS + 1];
4343 106260892 : signed char outputs[MAX_RECOG_OPERANDS + 1];
4344 106260892 : rtx_insn *before, *after;
4345 106260892 : bool alt_p = false;
4346 : /* Flag that the insn has been changed through a transformation. */
4347 106260892 : bool change_p;
4348 106260892 : bool sec_mem_p;
4349 106260892 : bool use_sec_mem_p;
4350 106260892 : int max_regno_before;
4351 106260892 : int reused_alternative_num;
4352 :
4353 106260892 : curr_insn_set = single_set (curr_insn);
4354 106260892 : if (curr_insn_set != NULL_RTX && simple_move_p ())
4355 : {
4356 : /* We assume that the corresponding insn alternative has no
4357 : earlier clobbers. If it is not the case, don't define move
4358 : cost equal to 2 for the corresponding register classes. */
4359 16258345 : lra_set_used_insn_alternative (curr_insn, LRA_NON_CLOBBERED_ALT);
4360 16258345 : return false;
4361 : }
4362 :
4363 90002547 : no_input_reloads_p = no_output_reloads_p = false;
4364 90002547 : goal_alt_number = -1;
4365 90002547 : change_p = sec_mem_p = false;
4366 :
4367 : /* CALL_INSNs are not allowed to have any output reloads. */
4368 90002547 : if (CALL_P (curr_insn))
4369 5963464 : no_output_reloads_p = true;
4370 :
4371 90002547 : n_operands = curr_static_id->n_operands;
4372 90002547 : n_alternatives = curr_static_id->n_alternatives;
4373 :
4374 : /* Just return "no reloads" if insn has no operands with
4375 : constraints. */
4376 90002547 : if (n_operands == 0 || n_alternatives == 0)
4377 : return false;
4378 :
4379 79504949 : max_regno_before = max_reg_num ();
4380 :
4381 336235209 : for (i = 0; i < n_operands; i++)
4382 : {
4383 177225311 : goal_alt_matched[i][0] = -1;
4384 177225311 : goal_alt_matches[i] = -1;
4385 : }
4386 :
4387 79504949 : commutative = curr_static_id->commutative;
4388 :
4389 : /* Now see what we need for pseudos that didn't get hard regs or got
4390 : the wrong kind of hard reg. For this, we must consider all the
4391 : operands together against the register constraints. */
4392 :
4393 79504949 : best_losers = best_overall = INT_MAX;
4394 79504949 : best_reload_sum = 0;
4395 :
4396 79504949 : curr_swapped = false;
4397 79504949 : goal_alt_swapped = false;
4398 :
4399 79504949 : if (! check_only_p)
4400 : /* Make equivalence substitution and memory subreg elimination
4401 : before address processing because an address legitimacy can
4402 : depend on memory mode. */
4403 256657075 : for (i = 0; i < n_operands; i++)
4404 : {
4405 177172475 : rtx op, subst, old;
4406 177172475 : bool op_change_p = false;
4407 :
4408 177172475 : if (curr_static_id->operand[i].is_operator)
4409 1404575 : continue;
4410 :
4411 175767900 : old = op = *curr_id->operand_loc[i];
4412 175767900 : if (GET_CODE (old) == SUBREG)
4413 3643542 : old = SUBREG_REG (old);
4414 175767900 : subst = get_equiv_with_elimination (old, curr_insn);
4415 175767900 : original_subreg_reg_mode[i] = VOIDmode;
4416 175767900 : equiv_substition_p[i] = false;
4417 :
4418 175767900 : if (subst != old
4419 : /* We don't want to change an out operand by constant or invariant
4420 : which will require additional reloads, e.g. by putting a constant
4421 : into memory. */
4422 1508459 : && (curr_static_id->operand[i].type == OP_IN || MEM_P (subst)
4423 0 : || (GET_CODE (subst) == SUBREG && MEM_P (SUBREG_REG (subst)))))
4424 : {
4425 1508459 : equiv_substition_p[i] = true;
4426 1508459 : rtx new_subst = copy_rtx (subst);
4427 1508459 : if (lra_pointer_equiv_set_in (subst))
4428 767838 : lra_pointer_equiv_set_add (new_subst);
4429 1508459 : subst = new_subst;
4430 1508459 : lra_assert (REG_P (old));
4431 1508459 : if (GET_CODE (op) != SUBREG)
4432 1454120 : *curr_id->operand_loc[i] = subst;
4433 : else
4434 : {
4435 54339 : SUBREG_REG (op) = subst;
4436 54339 : if (GET_MODE (subst) == VOIDmode)
4437 90 : original_subreg_reg_mode[i] = GET_MODE (old);
4438 : }
4439 1508459 : if (lra_dump_file != NULL)
4440 : {
4441 3 : fprintf (lra_dump_file,
4442 : "Changing pseudo %d in operand %i of insn %u on equiv ",
4443 3 : REGNO (old), i, INSN_UID (curr_insn));
4444 3 : dump_value_slim (lra_dump_file, subst, 1);
4445 3 : fprintf (lra_dump_file, "\n");
4446 : }
4447 1508459 : op_change_p = change_p = true;
4448 : }
4449 175767900 : if (simplify_operand_subreg (i, GET_MODE (old)) || op_change_p)
4450 : {
4451 1508986 : change_p = true;
4452 1508986 : lra_update_dup (curr_id, i);
4453 : }
4454 : }
4455 :
4456 : /* We process equivalences before ignoring postponed insns on the current
4457 : constraint sub-pass but before any reload insn generation for the
4458 : postponed insn. */
4459 79484600 : if (! check_only_p
4460 79484600 : && bitmap_bit_p (&lra_postponed_insns, INSN_UID (curr_insn)))
4461 : return true;
4462 :
4463 : /* Reload address registers and displacements. We do it before
4464 : finding an alternative because of memory constraints. */
4465 79498248 : before = after = NULL;
4466 256710157 : for (i = 0; i < n_operands; i++)
4467 177211909 : if (! curr_static_id->operand[i].is_operator
4468 177211909 : && process_address (i, check_only_p, &before, &after))
4469 : {
4470 487322 : if (check_only_p)
4471 : return true;
4472 487322 : change_p = true;
4473 487322 : lra_update_dup (curr_id, i);
4474 : }
4475 :
4476 79498248 : if (change_p)
4477 : /* If we've changed the instruction then any alternative that
4478 : we chose previously may no longer be valid. */
4479 1949312 : lra_set_used_insn_alternative (curr_insn, LRA_UNKNOWN_ALT);
4480 :
4481 79477899 : if (! check_only_p && curr_insn_set != NULL_RTX
4482 155181824 : && check_and_process_move (&change_p, &sec_mem_p))
4483 0 : return change_p;
4484 :
4485 79498248 : try_swapped:
4486 :
4487 89962036 : reused_alternative_num = check_only_p ? LRA_UNKNOWN_ALT : curr_id->used_insn_alternative;
4488 89962036 : if (lra_dump_file != NULL && reused_alternative_num >= 0)
4489 0 : fprintf (lra_dump_file, "Reusing alternative %d for insn #%u\n",
4490 0 : reused_alternative_num, INSN_UID (curr_insn));
4491 :
4492 89962036 : if (process_alt_operands (reused_alternative_num))
4493 81278635 : alt_p = true;
4494 :
4495 89962036 : if (check_only_p)
4496 34700 : return ! alt_p || best_losers != 0;
4497 :
4498 : /* If insn is commutative (it's safe to exchange a certain pair of
4499 : operands) then we need to try each alternative twice, the second
4500 : time matching those two operands as if we had exchanged them. To
4501 : do this, really exchange them in operands.
4502 :
4503 : If we have just tried the alternatives the second time, return
4504 : operands to normal and drop through. */
4505 :
4506 89941687 : if (reused_alternative_num < 0 && commutative >= 0)
4507 : {
4508 20927576 : curr_swapped = !curr_swapped;
4509 20927576 : if (curr_swapped)
4510 : {
4511 10463788 : swap_operands (commutative);
4512 10463788 : goto try_swapped;
4513 : }
4514 : else
4515 10463788 : swap_operands (commutative);
4516 : }
4517 :
4518 79477899 : if (! alt_p && ! sec_mem_p)
4519 : {
4520 : /* No alternative works with reloads?? */
4521 6 : if (INSN_CODE (curr_insn) >= 0)
4522 0 : fatal_insn ("unable to generate reloads for:", curr_insn);
4523 6 : error_for_asm (curr_insn,
4524 : "inconsistent operand constraints in an %<asm%>");
4525 6 : lra_asm_error_p = true;
4526 6 : if (! JUMP_P (curr_insn))
4527 : {
4528 : /* Avoid further trouble with this insn. Don't generate use
4529 : pattern here as we could use the insn SP offset. */
4530 6 : lra_set_insn_deleted (curr_insn);
4531 : }
4532 : else
4533 : {
4534 0 : lra_invalidate_insn_data (curr_insn);
4535 0 : ira_nullify_asm_goto (curr_insn);
4536 0 : lra_update_insn_regno_info (curr_insn);
4537 : }
4538 6 : return true;
4539 : }
4540 :
4541 : /* If the best alternative is with operands 1 and 2 swapped, swap
4542 : them. Update the operand numbers of any reloads already
4543 : pushed. */
4544 :
4545 79477893 : if (goal_alt_swapped)
4546 : {
4547 608543 : if (lra_dump_file != NULL)
4548 18 : fprintf (lra_dump_file, " Commutative operand exchange in insn %u\n",
4549 18 : INSN_UID (curr_insn));
4550 :
4551 : /* Swap the duplicates too. */
4552 608543 : swap_operands (commutative);
4553 608543 : change_p = true;
4554 : }
4555 :
4556 : /* Some targets' TARGET_SECONDARY_MEMORY_NEEDED (e.g. x86) are defined
4557 : too conservatively. So we use the secondary memory only if there
4558 : is no any alternative without reloads. */
4559 79477893 : use_sec_mem_p = false;
4560 79477893 : if (! alt_p)
4561 : use_sec_mem_p = true;
4562 79477893 : else if (sec_mem_p)
4563 : {
4564 15005 : for (i = 0; i < n_operands; i++)
4565 14833 : if (! goal_alt_win[i] && ! goal_alt_match_win[i])
4566 : break;
4567 13331 : use_sec_mem_p = i < n_operands;
4568 : }
4569 :
4570 13331 : if (use_sec_mem_p)
4571 : {
4572 13159 : int in = -1, out = -1;
4573 13159 : rtx new_reg, src, dest, rld;
4574 13159 : machine_mode sec_mode, rld_mode;
4575 :
4576 13159 : lra_assert (curr_insn_set != NULL_RTX && sec_mem_p);
4577 13159 : dest = SET_DEST (curr_insn_set);
4578 13159 : src = SET_SRC (curr_insn_set);
4579 39477 : for (i = 0; i < n_operands; i++)
4580 26318 : if (*curr_id->operand_loc[i] == dest)
4581 : out = i;
4582 13159 : else if (*curr_id->operand_loc[i] == src)
4583 13159 : in = i;
4584 13159 : for (i = 0; i < curr_static_id->n_dups; i++)
4585 0 : if (out < 0 && *curr_id->dup_loc[i] == dest)
4586 0 : out = curr_static_id->dup_num[i];
4587 0 : else if (in < 0 && *curr_id->dup_loc[i] == src)
4588 0 : in = curr_static_id->dup_num[i];
4589 13159 : lra_assert (out >= 0 && in >= 0
4590 : && curr_static_id->operand[out].type == OP_OUT
4591 : && curr_static_id->operand[in].type == OP_IN);
4592 13159 : rld = partial_subreg_p (GET_MODE (src), GET_MODE (dest)) ? src : dest;
4593 13159 : rld_mode = GET_MODE (rld);
4594 13159 : sec_mode = targetm.secondary_memory_needed_mode (rld_mode);
4595 13159 : if (rld_mode != sec_mode
4596 13159 : && (invalid_mode_reg_p (sec_mode, dest)
4597 17 : || invalid_mode_reg_p (sec_mode, src)))
4598 : sec_mode = rld_mode;
4599 13159 : new_reg = lra_create_new_reg (sec_mode, NULL_RTX, NO_REGS, NULL,
4600 : "secondary");
4601 : /* If the mode is changed, it should be wider. */
4602 13159 : lra_assert (!partial_subreg_p (sec_mode, rld_mode));
4603 13159 : if (sec_mode != rld_mode)
4604 : {
4605 : /* If the target says specifically to use another mode for
4606 : secondary memory moves we cannot reuse the original
4607 : insn. */
4608 17 : after = emit_spill_move (false, new_reg, dest);
4609 17 : lra_process_new_insns (curr_insn, NULL, after,
4610 : "Inserting the sec. move");
4611 : /* We may have non null BEFORE here (e.g. after address
4612 : processing. */
4613 17 : push_to_sequence (before);
4614 17 : before = emit_spill_move (true, new_reg, src);
4615 17 : emit_insn (before);
4616 17 : before = end_sequence ();
4617 17 : lra_process_new_insns (curr_insn, before, NULL, "Changing on");
4618 17 : lra_set_insn_deleted (curr_insn);
4619 : }
4620 13142 : else if (dest == rld)
4621 : {
4622 13142 : *curr_id->operand_loc[out] = new_reg;
4623 13142 : lra_update_dup (curr_id, out);
4624 13142 : after = emit_spill_move (false, new_reg, dest);
4625 13142 : lra_process_new_insns (curr_insn, NULL, after,
4626 : "Inserting the sec. move");
4627 : }
4628 : else
4629 : {
4630 0 : *curr_id->operand_loc[in] = new_reg;
4631 0 : lra_update_dup (curr_id, in);
4632 : /* See comments above. */
4633 0 : push_to_sequence (before);
4634 0 : before = emit_spill_move (true, new_reg, src);
4635 0 : emit_insn (before);
4636 0 : before = end_sequence ();
4637 0 : lra_process_new_insns (curr_insn, before, NULL,
4638 : "Inserting the sec. move");
4639 : }
4640 13159 : lra_update_insn_regno_info (curr_insn);
4641 13159 : return true;
4642 : }
4643 :
4644 79464734 : lra_assert (goal_alt_number >= 0);
4645 158834419 : lra_set_used_insn_alternative (curr_insn, goal_reuse_alt_p
4646 : ? goal_alt_number : LRA_UNKNOWN_ALT);
4647 :
4648 79464734 : if (lra_dump_file != NULL)
4649 : {
4650 1187 : const char *p;
4651 :
4652 1187 : fprintf (lra_dump_file, " Choosing alt %d in insn %u:",
4653 1187 : goal_alt_number, INSN_UID (curr_insn));
4654 1187 : print_curr_insn_alt (goal_alt_number);
4655 1187 : if (INSN_CODE (curr_insn) >= 0
4656 1187 : && (p = get_insn_name (INSN_CODE (curr_insn))) != NULL)
4657 1180 : fprintf (lra_dump_file, " {%s}", p);
4658 1187 : if (maybe_ne (curr_id->sp_offset, 0))
4659 : {
4660 0 : fprintf (lra_dump_file, " (sp_off=");
4661 0 : print_dec (curr_id->sp_offset, lra_dump_file);
4662 0 : fprintf (lra_dump_file, ")");
4663 : }
4664 1187 : fprintf (lra_dump_file, "\n");
4665 : }
4666 :
4667 : /* Right now, for any pair of operands I and J that are required to
4668 : match, with J < I, goal_alt_matches[I] is J. Add I to
4669 : goal_alt_matched[J]. */
4670 :
4671 256597475 : for (i = 0; i < n_operands; i++)
4672 177132741 : if ((j = goal_alt_matches[i]) >= 0)
4673 : {
4674 10514619 : for (k = 0; goal_alt_matched[j][k] >= 0; k++)
4675 : ;
4676 : /* We allow matching one output operand and several input
4677 : operands. */
4678 10514618 : lra_assert (k == 0
4679 : || (curr_static_id->operand[j].type == OP_OUT
4680 : && curr_static_id->operand[i].type == OP_IN
4681 : && (curr_static_id->operand
4682 : [goal_alt_matched[j][0]].type == OP_IN)));
4683 10514618 : goal_alt_matched[j][k] = i;
4684 10514618 : goal_alt_matched[j][k + 1] = -1;
4685 : }
4686 :
4687 256597475 : for (i = 0; i < n_operands; i++)
4688 177132741 : goal_alt_win[i] |= goal_alt_match_win[i];
4689 :
4690 : /* Any constants that aren't allowed and can't be reloaded into
4691 : registers are here changed into memory references. */
4692 256597475 : for (i = 0; i < n_operands; i++)
4693 177132741 : if (goal_alt_win[i])
4694 : {
4695 171077315 : int regno;
4696 171077315 : enum reg_class new_class;
4697 171077315 : rtx reg = *curr_id->operand_loc[i];
4698 :
4699 171077315 : if (GET_CODE (reg) == SUBREG)
4700 3336118 : reg = SUBREG_REG (reg);
4701 :
4702 171077315 : if (REG_P (reg) && (regno = REGNO (reg)) >= FIRST_PSEUDO_REGISTER)
4703 : {
4704 77740302 : bool ok_p = in_class_p (reg, goal_alt[i], &new_class, true);
4705 :
4706 77740302 : if (new_class != NO_REGS && get_reg_class (regno) != new_class)
4707 : {
4708 3481483 : lra_assert (ok_p);
4709 3481483 : lra_change_class (regno, new_class, " Change to", true);
4710 : }
4711 : }
4712 : }
4713 : else
4714 : {
4715 6055426 : const char *constraint;
4716 6055426 : char c;
4717 6055426 : rtx op = *curr_id->operand_loc[i];
4718 6055426 : rtx subreg = NULL_RTX;
4719 6055426 : machine_mode mode = curr_operand_mode[i];
4720 :
4721 6055426 : if (GET_CODE (op) == SUBREG)
4722 : {
4723 242987 : subreg = op;
4724 242987 : op = SUBREG_REG (op);
4725 242987 : mode = GET_MODE (op);
4726 : }
4727 :
4728 6272915 : if (CONST_POOL_OK_P (mode, op)
4729 6272915 : && ((targetm.preferred_reload_class
4730 217489 : (op, (enum reg_class) goal_alt[i]) == NO_REGS)
4731 71933 : || no_input_reloads_p))
4732 : {
4733 145556 : rtx tem = force_const_mem (mode, op);
4734 :
4735 145556 : change_p = true;
4736 145556 : if (subreg != NULL_RTX)
4737 0 : tem = gen_rtx_SUBREG (mode, tem, SUBREG_BYTE (subreg));
4738 :
4739 145556 : *curr_id->operand_loc[i] = tem;
4740 145556 : lra_update_dup (curr_id, i);
4741 145556 : process_address (i, false, &before, &after);
4742 :
4743 : /* If the alternative accepts constant pool refs directly
4744 : there will be no reload needed at all. */
4745 145556 : if (subreg != NULL_RTX)
4746 0 : continue;
4747 : /* Skip alternatives before the one requested. */
4748 145556 : constraint = (curr_static_id->operand_alternative
4749 145556 : [goal_alt_number * n_operands + i].constraint);
4750 145556 : for (;
4751 246874 : (c = *constraint) && c != ',' && c != '#';
4752 101318 : constraint += CONSTRAINT_LEN (c, constraint))
4753 : {
4754 199954 : enum constraint_num cn = lookup_constraint (constraint);
4755 199954 : if ((insn_extra_memory_constraint (cn)
4756 101467 : || insn_extra_special_memory_constraint (cn)
4757 : || insn_extra_relaxed_memory_constraint (cn))
4758 200103 : && satisfies_memory_constraint_p (tem, cn))
4759 : break;
4760 : }
4761 145556 : if (c == '\0' || c == ',' || c == '#')
4762 46920 : continue;
4763 :
4764 98636 : goal_alt_win[i] = true;
4765 : }
4766 : }
4767 :
4768 : n_outputs = 0;
4769 256597475 : for (i = 0; i < n_operands; i++)
4770 177132741 : if (curr_static_id->operand[i].type == OP_OUT)
4771 69004313 : outputs[n_outputs++] = i;
4772 79464734 : outputs[n_outputs] = -1;
4773 256597475 : for (i = 0; i < n_operands; i++)
4774 : {
4775 177132741 : int regno;
4776 177132741 : bool optional_p = false;
4777 177132741 : rtx old, new_reg;
4778 177132741 : rtx op = *curr_id->operand_loc[i];
4779 :
4780 177132741 : if (goal_alt_win[i])
4781 : {
4782 171175951 : if (goal_alt[i] == NO_REGS
4783 46623995 : && REG_P (op)
4784 5373203 : && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER
4785 : /* We assigned a hard register to the pseudo in the past but now
4786 : decided to spill it for the insn. If the pseudo is used only
4787 : in this insn, it is better to spill it here as we free hard
4788 : registers for other pseudos referenced in the insn. The most
4789 : common case of this is a scratch register which will be
4790 : transformed to scratch back at the end of LRA. */
4791 174077201 : && !multiple_insn_refs_p (regno))
4792 : {
4793 11260 : if (lra_get_allocno_class (regno) != NO_REGS)
4794 5299 : lra_change_class (regno, NO_REGS, " Change to", true);
4795 5630 : reg_renumber[regno] = -1;
4796 : }
4797 : /* We can do an optional reload. If the pseudo got a hard
4798 : reg, we might improve the code through inheritance. If
4799 : it does not get a hard register we coalesce memory/memory
4800 : moves later. Ignore move insns to avoid cycling. */
4801 171175951 : if (! lra_simple_p
4802 170631273 : && lra_undo_inheritance_iter < LRA_MAX_INHERITANCE_PASSES
4803 158230774 : && goal_alt[i] != NO_REGS && REG_P (op)
4804 78668767 : && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER
4805 65868579 : && regno < new_regno_start
4806 61152413 : && ! ira_former_scratch_p (regno)
4807 61097564 : && reg_renumber[regno] < 0
4808 : /* Check that the optional reload pseudo will be able to
4809 : hold given mode value. */
4810 3850934 : && ! (prohibited_class_reg_set_mode_p
4811 3850934 : (goal_alt[i], reg_class_contents[goal_alt[i]],
4812 3850934 : PSEUDO_REGNO_MODE (regno)))
4813 175026875 : && (curr_insn_set == NULL_RTX
4814 3843784 : || !((REG_P (SET_SRC (curr_insn_set))
4815 : || MEM_P (SET_SRC (curr_insn_set))
4816 : || GET_CODE (SET_SRC (curr_insn_set)) == SUBREG)
4817 3201755 : && (REG_P (SET_DEST (curr_insn_set))
4818 : || MEM_P (SET_DEST (curr_insn_set))
4819 : || GET_CODE (SET_DEST (curr_insn_set)) == SUBREG))))
4820 : optional_p = true;
4821 170526740 : else if (goal_alt_matched[i][0] != -1
4822 8825341 : && curr_static_id->operand[i].type == OP_OUT
4823 8824207 : && (curr_static_id->operand_alternative
4824 8824207 : [goal_alt_number * n_operands + i].earlyclobber)
4825 19023 : && REG_P (op))
4826 : {
4827 24035 : for (j = 0; goal_alt_matched[i][j] != -1; j++)
4828 : {
4829 18970 : rtx op2 = *curr_id->operand_loc[goal_alt_matched[i][j]];
4830 :
4831 18970 : if (REG_P (op2) && REGNO (op) != REGNO (op2))
4832 : break;
4833 : }
4834 18970 : if (goal_alt_matched[i][j] != -1)
4835 : {
4836 : /* Generate reloads for different output and matched
4837 : input registers. This is the easiest way to avoid
4838 : creation of non-existing register conflicts in
4839 : lra-lives.cc. */
4840 13905 : match_reload (i, goal_alt_matched[i], outputs, goal_alt[i],
4841 : &goal_alt_exclude_start_hard_regs[i], &before,
4842 : &after, true);
4843 : }
4844 172120028 : continue;
4845 18970 : }
4846 : else
4847 : {
4848 170507770 : enum reg_class rclass, common_class;
4849 :
4850 89418020 : if (REG_P (op) && goal_alt[i] != NO_REGS
4851 84044817 : && (regno = REGNO (op)) >= new_regno_start
4852 4724423 : && (rclass = get_reg_class (regno)) == ALL_REGS
4853 0 : && ((common_class = ira_reg_class_subset[rclass][goal_alt[i]])
4854 : != NO_REGS)
4855 0 : && common_class != ALL_REGS
4856 170507770 : && enough_allocatable_hard_regs_p (common_class,
4857 0 : GET_MODE (op)))
4858 : /* Refine reload pseudo class from chosen alternative
4859 : constraint. */
4860 0 : lra_change_class (regno, common_class, " Change to", true);
4861 170507770 : continue;
4862 170507770 : }
4863 : }
4864 :
4865 : /* Operands that match previous ones have already been handled. */
4866 6606001 : if (goal_alt_matches[i] >= 0)
4867 1593288 : continue;
4868 :
4869 : /* We should not have an operand with a non-offsettable address
4870 : appearing where an offsettable address will do. It also may
4871 : be a case when the address should be special in other words
4872 : not a general one (e.g. it needs no index reg). */
4873 5012713 : if (goal_alt_matched[i][0] == -1 && goal_alt_offmemok[i] && MEM_P (op))
4874 : {
4875 72 : enum reg_class rclass;
4876 72 : rtx *loc = &XEXP (op, 0);
4877 72 : enum rtx_code code = GET_CODE (*loc);
4878 :
4879 72 : push_to_sequence (before);
4880 72 : rclass = base_reg_class (GET_MODE (op), MEM_ADDR_SPACE (op),
4881 : MEM, SCRATCH, curr_insn);
4882 72 : if (GET_RTX_CLASS (code) == RTX_AUTOINC)
4883 0 : new_reg = emit_inc (rclass, *loc,
4884 : /* This value does not matter for MODIFY. */
4885 0 : GET_MODE_SIZE (GET_MODE (op)));
4886 86 : else if (get_reload_reg (OP_IN, Pmode, *loc, rclass,
4887 : NULL, false, false,
4888 : "offsetable address", &new_reg))
4889 : {
4890 72 : rtx addr = *loc;
4891 72 : enum rtx_code code = GET_CODE (addr);
4892 72 : bool align_p = false;
4893 :
4894 72 : if (code == AND && CONST_INT_P (XEXP (addr, 1)))
4895 : {
4896 : /* (and ... (const_int -X)) is used to align to X bytes. */
4897 0 : align_p = true;
4898 0 : addr = XEXP (*loc, 0);
4899 : }
4900 : else
4901 72 : addr = canonicalize_reload_addr (addr);
4902 :
4903 72 : lra_emit_move (new_reg, addr);
4904 72 : if (align_p)
4905 0 : emit_move_insn (new_reg, gen_rtx_AND (GET_MODE (new_reg), new_reg, XEXP (*loc, 1)));
4906 : }
4907 72 : before = end_sequence ();
4908 72 : *loc = new_reg;
4909 72 : lra_update_dup (curr_id, i);
4910 72 : }
4911 5012641 : else if (goal_alt_matched[i][0] == -1)
4912 : {
4913 3323365 : machine_mode mode;
4914 3323365 : rtx reg, *loc;
4915 3323365 : int hard_regno;
4916 3323365 : enum op_type type = curr_static_id->operand[i].type;
4917 :
4918 3323365 : loc = curr_id->operand_loc[i];
4919 3323365 : mode = curr_operand_mode[i];
4920 3323365 : if (GET_CODE (*loc) == SUBREG)
4921 : {
4922 75597 : reg = SUBREG_REG (*loc);
4923 75597 : poly_int64 byte = SUBREG_BYTE (*loc);
4924 75597 : if (REG_P (reg)
4925 : /* Strict_low_part requires reloading the register and not
4926 : just the subreg. Likewise for a strict subreg no wider
4927 : than a word for WORD_REGISTER_OPERATIONS targets. */
4928 75597 : && (curr_static_id->operand[i].strict_low
4929 75527 : || (!paradoxical_subreg_p (mode, GET_MODE (reg))
4930 72916 : && (hard_regno
4931 72916 : = get_try_hard_regno (REGNO (reg))) >= 0
4932 71354 : && (simplify_subreg_regno
4933 146951 : (hard_regno,
4934 71354 : GET_MODE (reg), byte, mode) < 0)
4935 0 : && (goal_alt[i] == NO_REGS
4936 0 : || (simplify_subreg_regno
4937 75597 : (ira_class_hard_regs[goal_alt[i]][0],
4938 0 : GET_MODE (reg), byte, mode) >= 0)))
4939 75527 : || (partial_subreg_p (mode, GET_MODE (reg))
4940 75527 : && known_le (GET_MODE_SIZE (GET_MODE (reg)),
4941 : UNITS_PER_WORD)
4942 : && WORD_REGISTER_OPERATIONS))
4943 : /* Avoid the situation when there are no available hard regs
4944 : for the pseudo mode but there are ones for the subreg
4945 : mode: */
4946 75667 : && !(goal_alt[i] != NO_REGS
4947 70 : && REGNO (reg) >= FIRST_PSEUDO_REGISTER
4948 70 : && (prohibited_class_reg_set_mode_p
4949 70 : (goal_alt[i], reg_class_contents[goal_alt[i]],
4950 70 : GET_MODE (reg)))
4951 : && !(prohibited_class_reg_set_mode_p
4952 0 : (goal_alt[i], reg_class_contents[goal_alt[i]],
4953 : mode))))
4954 : {
4955 : /* An OP_INOUT is required when reloading a subreg of a
4956 : mode wider than a word to ensure that data beyond the
4957 : word being reloaded is preserved. Also automatically
4958 : ensure that strict_low_part reloads are made into
4959 : OP_INOUT which should already be true from the backend
4960 : constraints. */
4961 70 : if (type == OP_OUT
4962 70 : && (curr_static_id->operand[i].strict_low
4963 0 : || read_modify_subreg_p (*loc)))
4964 : type = OP_INOUT;
4965 70 : loc = &SUBREG_REG (*loc);
4966 70 : mode = GET_MODE (*loc);
4967 : }
4968 : }
4969 3323365 : old = *loc;
4970 3323365 : if (get_reload_reg (type, mode, old, goal_alt[i],
4971 : &goal_alt_exclude_start_hard_regs[i],
4972 3323365 : loc != curr_id->operand_loc[i],
4973 3323365 : curr_static_id->operand_alternative
4974 3323365 : [goal_alt_number * n_operands + i].earlyclobber,
4975 : "", &new_reg)
4976 3323365 : && type != OP_OUT)
4977 : {
4978 2357962 : push_to_sequence (before);
4979 2357962 : lra_emit_move (new_reg, old);
4980 2357962 : before = end_sequence ();
4981 : }
4982 3323365 : *loc = new_reg;
4983 3323365 : if (type != OP_IN
4984 964315 : && find_reg_note (curr_insn, REG_UNUSED, old) == NULL_RTX
4985 : /* OLD can be an equivalent constant here. */
4986 939230 : && !CONSTANT_P (old)
4987 : /* No need to write back anything for a scratch. */
4988 939230 : && GET_CODE (old) != SCRATCH
4989 4262595 : && (!REG_P(old) || !ira_former_scratch_p (REGNO (old))))
4990 : {
4991 939230 : start_sequence ();
4992 939230 : lra_emit_move (type == OP_INOUT ? copy_rtx (old) : old, new_reg);
4993 939230 : emit_insn (after);
4994 939230 : after = end_sequence ();
4995 939230 : *loc = new_reg;
4996 : }
4997 3323365 : for (j = 0; j < goal_alt_dont_inherit_ops_num; j++)
4998 619 : if (goal_alt_dont_inherit_ops[j] == i)
4999 : {
5000 619 : lra_set_regno_unique_value (REGNO (new_reg));
5001 619 : break;
5002 : }
5003 3323365 : lra_update_dup (curr_id, i);
5004 : }
5005 1689276 : else if (curr_static_id->operand[i].type == OP_IN
5006 1689276 : && (curr_static_id->operand[goal_alt_matched[i][0]].type
5007 : == OP_OUT
5008 0 : || (curr_static_id->operand[goal_alt_matched[i][0]].type
5009 : == OP_INOUT
5010 0 : && (operands_match_p
5011 0 : (*curr_id->operand_loc[i],
5012 0 : *curr_id->operand_loc[goal_alt_matched[i][0]],
5013 : -1)))))
5014 : {
5015 : /* generate reloads for input and matched outputs. */
5016 15479 : match_inputs[0] = i;
5017 15479 : match_inputs[1] = -1;
5018 15479 : match_reload (goal_alt_matched[i][0], match_inputs, outputs,
5019 : goal_alt[i], &goal_alt_exclude_start_hard_regs[i],
5020 : &before, &after,
5021 15479 : curr_static_id->operand_alternative
5022 15479 : [goal_alt_number * n_operands + goal_alt_matched[i][0]]
5023 15479 : .earlyclobber);
5024 : }
5025 1673797 : else if ((curr_static_id->operand[i].type == OP_OUT
5026 0 : || (curr_static_id->operand[i].type == OP_INOUT
5027 0 : && (operands_match_p
5028 0 : (*curr_id->operand_loc[i],
5029 0 : *curr_id->operand_loc[goal_alt_matched[i][0]],
5030 : -1))))
5031 1673797 : && (curr_static_id->operand[goal_alt_matched[i][0]].type
5032 : == OP_IN))
5033 : /* Generate reloads for output and matched inputs. */
5034 1673797 : match_reload (i, goal_alt_matched[i], outputs, goal_alt[i],
5035 : &goal_alt_exclude_start_hard_regs[i], &before, &after,
5036 1673797 : curr_static_id->operand_alternative
5037 1673797 : [goal_alt_number * n_operands + i].earlyclobber);
5038 0 : else if (curr_static_id->operand[i].type == OP_IN
5039 0 : && (curr_static_id->operand[goal_alt_matched[i][0]].type
5040 : == OP_IN))
5041 : {
5042 : /* Generate reloads for matched inputs. */
5043 0 : match_inputs[0] = i;
5044 0 : for (j = 0; (k = goal_alt_matched[i][j]) >= 0; j++)
5045 0 : match_inputs[j + 1] = k;
5046 0 : match_inputs[j + 1] = -1;
5047 0 : match_reload (-1, match_inputs, outputs, goal_alt[i],
5048 : &goal_alt_exclude_start_hard_regs[i],
5049 : &before, &after, false);
5050 : }
5051 : else
5052 : /* We must generate code in any case when function
5053 : process_alt_operands decides that it is possible. */
5054 0 : gcc_unreachable ();
5055 :
5056 5012713 : if (optional_p)
5057 : {
5058 649211 : rtx reg = op;
5059 :
5060 649211 : lra_assert (REG_P (reg));
5061 649211 : regno = REGNO (reg);
5062 649211 : op = *curr_id->operand_loc[i]; /* Substitution. */
5063 649211 : if (GET_CODE (op) == SUBREG)
5064 0 : op = SUBREG_REG (op);
5065 649211 : gcc_assert (REG_P (op) && (int) REGNO (op) >= new_regno_start);
5066 649211 : bitmap_set_bit (&lra_optional_reload_pseudos, REGNO (op));
5067 649211 : lra_reg_info[REGNO (op)].restore_rtx = reg;
5068 649211 : if (lra_dump_file != NULL)
5069 3 : fprintf (lra_dump_file,
5070 : " Making reload reg %d for reg %d optional\n",
5071 : REGNO (op), regno);
5072 : }
5073 : }
5074 75177407 : if (before != NULL_RTX || after != NULL_RTX
5075 153839977 : || max_regno_before != max_reg_num ())
5076 5114989 : change_p = true;
5077 79464734 : if (change_p)
5078 : {
5079 6061342 : lra_update_operator_dups (curr_id);
5080 : /* Something changes -- process the insn. */
5081 6061342 : lra_update_insn_regno_info (curr_insn);
5082 6061342 : if (asm_noperands (PATTERN (curr_insn)) >= 0
5083 6061342 : && ++curr_id->asm_reloads_num >= FIRST_PSEUDO_REGISTER)
5084 : /* Most probably there are not enough registers to satisfy asm insn: */
5085 : {
5086 11 : lra_asm_insn_error (curr_insn);
5087 11 : return change_p;
5088 : }
5089 : }
5090 79464723 : if (goal_alt_out_sp_reload_p)
5091 : {
5092 : /* We have an output stack pointer reload -- update sp offset: */
5093 0 : rtx set;
5094 0 : bool done_p = false;
5095 0 : poly_int64 sp_offset = curr_id->sp_offset;
5096 0 : for (rtx_insn *insn = after; insn != NULL_RTX; insn = NEXT_INSN (insn))
5097 0 : if ((set = single_set (insn)) != NULL_RTX
5098 0 : && SET_DEST (set) == stack_pointer_rtx)
5099 : {
5100 0 : lra_assert (!done_p);
5101 0 : done_p = true;
5102 0 : curr_id->sp_offset = 0;
5103 0 : lra_insn_recog_data_t id = lra_get_insn_recog_data (insn);
5104 0 : id->sp_offset = sp_offset;
5105 0 : if (lra_dump_file != NULL)
5106 0 : fprintf (lra_dump_file,
5107 : " Moving sp offset from insn %u to %u\n",
5108 0 : INSN_UID (curr_insn), INSN_UID (insn));
5109 : }
5110 0 : lra_assert (done_p);
5111 : }
5112 79464723 : int const_regno = -1;
5113 79464723 : rtx set;
5114 79464723 : rtx_insn *prev, *const_insn = NULL;
5115 4287322 : if (before != NULL_RTX && (prev = PREV_INSN (curr_insn)) != NULL_RTX
5116 83752045 : && (set = single_set (prev)) != NULL_RTX && CONSTANT_P (SET_SRC (set)))
5117 : {
5118 324355 : rtx reg = SET_DEST (set);
5119 324355 : if (GET_CODE (reg) == SUBREG)
5120 8804 : reg = SUBREG_REG (reg);
5121 : /* Consider only reload insns as we don't want to change the order
5122 : created by previous optimizations. */
5123 236127 : if (REG_P (reg) && (int) REGNO (reg) >= lra_new_regno_start
5124 325149 : && bitmap_bit_p (&lra_reg_info[REGNO (reg)].insn_bitmap,
5125 794 : INSN_UID (curr_insn)))
5126 : {
5127 318 : const_regno = REGNO (reg);
5128 318 : const_insn = prev;
5129 : }
5130 : }
5131 79464723 : if (asm_noperands (PATTERN (curr_insn)) >= 0)
5132 : {
5133 : /* Asm can have a lot of operands. To guarantee their assignment,
5134 : postpone processing the reload insns until the reload pseudos are
5135 : assigned. */
5136 46064 : postpone_insns (before);
5137 46064 : postpone_insns (after);
5138 : }
5139 79464723 : lra_process_new_insns (curr_insn, before, after,
5140 : "Inserting insn reload", true);
5141 79464723 : if (const_regno >= 0) {
5142 636 : bool move_p = true;
5143 636 : for (rtx_insn *insn = before; insn != curr_insn; insn = NEXT_INSN (insn))
5144 318 : if (bitmap_bit_p (&lra_reg_info[const_regno].insn_bitmap,
5145 318 : INSN_UID (insn)))
5146 : {
5147 : move_p = false;
5148 : break;
5149 : }
5150 318 : if (move_p)
5151 : {
5152 318 : reorder_insns_nobb (const_insn, const_insn, PREV_INSN (curr_insn));
5153 318 : if (lra_dump_file != NULL)
5154 : {
5155 0 : dump_insn_slim (lra_dump_file, const_insn);
5156 0 : fprintf (lra_dump_file,
5157 : " to decrease reg pressure, it is moved before:\n");
5158 0 : dump_insn_slim (lra_dump_file, curr_insn);
5159 : }
5160 : }
5161 : }
5162 : return change_p;
5163 : }
5164 :
5165 : /* Return true if INSN satisfies all constraints. In other words, no
5166 : reload insns are needed. */
5167 : bool
5168 3418 : lra_constrain_insn (rtx_insn *insn)
5169 : {
 : /* Save the pass-global watermarks so the probing call to
 : curr_insn_transform below does not perturb the ongoing pass. */
5170 3418 : int saved_new_regno_start = new_regno_start;
5171 3418 : int saved_new_insn_uid_start = new_insn_uid_start;
5172 3418 : bool change_p;
5173 :
 : /* Point the pass globals at INSN and raise the watermarks so any
 : regs/insns curr_insn_transform would create count as "new". */
5174 3418 : curr_insn = insn;
5175 3418 : curr_id = lra_get_insn_recog_data (curr_insn);
5176 3418 : curr_static_id = curr_id->insn_static_data;
5177 3418 : new_insn_uid_start = get_max_uid ();
5178 3418 : new_regno_start = max_reg_num ();
 : /* check_only_p is true: the transform only reports whether reloads
 : would be needed, it does not emit them. */
5179 3418 : change_p = curr_insn_transform (true);
5180 3418 : new_regno_start = saved_new_regno_start;
5181 3418 : new_insn_uid_start = saved_new_insn_uid_start;
5182 3418 : return ! change_p;
5183 : }
5184 :
5185 : /* Return true if X is in LIST. */
5186 : static bool
5187 1307784 : in_list_p (rtx x, rtx list)
5188 : {
5189 2236875 : for (; list != NULL_RTX; list = XEXP (list, 1))
5190 1228589 : if (XEXP (list, 0) == x)
5191 : return true;
5192 : return false;
5193 : }
5194 :
5195 : /* Return true if X contains an allocatable hard register (if
5196 : HARD_REG_P) or a (spilled if SPILLED_P) pseudo. */
5197 : static bool
5198 7314202 : contains_reg_p (rtx x, bool hard_reg_p, bool spilled_p)
5199 : {
5200 7314202 : int i, j;
5201 7314202 : const char *fmt;
5202 7314202 : enum rtx_code code;
5203 :
5204 7314202 : code = GET_CODE (x);
5205 7314202 : if (REG_P (x))
5206 : {
5207 1483880 : int regno = REGNO (x);
5208 1483880 : HARD_REG_SET alloc_regs;
5209 :
5210 1483880 : if (hard_reg_p)
5211 : {
5212 465265 : if (regno >= FIRST_PSEUDO_REGISTER)
5213 135308 : regno = lra_get_regno_hard_regno (regno);
5214 465265 : if (regno < 0)
5215 : return false;
5216 465265 : alloc_regs = ~lra_no_alloc_regs;
5217 465265 : return overlaps_hard_reg_set_p (alloc_regs, GET_MODE (x), regno);
5218 : }
5219 : else
5220 : {
5221 1018615 : if (regno < FIRST_PSEUDO_REGISTER)
5222 : return false;
5223 327932 : if (! spilled_p)
5224 : return true;
5225 173994 : return lra_get_regno_hard_regno (regno) < 0;
5226 : }
5227 : }
5228 5830322 : fmt = GET_RTX_FORMAT (code);
5229 14416486 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5230 : {
5231 9154151 : if (fmt[i] == 'e')
5232 : {
5233 4037402 : if (contains_reg_p (XEXP (x, i), hard_reg_p, spilled_p))
5234 : return true;
5235 : }
5236 5116749 : else if (fmt[i] == 'E')
5237 : {
5238 1252079 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5239 1145964 : if (contains_reg_p (XVECEXP (x, i, j), hard_reg_p, spilled_p))
5240 : return true;
5241 : }
5242 : }
5243 : return false;
5244 : }
5245 :
5246 : /* Process all regs in location *LOC and change them on equivalent
5247 : substitution. Return true if any change was done. */
5248 : static bool
5249 3353 : loc_equivalence_change_p (rtx *loc)
5250 : {
5251 3353 : rtx subst, reg, x = *loc;
5252 3353 : bool result = false;
5253 3353 : enum rtx_code code = GET_CODE (x);
5254 3353 : const char *fmt;
5255 3353 : int i, j;
5256 :
5257 3353 : if (code == SUBREG)
5258 : {
5259 20 : reg = SUBREG_REG (x);
5260 20 : if ((subst = get_equiv_with_elimination (reg, curr_insn)) != reg
5261 20 : && GET_MODE (subst) == VOIDmode)
5262 : {
5263 : /* We cannot reload debug location. Simplify subreg here
5264 : while we know the inner mode. */
5265 0 : *loc = simplify_gen_subreg (GET_MODE (x), subst,
5266 0 : GET_MODE (reg), SUBREG_BYTE (x));
5267 0 : return true;
5268 : }
5269 : }
5270 3353 : if (code == REG && (subst = get_equiv_with_elimination (x, curr_insn)) != x)
5271 : {
5272 8 : *loc = subst;
5273 8 : return true;
5274 : }
5275 :
5276 : /* Scan all the operand sub-expressions. */
5277 3345 : fmt = GET_RTX_FORMAT (code);
5278 8180 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5279 : {
5280 4835 : if (fmt[i] == 'e')
5281 2581 : result = loc_equivalence_change_p (&XEXP (x, i)) || result;
5282 2254 : else if (fmt[i] == 'E')
5283 270 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5284 200 : result
5285 210 : = loc_equivalence_change_p (&XVECEXP (x, i, j)) || result;
5286 : }
5287 : return result;
5288 : }
5289 :
5290 : /* Similar to loc_equivalence_change_p, but for use as
5291 : simplify_replace_fn_rtx callback. DATA is insn for which the
5292 : elimination is done. If it null we don't do the elimination. */
5293 : static rtx
5294 42335670 : loc_equivalence_callback (rtx loc, const_rtx, void *data)
5295 : {
5296 42335670 : if (!REG_P (loc))
5297 : return NULL_RTX;
5298 :
5299 10908972 : rtx subst = (data == NULL
5300 10908972 : ? get_equiv (loc) : get_equiv_with_elimination (loc, (rtx_insn *) data));
5301 10908972 : if (subst != loc)
5302 : return subst;
5303 :
5304 : return NULL_RTX;
5305 : }
5306 :
5307 : /* Maximum number of generated reload insns per an insn. It is for
5308 : preventing this pass cycling in a bug case. */
5309 : #define MAX_RELOAD_INSNS_NUMBER LRA_MAX_INSN_RELOADS
5310 :
5311 : /* The current iteration number of this LRA pass. */
5312 : int lra_constraint_iter;
5313 :
5314 : /* True if we should during assignment sub-pass check assignment
5315 : correctness for all pseudos and spill some of them to correct
5316 : conflicts. It can be necessary when we substitute equiv which
5317 : needs checking register allocation correctness because the
5318 : equivalent value contains allocatable hard registers, or when we
5319 : restore multi-register pseudo, or when we change the insn code and
5320 : its operand became INOUT operand when it was IN one before. */
5321 : bool check_and_force_assignment_correctness_p;
5322 :
5323 : /* Return true if REGNO is referenced in more than one block. */
5324 : static bool
5325 146483 : multi_block_pseudo_p (int regno)
5326 : {
5327 146483 : basic_block bb = NULL;
5328 146483 : unsigned int uid;
5329 146483 : bitmap_iterator bi;
5330 :
5331 146483 : if (regno < FIRST_PSEUDO_REGISTER)
5332 : return false;
5333 :
5334 448603 : EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
5335 306898 : if (bb == NULL)
5336 146483 : bb = BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn);
5337 160415 : else if (BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn) != bb)
5338 : return true;
5339 : return false;
5340 : }
5341 :
5342 : /* Return true if LIST contains a deleted insn. */
5343 : static bool
5344 711570 : contains_deleted_insn_p (rtx_insn_list *list)
5345 : {
5346 1358902 : for (; list != NULL_RTX; list = list->next ())
5347 647332 : if (NOTE_P (list->insn ())
5348 647332 : && NOTE_KIND (list->insn ()) == NOTE_INSN_DELETED)
5349 : return true;
5350 : return false;
5351 : }
5352 :
5353 : /* Return true if X contains a pseudo dying in INSN. */
5354 : static bool
5355 2213447 : dead_pseudo_p (rtx x, rtx_insn *insn)
5356 : {
5357 2213447 : int i, j;
5358 2213447 : const char *fmt;
5359 2213447 : enum rtx_code code;
5360 :
5361 2213447 : if (REG_P (x))
5362 494268 : return (insn != NULL_RTX
5363 494268 : && find_regno_note (insn, REG_DEAD, REGNO (x)) != NULL_RTX);
5364 1719179 : code = GET_CODE (x);
5365 1719179 : fmt = GET_RTX_FORMAT (code);
5366 4396651 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5367 : {
5368 2682830 : if (fmt[i] == 'e')
5369 : {
5370 1315055 : if (dead_pseudo_p (XEXP (x, i), insn))
5371 : return true;
5372 : }
5373 1367775 : else if (fmt[i] == 'E')
5374 : {
5375 276212 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5376 252442 : if (dead_pseudo_p (XVECEXP (x, i, j), insn))
5377 : return true;
5378 : }
5379 : }
5380 : return false;
5381 : }
5382 :
5383 : /* Return true if INSN contains a dying pseudo in INSN right hand
5384 : side. */
5385 : static bool
5386 645950 : insn_rhs_dead_pseudo_p (rtx_insn *insn)
5387 : {
5388 645950 : rtx set = single_set (insn);
5389 :
5390 645950 : gcc_assert (set != NULL);
5391 645950 : return dead_pseudo_p (SET_SRC (set), insn);
5392 : }
5393 :
5394 : /* Return true if any init insn of REGNO contains a dying pseudo in
5395 : insn right hand side. */
5396 : static bool
5397 710188 : init_insn_rhs_dead_pseudo_p (int regno)
5398 : {
5399 710188 : rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
5400 :
5401 710188 : if (insns == NULL)
5402 : return false;
5403 1286436 : for (; insns != NULL_RTX; insns = insns->next ())
5404 645950 : if (insn_rhs_dead_pseudo_p (insns->insn ()))
5405 : return true;
5406 : return false;
5407 : }
5408 :
5409 : /* Return TRUE if REGNO has a reverse equivalence. The equivalence is
5410 : reverse only if we have one init insn with given REGNO as a
5411 : source. */
5412 : static bool
5413 711570 : reverse_equiv_p (int regno)
5414 : {
5415 711570 : rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
5416 711570 : rtx set;
5417 :
5418 711570 : if (insns == NULL)
5419 : return false;
5420 647332 : if (! INSN_P (insns->insn ())
5421 1294664 : || insns->next () != NULL)
5422 : return false;
5423 647332 : if ((set = single_set (insns->insn ())) == NULL_RTX)
5424 : return false;
5425 647332 : return REG_P (SET_SRC (set)) && (int) REGNO (SET_SRC (set)) == regno;
5426 : }
5427 :
5428 : /* Return TRUE if REGNO was reloaded in an equivalence init insn. We
5429 : call this function only for non-reverse equivalence. */
5430 : static bool
5431 704724 : contains_reloaded_insn_p (int regno)
5432 : {
5433 704724 : rtx set;
5434 704724 : rtx_insn_list *list = ira_reg_equiv[regno].init_insns;
5435 :
5436 1345210 : for (; list != NULL; list = list->next ())
5437 640486 : if ((set = single_set (list->insn ())) == NULL_RTX
5438 640486 : || ! REG_P (SET_DEST (set))
5439 1280972 : || (int) REGNO (SET_DEST (set)) != regno)
5440 : return true;
5441 : return false;
5442 : }
5443 :
5444 : /* Try combine secondary memory reload insn FROM for insn TO into TO insn.
5445 : FROM should be a load insn (usually a secondary memory reload insn). Return
5446 : TRUE in case of success. */
5447 : static bool
5448 7232946 : combine_reload_insn (rtx_insn *from, rtx_insn *to)
5449 : {
5450 7232946 : bool ok_p;
5451 7232946 : rtx_insn *saved_insn;
5452 7232946 : rtx set, from_reg, to_reg, op;
5453 7232946 : enum reg_class to_class, from_class;
5454 7232946 : int n, nop;
 : /* -1-terminated list of operand numbers we rewrote, for rollback. */
5455 7232946 : signed char changed_nops[MAX_RECOG_OPERANDS + 1];
5456 :
5457 : /* Check conditions for second memory reload and original insn: */
5458 7232946 : if ((targetm.secondary_memory_needed
5459 : == hook_bool_mode_reg_class_t_reg_class_t_false)
5460 7232946 : || NEXT_INSN (from) != to
5461 4295322 : || !NONDEBUG_INSN_P (to)
5462 11528268 : || CALL_P (to))
5463 : return false;
5464 :
5465 4289916 : lra_insn_recog_data_t id = lra_get_insn_recog_data (to);
5466 4289916 : struct lra_static_insn_data *static_id = id->insn_static_data;
5467 :
5468 4289916 : if (id->used_insn_alternative == LRA_UNKNOWN_ALT
5469 4289916 : || (set = single_set (from)) == NULL_RTX)
5470 32181 : return false;
5471 4257735 : from_reg = SET_DEST (set);
5472 4257735 : to_reg = SET_SRC (set);
5473 : /* Ignore optional reloads: */
5474 4177994 : if (! REG_P (from_reg) || ! REG_P (to_reg)
5475 7112135 : || bitmap_bit_p (&lra_optional_reload_pseudos, REGNO (from_reg)))
5476 1953945 : return false;
5477 2303790 : to_class = lra_get_allocno_class (REGNO (to_reg));
5478 2303790 : from_class = lra_get_allocno_class (REGNO (from_reg));
5479 : /* Check that reload insn is a load: */
5480 2303790 : if (to_class != NO_REGS || from_class == NO_REGS)
5481 : return false;
 : /* Tentatively substitute TO_REG for FROM_REG in every input operand
 : of TO, remembering which operands were changed. */
5482 53598 : for (n = nop = 0; nop < static_id->n_operands; nop++)
5483 : {
5484 38694 : if (static_id->operand[nop].type != OP_IN)
5485 13898 : continue;
5486 24796 : op = *id->operand_loc[nop];
5487 24796 : if (!REG_P (op) || REGNO (op) != REGNO (from_reg))
5488 10075 : continue;
5489 14721 : *id->operand_loc[nop] = to_reg;
5490 14721 : changed_nops[n++] = nop;
5491 : }
5492 14904 : changed_nops[n] = -1;
5493 14904 : lra_update_dups (id, changed_nops);
5494 14904 : lra_update_insn_regno_info (to);
5495 14904 : ok_p = recog_memoized (to) >= 0;
5496 14904 : if (ok_p)
5497 : {
5498 : /* Check that combined insn does not need any reloads: */
5499 14885 : saved_insn = curr_insn;
5500 14885 : curr_insn = to;
5501 14885 : curr_id = lra_get_insn_recog_data (curr_insn);
5502 14885 : curr_static_id = curr_id->insn_static_data;
 : /* Try at most twice: once as-is, and once with commutative
 : operands swapped (swapping back on the second failure). */
5503 14885 : for (bool swapped_p = false;;)
5504 : {
5505 17139 : ok_p = !curr_insn_transform (true);
5506 17139 : if (ok_p || curr_static_id->commutative < 0)
5507 : break;
5508 4508 : swap_operands (curr_static_id->commutative);
5509 4508 : if (lra_dump_file != NULL)
5510 : {
5511 0 : fprintf (lra_dump_file,
5512 : " Swapping %scombined insn operands:\n",
5513 : swapped_p ? "back " : "");
5514 0 : dump_insn_slim (lra_dump_file, to);
5515 : }
5516 4508 : if (swapped_p)
5517 : break;
5518 : swapped_p = true;
5519 : }
5520 14885 : curr_insn = saved_insn;
5521 14885 : curr_id = lra_get_insn_recog_data (curr_insn);
5522 14885 : curr_static_id = curr_id->insn_static_data;
5523 : }
5524 14904 : if (ok_p)
5525 : {
5526 3549 : id->used_insn_alternative = -1;
5527 3549 : lra_push_insn_and_update_insn_regno_info (to);
5528 3549 : if (lra_dump_file != NULL)
5529 : {
5530 0 : fprintf (lra_dump_file, " Use combined insn:\n");
5531 0 : dump_insn_slim (lra_dump_file, to);
5532 : }
5533 3549 : return true;
5534 : }
5535 11355 : if (lra_dump_file != NULL)
5536 : {
5537 0 : fprintf (lra_dump_file, " Failed combined insn:\n");
5538 0 : dump_insn_slim (lra_dump_file, to);
5539 : }
 : /* Combination failed: restore FROM_REG in all changed operands. */
5540 22981 : for (int i = 0; i < n; i++)
5541 : {
5542 11626 : nop = changed_nops[i];
5543 11626 : *id->operand_loc[nop] = from_reg;
5544 : }
5545 11355 : lra_update_dups (id, changed_nops);
5546 11355 : lra_update_insn_regno_info (to);
5547 11355 : if (lra_dump_file != NULL)
5548 : {
5549 0 : fprintf (lra_dump_file, " Restoring insn after failed combining:\n");
5550 0 : dump_insn_slim (lra_dump_file, to);
5551 : }
5552 : return false;
5553 : }
5554 :
5555 : /* Entry function of LRA constraint pass. Return true if the
5556 : constraint pass did change the code. */
5557 : bool
5558 3226660 : lra_constraints (bool first_p)
5559 : {
5560 3226660 : bool changed_p;
5561 3226660 : int i, hard_regno, new_insns_num;
5562 3226660 : unsigned int min_len, new_min_len, uid;
5563 3226660 : rtx set, x, reg, nosubreg_dest;
5564 3226660 : rtx_insn *original_insn;
5565 3226660 : basic_block last_bb;
5566 3226660 : bitmap_iterator bi;
5567 :
5568 3226660 : lra_constraint_iter++;
5569 3226660 : if (lra_dump_file != NULL)
5570 194 : fprintf (lra_dump_file, "\n********** Local #%d: **********\n\n",
5571 : lra_constraint_iter);
5572 3226660 : changed_p = false;
5573 3226660 : if (pic_offset_table_rtx
5574 3226660 : && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
5575 103795 : check_and_force_assignment_correctness_p = true;
5576 3122865 : else if (first_p)
5577 : /* On the first iteration we should check IRA assignment
5578 : correctness. In rare cases, the assignments can be wrong as
5579 : early clobbers operands are ignored in IRA or usages of
5580 : paradoxical sub-registers are not taken into account by
5581 : IRA. */
5582 1440717 : check_and_force_assignment_correctness_p = true;
5583 3226660 : new_insn_uid_start = get_max_uid ();
5584 3226660 : new_regno_start = first_p ? lra_constraint_new_regno_start : max_reg_num ();
5585 : /* Mark used hard regs for target stack size calulations. */
5586 206281091 : for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
5587 203054431 : if (lra_reg_info[i].nrefs != 0
5588 299985804 : && (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
5589 : {
5590 93021211 : int j, nregs;
5591 :
5592 93021211 : nregs = hard_regno_nregs (hard_regno, lra_reg_info[i].biggest_mode);
5593 189090445 : for (j = 0; j < nregs; j++)
5594 96069234 : df_set_regs_ever_live (hard_regno + j, true);
5595 : }
5596 : /* Do elimination before the equivalence processing as we can spill
5597 : some pseudos during elimination. */
5598 3226660 : lra_eliminate (false, first_p);
5599 3226660 : auto_bitmap equiv_insn_bitmap (®_obstack);
5600 :
5601 : /* Register elimination can create new pseudos via the addptr pattern,
5602 : so make sure the equivalency tables are resized appropriately. */
5603 3226660 : ira_expand_reg_equiv ();
5604 206281091 : for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
5605 203054431 : if (lra_reg_info[i].nrefs != 0)
5606 : {
5607 96931373 : ira_reg_equiv[i].profitable_p = true;
5608 96931373 : reg = regno_reg_rtx[i];
5609 96931373 : if (lra_get_regno_hard_regno (i) < 0 && (x = get_equiv (reg)) != reg)
5610 : {
5611 723803 : bool pseudo_p = contains_reg_p (x, false, false);
5612 :
5613 : /* After RTL transformation, we cannot guarantee that
5614 : pseudo in the substitution was not reloaded which might
5615 : make equivalence invalid. For example, in reverse
5616 : equiv of p0
5617 :
5618 : p0 <- ...
5619 : ...
5620 : equiv_mem <- p0
5621 :
5622 : the memory address register was reloaded before the 2nd
5623 : insn. */
5624 723803 : if ((! first_p && pseudo_p)
5625 : /* We don't use DF for compilation speed sake. So it
5626 : is problematic to update live info when we use an
5627 : equivalence containing pseudos in more than one
5628 : BB. */
5629 716348 : || (pseudo_p && multi_block_pseudo_p (i))
5630 : /* If an init insn was deleted for some reason, cancel
5631 : the equiv. We could update the equiv insns after
5632 : transformations including an equiv insn deletion
5633 : but it is not worthy as such cases are extremely
5634 : rare. */
5635 711570 : || contains_deleted_insn_p (ira_reg_equiv[i].init_insns)
5636 : /* If it is not a reverse equivalence, we check that a
5637 : pseudo in rhs of the init insn is not dying in the
5638 : insn. Otherwise, the live info at the beginning of
5639 : the corresponding BB might be wrong after we
5640 : removed the insn. When the equiv can be a
5641 : constant, the right hand side of the init insn can
5642 : be a pseudo. */
5643 711570 : || (! reverse_equiv_p (i)
5644 710188 : && (init_insn_rhs_dead_pseudo_p (i)
5645 : /* If we reloaded the pseudo in an equivalence
5646 : init insn, we cannot remove the equiv init
5647 : insns and the init insns might write into
5648 : const memory in this case. */
5649 704724 : || contains_reloaded_insn_p (i)))
5650 : /* Prevent access beyond equivalent memory for
5651 : paradoxical subregs. */
5652 706106 : || (MEM_P (x)
5653 1125324 : && maybe_gt (GET_MODE_SIZE (lra_reg_info[i].biggest_mode),
5654 : GET_MODE_SIZE (GET_MODE (x))))
5655 1429199 : || (pic_offset_table_rtx
5656 51992 : && ((CONST_POOL_OK_P (PSEUDO_REGNO_MODE (i), x)
5657 7926 : && (targetm.preferred_reload_class
5658 3963 : (x, lra_get_allocno_class (i)) == NO_REGS))
5659 50359 : || contains_symbol_ref_p (x))))
5660 20874 : ira_reg_equiv[i].defined_p
5661 20874 : = ira_reg_equiv[i].caller_save_p = false;
5662 723803 : if (contains_reg_p (x, false, true))
5663 10383 : ira_reg_equiv[i].profitable_p = false;
5664 723803 : if (get_equiv (reg) != reg)
5665 696695 : bitmap_ior_into (equiv_insn_bitmap, &lra_reg_info[i].insn_bitmap);
5666 : }
5667 : }
5668 206281091 : for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
5669 203054431 : update_equiv (i);
5670 : /* We should add all insns containing pseudos which should be
5671 : substituted by their equivalences. */
5672 5542672 : EXECUTE_IF_SET_IN_BITMAP (equiv_insn_bitmap, 0, uid, bi)
5673 2316012 : lra_push_insn_by_uid (uid);
5674 3226660 : min_len = lra_insn_stack_length ();
5675 3226660 : new_insns_num = 0;
5676 3226660 : last_bb = NULL;
5677 3226660 : changed_p = false;
5678 3226660 : original_insn = NULL;
5679 166052918 : while ((new_min_len = lra_insn_stack_length ()) != 0)
5680 : {
5681 159599598 : curr_insn = lra_pop_insn ();
5682 159599598 : --new_min_len;
5683 159599598 : curr_bb = BLOCK_FOR_INSN (curr_insn);
5684 159599598 : if (curr_bb != last_bb)
5685 : {
5686 20626043 : last_bb = curr_bb;
5687 20626043 : bb_reload_num = lra_curr_reload_num;
5688 : }
5689 159599598 : if (min_len > new_min_len)
5690 : {
5691 : min_len = new_min_len;
5692 : new_insns_num = 0;
5693 : original_insn = curr_insn;
5694 : }
5695 7232946 : else if (combine_reload_insn (curr_insn, original_insn))
5696 : {
5697 3549 : continue;
5698 : }
5699 7229397 : if (new_insns_num > MAX_RELOAD_INSNS_NUMBER)
5700 0 : internal_error
5701 0 : ("maximum number of generated reload insns per insn achieved (%d)",
5702 : MAX_RELOAD_INSNS_NUMBER);
5703 159596049 : new_insns_num++;
5704 159596049 : if (DEBUG_INSN_P (curr_insn))
5705 : {
5706 : /* We need to check equivalence in debug insn and change
5707 : pseudo to the equivalent value if necessary. */
5708 51613435 : curr_id = lra_get_insn_recog_data (curr_insn);
5709 51613435 : if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn)))
5710 : {
5711 29605 : rtx old = *curr_id->operand_loc[0];
5712 29605 : *curr_id->operand_loc[0]
5713 29605 : = simplify_replace_fn_rtx (old, NULL_RTX,
5714 : loc_equivalence_callback, curr_insn);
5715 29605 : if (old != *curr_id->operand_loc[0])
5716 : {
5717 : /* If we substitute pseudo by shared equivalence, we can fail
5718 : to update LRA reg info and this can result in many
5719 : unexpected consequences. So keep rtl unshared: */
5720 29605 : *curr_id->operand_loc[0]
5721 29605 : = copy_rtx (*curr_id->operand_loc[0]);
5722 29605 : lra_update_insn_regno_info (curr_insn);
5723 29605 : changed_p = true;
5724 : }
5725 : }
5726 : }
5727 107982614 : else if (INSN_P (curr_insn))
5728 : {
5729 106923565 : if ((set = single_set (curr_insn)) != NULL_RTX)
5730 : {
5731 101562724 : nosubreg_dest = SET_DEST (set);
5732 : /* The equivalence pseudo could be set up as SUBREG in a
5733 : case when it is a call restore insn in a mode
5734 : different from the pseudo mode. */
5735 101562724 : if (GET_CODE (nosubreg_dest) == SUBREG)
5736 1159660 : nosubreg_dest = SUBREG_REG (nosubreg_dest);
5737 102245954 : if ((REG_P (nosubreg_dest)
5738 75030361 : && (x = get_equiv (nosubreg_dest)) != nosubreg_dest
5739 : /* Remove insns which set up a pseudo whose value
5740 : cannot be changed. Such insns might be not in
5741 : init_insns because we don't update equiv data
5742 : during insn transformations.
5743 :
5744 : As an example, let suppose that a pseudo got
5745 : hard register and on the 1st pass was not
5746 : changed to equivalent constant. We generate an
5747 : additional insn setting up the pseudo because of
5748 : secondary memory movement. Then the pseudo is
5749 : spilled and we use the equiv constant. In this
5750 : case we should remove the additional insn and
5751 : this insn is not init_insns list. */
5752 701152 : && (! MEM_P (x) || MEM_READONLY_P (x)
5753 : /* Check that this is actually an insn setting
5754 : up the equivalence. */
5755 317420 : || in_list_p (curr_insn,
5756 317420 : ira_reg_equiv
5757 317420 : [REGNO (nosubreg_dest)].init_insns)))
5758 175911170 : || (((x = get_equiv (SET_SRC (set))) != SET_SRC (set))
5759 1980728 : && in_list_p (curr_insn,
5760 990364 : ira_reg_equiv
5761 990364 : [REGNO (SET_SRC (set))].init_insns)
5762 : /* This is a reverse equivalence to memory (see ira.cc)
5763 : in store insn. We can reload all the destination and
5764 : have an output reload which is a store to memory. If
5765 : we just remove the insn, we will have the output
5766 : reload storing an undefined value to the memory.
5767 : Check that we did not reload the memory to prevent a
5768 : wrong code generation. We could implement using the
5769 : equivalence still in such case but doing this is not
5770 : worth the efforts as such case is very rare. */
5771 1315 : && MEM_P (nosubreg_dest)))
5772 : {
5773 : /* This is equiv init insn of pseudo which did not get a
5774 : hard register -- remove the insn. */
5775 683230 : if (lra_dump_file != NULL)
5776 : {
5777 9 : fprintf (lra_dump_file,
5778 : " Removing equiv init insn %i (freq=%d)\n",
5779 3 : INSN_UID (curr_insn),
5780 6 : REG_FREQ_FROM_BB (BLOCK_FOR_INSN (curr_insn)));
5781 3 : dump_insn_slim (lra_dump_file, curr_insn);
5782 : }
5783 683230 : if (contains_reg_p (x, true, false))
5784 135308 : check_and_force_assignment_correctness_p = true;
5785 683230 : lra_set_insn_deleted (curr_insn);
5786 683230 : continue;
5787 : }
5788 : }
5789 106240335 : curr_id = lra_get_insn_recog_data (curr_insn);
5790 106240335 : curr_static_id = curr_id->insn_static_data;
5791 106240335 : init_curr_insn_input_reloads ();
5792 106240335 : init_curr_operand_mode ();
5793 106240335 : if (curr_insn_transform (false))
5794 : changed_p = true;
5795 : /* Check non-transformed insns too for equiv change as USE
5796 : or CLOBBER don't need reloads but can contain pseudos
5797 : being changed on their equivalences. */
5798 100159127 : else if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn))
5799 100159127 : && loc_equivalence_change_p (&PATTERN (curr_insn)))
5800 : {
5801 8 : lra_update_insn_regno_info (curr_insn);
5802 8 : lra_push_insn_by_uid (INSN_UID (curr_insn));
5803 8 : changed_p = true;
5804 : }
5805 : }
5806 : }
5807 :
5808 : /* If we used a new hard regno, changed_p should be true because the
5809 : hard reg is assigned to a new pseudo. */
5810 3226660 : if (flag_checking && !changed_p)
5811 : {
5812 132904257 : for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
5813 130290923 : if (lra_reg_info[i].nrefs != 0
5814 190504947 : && (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
5815 : {
5816 58760598 : int j, nregs = hard_regno_nregs (hard_regno,
5817 58760598 : PSEUDO_REGNO_MODE (i));
5818 :
5819 119545293 : for (j = 0; j < nregs; j++)
5820 60784695 : lra_assert (df_regs_ever_live_p (hard_regno + j));
5821 : }
5822 : }
5823 2613374 : if (changed_p)
5824 613289 : lra_dump_insns_if_possible ("changed func after local");
5825 3226660 : return changed_p;
5826 3226660 : }
5827 :
5828 : static void initiate_invariants (void);
5829 : static void finish_invariants (void);
5830 :
5831 : /* Initiate the LRA constraint pass. It is done once per
5832 : function. */
5833 : void
5834 1480947 : lra_constraints_init (void)
5835 : {
5836 1480947 : initiate_invariants ();
5837 1480947 : }
5838 :
5839 : /* Finalize the LRA constraint pass. It is done once per
5840 : function. */
5841 : void
5842 1480947 : lra_constraints_finish (void)
5843 : {
5844 1480947 : finish_invariants ();
5845 1480947 : }
5846 :
5847 :
5848 :
/* Structure describes invariants for inheritance.  */
struct lra_invariant
{
  /* The order number of the invariant.  */
  int num;
  /* The invariant RTX.  */
  rtx invariant_rtx;
  /* The origin insn of the invariant.  */
  rtx_insn *insn;
};

typedef lra_invariant invariant_t;
typedef invariant_t *invariant_ptr_t;
typedef const invariant_t *const_invariant_ptr_t;
5863 :
5864 : /* Pointer to the inheritance invariants. */
5865 : static vec<invariant_ptr_t> invariants;
5866 :
5867 : /* Allocation pool for the invariants. */
5868 : static object_allocator<lra_invariant> *invariants_pool;
5869 :
5870 : /* Hash table for the invariants. */
5871 : static htab_t invariant_table;
5872 :
5873 : /* Hash function for INVARIANT. */
5874 : static hashval_t
5875 171598 : invariant_hash (const void *invariant)
5876 : {
5877 171598 : rtx inv = ((const_invariant_ptr_t) invariant)->invariant_rtx;
5878 171598 : return lra_rtx_hash (inv);
5879 : }
5880 :
5881 : /* Equal function for invariants INVARIANT1 and INVARIANT2. */
5882 : static int
5883 57113 : invariant_eq_p (const void *invariant1, const void *invariant2)
5884 : {
5885 57113 : rtx inv1 = ((const_invariant_ptr_t) invariant1)->invariant_rtx;
5886 57113 : rtx inv2 = ((const_invariant_ptr_t) invariant2)->invariant_rtx;
5887 :
5888 57113 : return rtx_equal_p (inv1, inv2);
5889 : }
5890 :
5891 : /* Insert INVARIANT_RTX into the table if it is not there yet. Return
5892 : invariant which is in the table. */
5893 : static invariant_ptr_t
5894 171406 : insert_invariant (rtx invariant_rtx)
5895 : {
5896 171406 : void **entry_ptr;
5897 171406 : invariant_t invariant;
5898 171406 : invariant_ptr_t invariant_ptr;
5899 :
5900 171406 : invariant.invariant_rtx = invariant_rtx;
5901 171406 : entry_ptr = htab_find_slot (invariant_table, &invariant, INSERT);
5902 171406 : if (*entry_ptr == NULL)
5903 : {
5904 148292 : invariant_ptr = invariants_pool->allocate ();
5905 148292 : invariant_ptr->invariant_rtx = invariant_rtx;
5906 148292 : invariant_ptr->insn = NULL;
5907 148292 : invariants.safe_push (invariant_ptr);
5908 148292 : *entry_ptr = (void *) invariant_ptr;
5909 : }
5910 171406 : return (invariant_ptr_t) *entry_ptr;
5911 : }
5912 :
5913 : /* Initiate the invariant table. */
5914 : static void
5915 1480947 : initiate_invariants (void)
5916 : {
5917 1480947 : invariants.create (100);
5918 1480947 : invariants_pool
5919 1480947 : = new object_allocator<lra_invariant> ("Inheritance invariants");
5920 1480947 : invariant_table = htab_create (100, invariant_hash, invariant_eq_p, NULL);
5921 1480947 : }
5922 :
5923 : /* Finish the invariant table. */
5924 : static void
5925 1480947 : finish_invariants (void)
5926 : {
5927 1480947 : htab_delete (invariant_table);
5928 2961894 : delete invariants_pool;
5929 1480947 : invariants.release ();
5930 1480947 : }
5931 :
5932 : /* Make the invariant table empty. */
5933 : static void
5934 12682338 : clear_invariants (void)
5935 : {
5936 12682338 : htab_empty (invariant_table);
5937 12682338 : invariants_pool->release ();
5938 12682338 : invariants.truncate (0);
5939 12682338 : }
5940 :
5941 :
5942 :
5943 : /* This page contains code to do inheritance/split
5944 : transformations. */
5945 :
5946 : /* Number of reloads passed so far in current EBB. */
5947 : static int reloads_num;
5948 :
5949 : /* Number of calls passed so far in current EBB. */
5950 : static int calls_num;
5951 :
/* Index ID is the CALLS_NUM associated with the last call we saw with
   ABI identifier ID.  */
5954 : static int last_call_for_abi[NUM_ABI_IDS];
5955 :
5956 : /* Which registers have been fully or partially clobbered by a call
5957 : since they were last used. */
5958 : static HARD_REG_SET full_and_partial_call_clobbers;
5959 :
5960 : /* Current reload pseudo check for validity of elements in
5961 : USAGE_INSNS. */
5962 : static int curr_usage_insns_check;
5963 :
/* Info about last usage of registers in EBB to do inheritance/split
   transformation.  Inheritance transformation is done from a spilled
   pseudo and split transformations from a hard register or a pseudo
   assigned to a hard register.  */
struct usage_insns
{
  /* If the value is equal to CURR_USAGE_INSNS_CHECK, then the member
     value INSNS is valid.  The insns is chain of optional debug insns
     and a finishing non-debug insn using the corresponding reg.  The
     value is also used to mark the registers which are set up in the
     current insn.  The negated insn uid is used for this.  */
  int check;
  /* Value of global reloads_num at the last insn in INSNS.  */
  int reloads_num;
  /* Value of global calls_num at the last insn in INSNS.  */
  int calls_num;
  /* It can be true only for splitting.  And it means that the restore
     insn should be put after insn given by the following member.  */
  bool after_p;
  /* Next insns in the current EBB which use the original reg and the
     original reg value is not changed between the current insn and
     the next insns.  In other words, e.g. for inheritance, if we need
     to use the original reg value again in the next insns we can try
     to use the value in a hard register from a reload insn of the
     current insn.  */
  rtx insns;
};
5991 :
5992 : /* Map: regno -> corresponding pseudo usage insns. */
5993 : static struct usage_insns *usage_insns;
5994 :
5995 : static void
5996 246811957 : setup_next_usage_insn (int regno, rtx insn, int reloads_num, bool after_p)
5997 : {
5998 246811957 : usage_insns[regno].check = curr_usage_insns_check;
5999 246811957 : usage_insns[regno].insns = insn;
6000 246811957 : usage_insns[regno].reloads_num = reloads_num;
6001 246811957 : usage_insns[regno].calls_num = calls_num;
6002 246811957 : usage_insns[regno].after_p = after_p;
6003 246811957 : if (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0)
6004 111630617 : remove_from_hard_reg_set (&full_and_partial_call_clobbers,
6005 111630617 : PSEUDO_REGNO_MODE (regno),
6006 : reg_renumber[regno]);
6007 246811957 : }
6008 :
6009 : /* The function is used to form list REGNO usages which consists of
6010 : optional debug insns finished by a non-debug insn using REGNO.
6011 : RELOADS_NUM is current number of reload insns processed so far. */
6012 : static void
6013 139573937 : add_next_usage_insn (int regno, rtx_insn *insn, int reloads_num)
6014 : {
6015 139573937 : rtx next_usage_insns;
6016 :
6017 139573937 : if (usage_insns[regno].check == curr_usage_insns_check
6018 72791448 : && (next_usage_insns = usage_insns[regno].insns) != NULL_RTX
6019 212365385 : && DEBUG_INSN_P (insn))
6020 : {
6021 : /* Check that we did not add the debug insn yet. */
6022 13687732 : if (next_usage_insns != insn
6023 13687732 : && (GET_CODE (next_usage_insns) != INSN_LIST
6024 6093977 : || XEXP (next_usage_insns, 0) != insn))
6025 13687718 : usage_insns[regno].insns = gen_rtx_INSN_LIST (VOIDmode, insn,
6026 : next_usage_insns);
6027 : }
6028 125886205 : else if (NONDEBUG_INSN_P (insn))
6029 125389835 : setup_next_usage_insn (regno, insn, reloads_num, false);
6030 : else
6031 496370 : usage_insns[regno].check = 0;
6032 139573937 : }
6033 :
6034 : /* Return first non-debug insn in list USAGE_INSNS. */
6035 : static rtx_insn *
6036 1167535 : skip_usage_debug_insns (rtx usage_insns)
6037 : {
6038 1167535 : rtx insn;
6039 :
6040 : /* Skip debug insns. */
6041 1167535 : for (insn = usage_insns;
6042 1447431 : insn != NULL_RTX && GET_CODE (insn) == INSN_LIST;
6043 279896 : insn = XEXP (insn, 1))
6044 : ;
6045 1167535 : return safe_as_a <rtx_insn *> (insn);
6046 : }
6047 :
6048 : /* Return true if we need secondary memory moves for insn in
6049 : USAGE_INSNS after inserting inherited pseudo of class INHER_CL
6050 : into the insn. */
6051 : static bool
6052 1167542 : check_secondary_memory_needed_p (enum reg_class inher_cl ATTRIBUTE_UNUSED,
6053 : rtx usage_insns ATTRIBUTE_UNUSED)
6054 : {
6055 1167542 : rtx_insn *insn;
6056 1167542 : rtx set, dest;
6057 1167542 : enum reg_class cl;
6058 :
6059 1167542 : if (inher_cl == ALL_REGS
6060 1167542 : || (insn = skip_usage_debug_insns (usage_insns)) == NULL_RTX)
6061 : return false;
6062 1167535 : lra_assert (INSN_P (insn));
6063 1167535 : if ((set = single_set (insn)) == NULL_RTX || ! REG_P (SET_DEST (set)))
6064 : return false;
6065 1129650 : dest = SET_DEST (set);
6066 1129650 : if (! REG_P (dest))
6067 : return false;
6068 1129650 : lra_assert (inher_cl != NO_REGS);
6069 1129650 : cl = get_reg_class (REGNO (dest));
6070 1129650 : return (cl != NO_REGS && cl != ALL_REGS
6071 1129650 : && targetm.secondary_memory_needed (GET_MODE (dest), inher_cl, cl));
6072 : }
6073 :
6074 : /* Registers involved in inheritance/split in the current EBB
6075 : (inheritance/split pseudos and original registers). */
6076 : static bitmap_head check_only_regs;
6077 :
6078 : /* Reload pseudos cannot be involded in invariant inheritance in the
6079 : current EBB. */
6080 : static bitmap_head invalid_invariant_regs;
6081 :
6082 : /* Do inheritance transformations for insn INSN, which defines (if
6083 : DEF_P) or uses ORIGINAL_REGNO. NEXT_USAGE_INSNS specifies which
6084 : instruction in the EBB next uses ORIGINAL_REGNO; it has the same
6085 : form as the "insns" field of usage_insns. Return true if we
6086 : succeed in such transformation.
6087 :
6088 : The transformations look like:
6089 :
6090 : p <- ... i <- ...
6091 : ... p <- i (new insn)
6092 : ... =>
6093 : <- ... p ... <- ... i ...
6094 : or
6095 : ... i <- p (new insn)
6096 : <- ... p ... <- ... i ...
6097 : ... =>
6098 : <- ... p ... <- ... i ...
6099 : where p is a spilled original pseudo and i is a new inheritance pseudo.
6100 :
6101 :
6102 : The inheritance pseudo has the smallest class of two classes CL and
6103 : class of ORIGINAL REGNO. */
6104 : static bool
6105 1264325 : inherit_reload_reg (bool def_p, int original_regno,
6106 : enum reg_class cl, rtx_insn *insn, rtx next_usage_insns)
6107 : {
6108 1264325 : if (optimize_function_for_size_p (cfun))
6109 : return false;
6110 :
6111 1233395 : enum reg_class rclass = lra_get_allocno_class (original_regno);
6112 1233395 : rtx original_reg = regno_reg_rtx[original_regno];
6113 1233395 : rtx new_reg, usage_insn;
6114 1233395 : rtx_insn *new_insns;
6115 :
6116 1233395 : lra_assert (! usage_insns[original_regno].after_p);
6117 1233395 : if (lra_dump_file != NULL)
6118 2 : fprintf (lra_dump_file,
6119 : " <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n");
6120 1233395 : if (! ira_reg_classes_intersect_p[cl][rclass])
6121 : {
6122 65853 : if (lra_dump_file != NULL)
6123 : {
6124 0 : fprintf (lra_dump_file,
6125 : " Rejecting inheritance for %d "
6126 : "because of disjoint classes %s and %s\n",
6127 : original_regno, reg_class_names[cl],
6128 : reg_class_names[rclass]);
6129 0 : fprintf (lra_dump_file,
6130 : " >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
6131 : }
6132 65853 : return false;
6133 : }
6134 1167542 : if ((ira_class_subset_p[cl][rclass] && cl != rclass)
6135 : /* We don't use a subset of two classes because it can be
6136 : NO_REGS. This transformation is still profitable in most
6137 : cases even if the classes are not intersected as register
6138 : move is probably cheaper than a memory load. */
6139 437123 : || ira_class_hard_regs_num[cl] < ira_class_hard_regs_num[rclass])
6140 : {
6141 730419 : if (lra_dump_file != NULL)
6142 2 : fprintf (lra_dump_file, " Use smallest class of %s and %s\n",
6143 : reg_class_names[cl], reg_class_names[rclass]);
6144 :
6145 : rclass = cl;
6146 : }
6147 1167542 : if (check_secondary_memory_needed_p (rclass, next_usage_insns))
6148 : {
6149 : /* Reject inheritance resulting in secondary memory moves.
6150 : Otherwise, there is a danger in LRA cycling. Also such
6151 : transformation will be unprofitable. */
6152 12864 : if (lra_dump_file != NULL)
6153 : {
6154 0 : rtx_insn *insn = skip_usage_debug_insns (next_usage_insns);
6155 0 : rtx set = single_set (insn);
6156 :
6157 0 : lra_assert (set != NULL_RTX);
6158 :
6159 0 : rtx dest = SET_DEST (set);
6160 :
6161 0 : lra_assert (REG_P (dest));
6162 0 : fprintf (lra_dump_file,
6163 : " Rejecting inheritance for insn %d(%s)<-%d(%s) "
6164 : "as secondary mem is needed\n",
6165 0 : REGNO (dest), reg_class_names[get_reg_class (REGNO (dest))],
6166 0 : original_regno, reg_class_names[rclass]);
6167 0 : fprintf (lra_dump_file,
6168 : " >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
6169 : }
6170 12864 : return false;
6171 : }
6172 1154678 : if (ira_reg_class_min_nregs[rclass][GET_MODE (original_reg)]
6173 1154678 : != ira_reg_class_max_nregs[rclass][GET_MODE (original_reg)])
6174 : {
6175 29 : if (lra_dump_file != NULL)
6176 : {
6177 0 : fprintf (lra_dump_file,
6178 : " Rejecting inheritance for %d "
6179 : "because of requiring non-uniform class %s\n",
6180 : original_regno, reg_class_names[rclass]);
6181 0 : fprintf (lra_dump_file,
6182 : " >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
6183 : }
6184 29 : return false;
6185 : }
6186 1154649 : new_reg = lra_create_new_reg (GET_MODE (original_reg), original_reg,
6187 : rclass, NULL, "inheritance");
6188 1154649 : start_sequence ();
6189 1154649 : if (def_p)
6190 542334 : lra_emit_move (original_reg, new_reg);
6191 : else
6192 612315 : lra_emit_move (new_reg, original_reg);
6193 1154649 : new_insns = end_sequence ();
6194 1154649 : if (NEXT_INSN (new_insns) != NULL_RTX)
6195 : {
6196 0 : if (lra_dump_file != NULL)
6197 : {
6198 0 : fprintf (lra_dump_file,
6199 : " Rejecting inheritance %d->%d "
6200 : "as it results in 2 or more insns:\n",
6201 : original_regno, REGNO (new_reg));
6202 0 : dump_rtl_slim (lra_dump_file, new_insns, NULL, -1, 0);
6203 0 : fprintf (lra_dump_file,
6204 : " >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
6205 : }
6206 0 : return false;
6207 : }
6208 1154649 : lra_substitute_pseudo_within_insn (insn, original_regno, new_reg, false);
6209 1154649 : lra_update_insn_regno_info (insn);
6210 1154649 : if (! def_p)
6211 : /* We now have a new usage insn for original regno. */
6212 612315 : setup_next_usage_insn (original_regno, new_insns, reloads_num, false);
6213 1154649 : if (lra_dump_file != NULL)
6214 2 : fprintf (lra_dump_file, " Original reg change %d->%d (bb%d):\n",
6215 2 : original_regno, REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
6216 1154649 : lra_reg_info[REGNO (new_reg)].restore_rtx = regno_reg_rtx[original_regno];
6217 1154649 : bitmap_set_bit (&check_only_regs, REGNO (new_reg));
6218 1154649 : bitmap_set_bit (&check_only_regs, original_regno);
6219 1154649 : bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
6220 1154649 : if (def_p)
6221 542334 : lra_process_new_insns (insn, NULL, new_insns,
6222 : "Add original<-inheritance");
6223 : else
6224 612315 : lra_process_new_insns (insn, new_insns, NULL,
6225 : "Add inheritance<-original");
6226 2587645 : while (next_usage_insns != NULL_RTX)
6227 : {
6228 1432996 : if (GET_CODE (next_usage_insns) != INSN_LIST)
6229 : {
6230 1154649 : usage_insn = next_usage_insns;
6231 1154649 : lra_assert (NONDEBUG_INSN_P (usage_insn));
6232 : next_usage_insns = NULL;
6233 : }
6234 : else
6235 : {
6236 278347 : usage_insn = XEXP (next_usage_insns, 0);
6237 278347 : lra_assert (DEBUG_INSN_P (usage_insn));
6238 278347 : next_usage_insns = XEXP (next_usage_insns, 1);
6239 : }
6240 1432996 : lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
6241 1432996 : DEBUG_INSN_P (usage_insn));
6242 1432996 : lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
6243 1432996 : if (lra_dump_file != NULL)
6244 : {
6245 2 : basic_block bb = BLOCK_FOR_INSN (usage_insn);
6246 2 : fprintf (lra_dump_file,
6247 : " Inheritance reuse change %d->%d (bb%d):\n",
6248 : original_regno, REGNO (new_reg),
6249 : bb ? bb->index : -1);
6250 2 : dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
6251 : }
6252 : }
6253 1154649 : if (lra_dump_file != NULL)
6254 2 : fprintf (lra_dump_file,
6255 : " >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
6256 : return true;
6257 : }
6258 :
6259 : /* Return true if we need a caller save/restore for pseudo REGNO which
6260 : was assigned to a hard register. */
6261 : static inline bool
6262 114275667 : need_for_call_save_p (int regno)
6263 : {
6264 114275667 : lra_assert (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0);
6265 114275667 : if (usage_insns[regno].calls_num < calls_num)
6266 : {
6267 : unsigned int abis = 0;
6268 116641239 : for (unsigned int i = 0; i < NUM_ABI_IDS; ++i)
6269 107668836 : if (last_call_for_abi[i] > usage_insns[regno].calls_num)
6270 8972403 : abis |= 1 << i;
6271 8972403 : gcc_assert (abis);
6272 8972403 : if (call_clobbered_in_region_p (abis, full_and_partial_call_clobbers,
6273 8972403 : PSEUDO_REGNO_MODE (regno),
6274 : reg_renumber[regno]))
6275 : return true;
6276 : }
6277 : return false;
6278 : }
6279 :
6280 : /* Global registers occurring in the current EBB. */
6281 : static bitmap_head ebb_global_regs;
6282 :
/* Return true if we need a split for hard register REGNO or pseudo
   REGNO which was assigned to a hard register.
   POTENTIAL_RELOAD_HARD_REGS contains hard registers which might be
   used for reloads since the EBB end.  It is an approximation of the
   used hard registers in the split range.  The exact value would
   require expensive calculations.  If we were aggressive with
   splitting because of the approximation, the split pseudo will save
   the same hard register assignment and will be removed in the undo
   pass.  We still need the approximation because too aggressive
   splitting would result in too inaccurate cost calculation in the
   assignment pass because of too many generated moves which will be
   probably removed in the undo pass.  */
static inline bool
need_for_split_p (HARD_REG_SET potential_reload_hard_regs, int regno)
{
  /* For a hard register REGNO is the register itself; for a pseudo it
     is the hard register currently assigned to it.  */
  int hard_regno = regno < FIRST_PSEUDO_REGISTER ? regno : reg_renumber[regno];

  lra_assert (hard_regno >= 0);
  return ((TEST_HARD_REG_BIT (potential_reload_hard_regs, hard_regno)
	   /* Don't split eliminable hard registers, otherwise we can
	      split hard registers like hard frame pointer, which
	      lives on BB start/end according to DF-infrastructure,
	      when there is a pseudo assigned to the register and
	      living in the same BB.  */
	   && (regno >= FIRST_PSEUDO_REGISTER
	       || ! TEST_HARD_REG_BIT (eliminable_regset, hard_regno))
	   && ! TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno)
	   /* Don't split call clobbered hard regs living through
	      calls, otherwise we might have a check problem in the
	      assign sub-pass as in the most cases (exception is a
	      situation when check_and_force_assignment_correctness_p value is
	      true) the assign pass assumes that all pseudos living
	      through calls are assigned to call saved hard regs.  */
	   && (regno >= FIRST_PSEUDO_REGISTER
	       || !TEST_HARD_REG_BIT (full_and_partial_call_clobbers, regno))
	   /* We need at least 2 reloads to make pseudo splitting
	      profitable.  We should provide hard regno splitting in
	      any case to solve 1st insn scheduling problem when
	      moving hard register definition up might result in
	      impossibility to find hard register for reload pseudo of
	      small register class.  */
	   && (usage_insns[regno].reloads_num
	       + (regno < FIRST_PSEUDO_REGISTER ? 0 : 3) < reloads_num)
	   && (regno < FIRST_PSEUDO_REGISTER
	       /* For short living pseudos, spilling + inheritance can
		  be considered a substitution for splitting.
		  Therefore we do not split local pseudos.  It
		  decreases also aggressiveness of splitting.  The
		  minimal number of references is chosen taking into
		  account that for 2 references splitting has no sense
		  as we can just spill the pseudo.  */
	       || (regno >= FIRST_PSEUDO_REGISTER
		   && lra_reg_info[regno].nrefs > 3
		   && bitmap_bit_p (&ebb_global_regs, regno))))
	  || (regno >= FIRST_PSEUDO_REGISTER && need_for_call_save_p (regno)));
}
6339 :
6340 : /* Return class for the split pseudo created from original pseudo with
6341 : ALLOCNO_CLASS and MODE which got a hard register HARD_REGNO. We
6342 : choose subclass of ALLOCNO_CLASS which contains HARD_REGNO and
6343 : results in no secondary memory movements. */
6344 : static enum reg_class
6345 1396 : choose_split_class (enum reg_class allocno_class,
6346 : int hard_regno ATTRIBUTE_UNUSED,
6347 : machine_mode mode ATTRIBUTE_UNUSED)
6348 : {
6349 1396 : int i;
6350 1396 : enum reg_class cl, best_cl = NO_REGS;
6351 1396 : enum reg_class hard_reg_class ATTRIBUTE_UNUSED
6352 : = REGNO_REG_CLASS (hard_regno);
6353 :
6354 1396 : if (! targetm.secondary_memory_needed (mode, allocno_class, allocno_class)
6355 1396 : && TEST_HARD_REG_BIT (reg_class_contents[allocno_class], hard_regno))
6356 : return allocno_class;
6357 0 : for (i = 0;
6358 0 : (cl = reg_class_subclasses[allocno_class][i]) != LIM_REG_CLASSES;
6359 : i++)
6360 0 : if (! targetm.secondary_memory_needed (mode, cl, hard_reg_class)
6361 0 : && ! targetm.secondary_memory_needed (mode, hard_reg_class, cl)
6362 0 : && TEST_HARD_REG_BIT (reg_class_contents[cl], hard_regno)
6363 0 : && (best_cl == NO_REGS
6364 0 : || ira_class_hard_regs_num[best_cl] < ira_class_hard_regs_num[cl]))
6365 : best_cl = cl;
6366 : return best_cl;
6367 : }
6368 :
6369 : /* Copy any equivalence information from ORIGINAL_REGNO to NEW_REGNO. It only
6370 : makes sense to call this function if NEW_REGNO is always equal to
6371 : ORIGINAL_REGNO. Set up defined_p flag when caller_save_p flag is set up and
6372 : CALL_SAVE_P is true. */
6373 :
6374 : static void
6375 661315 : lra_copy_reg_equiv (unsigned int new_regno, unsigned int original_regno,
6376 : bool call_save_p)
6377 : {
6378 661315 : if (!ira_reg_equiv[original_regno].defined_p
6379 598079 : && !(call_save_p && ira_reg_equiv[original_regno].caller_save_p))
6380 : return;
6381 :
6382 63407 : ira_expand_reg_equiv ();
6383 63407 : ira_reg_equiv[new_regno].defined_p = true;
6384 63407 : if (ira_reg_equiv[original_regno].memory)
6385 29969 : ira_reg_equiv[new_regno].memory
6386 29969 : = copy_rtx (ira_reg_equiv[original_regno].memory);
6387 63407 : if (ira_reg_equiv[original_regno].constant)
6388 27000 : ira_reg_equiv[new_regno].constant
6389 27000 : = copy_rtx (ira_reg_equiv[original_regno].constant);
6390 63407 : if (ira_reg_equiv[original_regno].invariant)
6391 6438 : ira_reg_equiv[new_regno].invariant
6392 6438 : = copy_rtx (ira_reg_equiv[original_regno].invariant);
6393 : }
6394 :
6395 : /* Do split transformations for insn INSN, which defines or uses
6396 : ORIGINAL_REGNO. NEXT_USAGE_INSNS specifies which instruction in
6397 : the EBB next uses ORIGINAL_REGNO; it has the same form as the
6398 : "insns" field of usage_insns. If TO is not NULL, we don't use
6399 : usage_insns, we put restore insns after TO insn. It is a case when
6400 : we call it from lra_split_hard_reg_for, outside the inheritance
6401 : pass.
6402 :
6403 : The transformations look like:
6404 :
6405 : p <- ... p <- ...
6406 : ... s <- p (new insn -- save)
6407 : ... =>
6408 : ... p <- s (new insn -- restore)
6409 : <- ... p ... <- ... p ...
6410 : or
6411 : <- ... p ... <- ... p ...
6412 : ... s <- p (new insn -- save)
6413 : ... =>
6414 : ... p <- s (new insn -- restore)
6415 : <- ... p ... <- ... p ...
6416 :
6417 : where p is an original pseudo got a hard register or a hard
6418 : register and s is a new split pseudo. The save is put before INSN
6419 : if BEFORE_P is true. Return true if we succeed in such
6420 : transformation. */
6421 : static bool
6422 663015 : split_reg (bool before_p, int original_regno, rtx_insn *insn,
6423 : rtx next_usage_insns, rtx_insn *to)
6424 : {
6425 663015 : enum reg_class rclass;
6426 663015 : rtx original_reg;
6427 663015 : int hard_regno, nregs;
6428 663015 : rtx new_reg, usage_insn;
6429 663015 : rtx_insn *restore, *save;
6430 663015 : bool after_p;
6431 663015 : bool call_save_p;
6432 663015 : machine_mode mode;
6433 :
6434 663015 : if (original_regno < FIRST_PSEUDO_REGISTER)
6435 : {
6436 206 : rclass = ira_allocno_class_translate[REGNO_REG_CLASS (original_regno)];
6437 206 : hard_regno = original_regno;
6438 206 : call_save_p = false;
6439 206 : nregs = 1;
6440 206 : mode = lra_reg_info[hard_regno].biggest_mode;
6441 206 : machine_mode reg_rtx_mode = GET_MODE (regno_reg_rtx[hard_regno]);
6442 : /* A reg can have a biggest_mode of VOIDmode if it was only ever seen as
6443 : part of a multi-word register. In that case, just use the reg_rtx
6444 : mode. Do the same also if the biggest mode was larger than a register
6445 : or we can not compare the modes. Otherwise, limit the size to that of
6446 : the biggest access in the function or to the natural mode at least. */
6447 206 : if (mode == VOIDmode
6448 206 : || !ordered_p (GET_MODE_PRECISION (mode),
6449 206 : GET_MODE_PRECISION (reg_rtx_mode))
6450 206 : || paradoxical_subreg_p (mode, reg_rtx_mode)
6451 411 : || maybe_gt (GET_MODE_PRECISION (reg_rtx_mode), GET_MODE_PRECISION (mode)))
6452 : {
6453 663015 : original_reg = regno_reg_rtx[hard_regno];
6454 663015 : mode = reg_rtx_mode;
6455 : }
6456 : else
6457 189 : original_reg = gen_rtx_REG (mode, hard_regno);
6458 : }
6459 : else
6460 : {
6461 662809 : mode = PSEUDO_REGNO_MODE (original_regno);
6462 662809 : hard_regno = reg_renumber[original_regno];
6463 662809 : nregs = hard_regno_nregs (hard_regno, mode);
6464 662809 : rclass = lra_get_allocno_class (original_regno);
6465 662809 : original_reg = regno_reg_rtx[original_regno];
6466 662809 : call_save_p = need_for_call_save_p (original_regno);
6467 : }
6468 663015 : lra_assert (hard_regno >= 0);
6469 663015 : if (lra_dump_file != NULL)
6470 0 : fprintf (lra_dump_file,
6471 : " ((((((((((((((((((((((((((((((((((((((((((((((((\n");
6472 :
6473 663015 : if (call_save_p)
6474 : {
6475 661619 : mode = HARD_REGNO_CALLER_SAVE_MODE (hard_regno,
6476 : hard_regno_nregs (hard_regno, mode),
6477 : mode);
6478 661619 : new_reg = lra_create_new_reg (mode, NULL_RTX, NO_REGS, NULL, "save");
6479 : }
6480 : else
6481 : {
6482 1396 : rclass = choose_split_class (rclass, hard_regno, mode);
6483 1396 : if (rclass == NO_REGS)
6484 : {
6485 0 : if (lra_dump_file != NULL)
6486 : {
6487 0 : fprintf (lra_dump_file,
6488 : " Rejecting split of %d(%s): "
6489 : "no good reg class for %d(%s)\n",
6490 : original_regno,
6491 0 : reg_class_names[lra_get_allocno_class (original_regno)],
6492 : hard_regno,
6493 0 : reg_class_names[REGNO_REG_CLASS (hard_regno)]);
6494 0 : fprintf
6495 0 : (lra_dump_file,
6496 : " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
6497 : }
6498 0 : return false;
6499 : }
6500 : /* Split_if_necessary can split hard registers used as part of a
6501 : multi-register mode but splits each register individually. The
6502 : mode used for each independent register may not be supported
6503 : so reject the split. Splitting the wider mode should theoretically
6504 : be possible but is not implemented. */
6505 1396 : if (!targetm.hard_regno_mode_ok (hard_regno, mode))
6506 : {
6507 0 : if (lra_dump_file != NULL)
6508 : {
6509 0 : fprintf (lra_dump_file,
6510 : " Rejecting split of %d(%s): unsuitable mode %s\n",
6511 : original_regno,
6512 0 : reg_class_names[lra_get_allocno_class (original_regno)],
6513 0 : GET_MODE_NAME (mode));
6514 0 : fprintf
6515 0 : (lra_dump_file,
6516 : " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
6517 : }
6518 0 : return false;
6519 : }
6520 1396 : new_reg = lra_create_new_reg (mode, original_reg, rclass, NULL, "split");
6521 1396 : reg_renumber[REGNO (new_reg)] = hard_regno;
6522 : }
6523 663015 : int new_regno = REGNO (new_reg);
6524 663015 : save = emit_spill_move (true, new_reg, original_reg);
6525 663015 : if (NEXT_INSN (save) != NULL_RTX && !call_save_p)
6526 : {
6527 0 : if (lra_dump_file != NULL)
6528 : {
6529 0 : fprintf
6530 0 : (lra_dump_file,
6531 : " Rejecting split %d->%d resulting in > 2 save insns:\n",
6532 : original_regno, new_regno);
6533 0 : dump_rtl_slim (lra_dump_file, save, NULL, -1, 0);
6534 0 : fprintf (lra_dump_file,
6535 : " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
6536 : }
6537 0 : return false;
6538 : }
6539 663015 : restore = emit_spill_move (false, new_reg, original_reg);
6540 663015 : if (NEXT_INSN (restore) != NULL_RTX && !call_save_p)
6541 : {
6542 0 : if (lra_dump_file != NULL)
6543 : {
6544 0 : fprintf (lra_dump_file,
6545 : " Rejecting split %d->%d "
6546 : "resulting in > 2 restore insns:\n",
6547 : original_regno, new_regno);
6548 0 : dump_rtl_slim (lra_dump_file, restore, NULL, -1, 0);
6549 0 : fprintf (lra_dump_file,
6550 : " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
6551 : }
6552 0 : return false;
6553 : }
6554 : /* Transfer equivalence information to the spill register, so that
6555 : if we fail to allocate the spill register, we have the option of
6556 : rematerializing the original value instead of spilling to the stack. */
6557 663015 : if (!HARD_REGISTER_NUM_P (original_regno)
6558 662809 : && mode == PSEUDO_REGNO_MODE (original_regno))
6559 661315 : lra_copy_reg_equiv (new_regno, original_regno, call_save_p);
6560 663015 : lra_reg_info[new_regno].restore_rtx = regno_reg_rtx[original_regno];
6561 663015 : bitmap_set_bit (&lra_split_regs, new_regno);
6562 663015 : if (to != NULL)
6563 : {
6564 159 : lra_assert (next_usage_insns == NULL);
6565 159 : usage_insn = to;
6566 159 : after_p = true;
6567 : }
6568 : else
6569 : {
6570 : /* We need check_only_regs only inside the inheritance pass. */
6571 662856 : bitmap_set_bit (&check_only_regs, new_regno);
6572 662856 : bitmap_set_bit (&check_only_regs, original_regno);
6573 662856 : after_p = usage_insns[original_regno].after_p;
6574 774259 : for (;;)
6575 : {
6576 774259 : if (GET_CODE (next_usage_insns) != INSN_LIST)
6577 : {
6578 662856 : usage_insn = next_usage_insns;
6579 662856 : break;
6580 : }
6581 111403 : usage_insn = XEXP (next_usage_insns, 0);
6582 111403 : lra_assert (DEBUG_INSN_P (usage_insn));
6583 111403 : next_usage_insns = XEXP (next_usage_insns, 1);
6584 111403 : lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
6585 : true);
6586 111403 : lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
6587 111403 : if (lra_dump_file != NULL)
6588 : {
6589 0 : fprintf (lra_dump_file, " Split reuse change %d->%d:\n",
6590 : original_regno, new_regno);
6591 0 : dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
6592 : }
6593 : }
6594 : }
6595 663015 : lra_assert (NOTE_P (usage_insn) || NONDEBUG_INSN_P (usage_insn));
6596 663015 : lra_assert (usage_insn != insn || (after_p && before_p));
6597 1122229 : lra_process_new_insns (as_a <rtx_insn *> (usage_insn),
6598 : after_p ? NULL : restore,
6599 : after_p ? restore : NULL,
6600 : call_save_p ? "Add reg<-save" : "Add reg<-split");
6601 663015 : if (call_save_p
6602 661619 : && first_call_insn != NULL
6603 1324634 : && BLOCK_FOR_INSN (first_call_insn) != BLOCK_FOR_INSN (insn))
6604 : /* PR116028: If original_regno is a pseudo that has been assigned a
6605 : callee-saved hard register, then emit the spill insn before the call
6606 : insn 'first_call_insn' instead of adjacent to 'insn'. If 'insn'
6607 : and 'first_call_insn' belong to the same EBB but to two separate
6608 : BBs, and if 'insn' is present in the entry BB, then generating the
6609 : spill insn in the entry BB can prevent shrink wrap from happening.
6610 : This is because the spill insn references the stack pointer and
6611 : hence the prolog gets generated in the entry BB itself. It is
6612 : also more efficient to generate the spill before
6613 : 'first_call_insn' as the spill now occurs only in the path
6614 : containing the call. */
6615 26921 : lra_process_new_insns (first_call_insn, save, NULL, "Add save<-reg");
6616 : else
6617 1272946 : lra_process_new_insns (insn, before_p ? save : NULL,
6618 : before_p ? NULL : save,
6619 : call_save_p ? "Add save<-reg" : "Add split<-reg");
6620 663015 : if (nregs > 1 || original_regno < FIRST_PSEUDO_REGISTER)
6621 : /* If we are trying to split multi-register. We should check
6622 : conflicts on the next assignment sub-pass. IRA can allocate on
6623 : sub-register levels, LRA do this on pseudos level right now and
6624 : this discrepancy may create allocation conflicts after
6625 : splitting.
6626 :
6627 : If we are trying to split hard register we should also check conflicts
6628 : as such splitting can create artificial conflict of the hard register
6629 : with another pseudo because of simplified conflict calculation in
6630 : LRA. */
6631 9950 : check_and_force_assignment_correctness_p = true;
6632 663015 : if (lra_dump_file != NULL)
6633 0 : fprintf (lra_dump_file,
6634 : " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
6635 : return true;
6636 : }
6637 :
6638 : /* Split a hard reg for reload pseudo REGNO having RCLASS and living
6639 : in the range [FROM, TO]. Return true if did a split. Otherwise,
6640 : return false. */
6641 : bool
6642 1570 : spill_hard_reg_in_range (int regno, enum reg_class rclass, rtx_insn *from, rtx_insn *to)
6643 : {
6644 1570 : int i, hard_regno;
6645 1570 : int rclass_size;
6646 1570 : rtx_insn *insn;
6647 1570 : unsigned int uid;
6648 1570 : bitmap_iterator bi;
6649 1570 : HARD_REG_SET ignore; /* Hard regs we must not pick for the split. */
6650 :
6651 1570 : lra_assert (from != NULL && to != NULL);
6652 1570 : ignore = lra_no_alloc_regs; /* Start from regs unavailable for allocation. */
6653 4523 : EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
6654 : {
6655 2953 : lra_insn_recog_data_t id = lra_insn_recog_data[uid];
6656 2953 : struct lra_static_insn_data *static_id = id->insn_static_data;
6657 2953 : struct lra_insn_reg *reg;
6658 :
6659 9848 : for (reg = id->regs; reg != NULL; reg = reg->next) /* Hard regs among insn operands. */
6660 6895 : if (reg->regno < FIRST_PSEUDO_REGISTER)
6661 157 : SET_HARD_REG_BIT (ignore, reg->regno);
6662 4609 : for (reg = static_id->hard_regs; reg != NULL; reg = reg->next) /* Hard regs from the insn pattern. */
6663 1656 : SET_HARD_REG_BIT (ignore, reg->regno);
6664 : }
6665 1570 : rclass_size = ira_class_hard_regs_num[rclass];
6666 4212 : for (i = 0; i < rclass_size; i++) /* Try each allocatable hard reg of RCLASS. */
6667 : {
6668 2801 : hard_regno = ira_class_hard_regs[rclass][i];
6669 2801 : if (! TEST_HARD_REG_BIT (lra_reg_info[regno].conflict_hard_regs, hard_regno)
6670 2801 : || TEST_HARD_REG_BIT (ignore, hard_regno))
6671 2636 : continue; /* Only conflicting, non-ignored hard regs are candidates. */
6672 476 : for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn)) /* Verify HARD_REGNO is unreferenced in [FROM, TO]. */
6673 : {
6674 317 : struct lra_static_insn_data *static_id;
6675 317 : struct lra_insn_reg *reg;
6676 :
6677 317 : if (!INSN_P (insn))
6678 0 : continue;
6679 317 : if (bitmap_bit_p (&lra_reg_info[hard_regno].insn_bitmap,
6680 317 : INSN_UID (insn)))
6681 : break;
6682 311 : static_id = lra_get_insn_recog_data (insn)->insn_static_data;
6683 365 : for (reg = static_id->hard_regs; reg != NULL; reg = reg->next)
6684 54 : if (reg->regno == hard_regno)
6685 : break;
6686 : if (reg != NULL)
6687 : break; /* HARD_REGNO occurs in the insn pattern: reject it. */
6688 : }
6689 165 : if (insn != NEXT_INSN (to))
6690 6 : continue; /* Loop exited early: HARD_REGNO is used in the range. */
6691 159 : if (split_reg (true, hard_regno, from, NULL, to))
6692 : return true;
6693 : }
6694 : return false;
6695 : }
6696 :
6697 : /* Recognize that we need a split transformation for insn INSN, which
6698 : defines or uses REGNO in its insn biggest MODE (we use it only if
6699 : REGNO is a hard register). POTENTIAL_RELOAD_HARD_REGS contains
6700 : hard registers which might be used for reloads since the EBB end.
6701 : Put the save before INSN if BEFORE_P is true. MAX_UID is maximal
6702 : uid before starting INSN processing. Return true if we succeed in
6703 : such transformation. */
6704 : static bool
6705 199529836 : split_if_necessary (int regno, machine_mode mode,
6706 : HARD_REG_SET potential_reload_hard_regs,
6707 : bool before_p, rtx_insn *insn, int max_uid)
6708 : {
6709 199529836 : bool res = false;
6710 199529836 : int i, nregs = 1; /* Pseudos occupy one slot; hard regs may span several. */
6711 199529836 : rtx next_usage_insns;
6712 :
6713 199529836 : if (regno < FIRST_PSEUDO_REGISTER)
6714 93525919 : nregs = hard_regno_nregs (regno, mode);
6715 399414547 : for (i = 0; i < nregs; i++) /* Consider each covered reg independently. */
6716 199884711 : if (usage_insns[regno + i].check == curr_usage_insns_check
6717 133289704 : && (next_usage_insns = usage_insns[regno + i].insns) != NULL_RTX
6718 : /* To avoid processing the register twice or more. */
6719 133289704 : && ((GET_CODE (next_usage_insns) != INSN_LIST
6720 129084582 : && INSN_UID (next_usage_insns) < max_uid)
6721 4205122 : || (GET_CODE (next_usage_insns) == INSN_LIST
6722 4205122 : && (INSN_UID (XEXP (next_usage_insns, 0)) < max_uid)))
6723 133289704 : && need_for_split_p (potential_reload_hard_regs, regno + i)
6724 200162806 : && split_reg (before_p, regno + i, insn, next_usage_insns, NULL))
6725 : res = true; /* At least one split was done. */
6726 199529836 : return res;
6727 : }
6728 :
6729 : /* Return TRUE if rtx X is considered as an invariant for
6730 : inheritance. */
6731 : static bool
6732 11539615 : invariant_p (const_rtx x)
6733 : {
6734 11539615 : machine_mode mode;
6735 11539615 : const char *fmt;
6736 11539615 : enum rtx_code code;
6737 11539615 : int i, j;
6738 :
6739 11539615 : if (side_effects_p (x))
6740 : return false; /* Side effects make the value non-invariant. */
6741 :
6742 11513618 : code = GET_CODE (x);
6743 11513618 : mode = GET_MODE (x);
6744 11513618 : if (code == SUBREG) /* Look through subregs, keeping the widest mode. */
6745 : {
6746 469164 : x = SUBREG_REG (x);
6747 469164 : code = GET_CODE (x);
6748 469164 : mode = wider_subreg_mode (mode, GET_MODE (x));
6749 : }
6750 :
6751 11513618 : if (MEM_P (x))
6752 : return false; /* Memory contents may change between insns. */
6753 :
6754 9781853 : if (REG_P (x))
6755 : {
6756 3486877 : int i, nregs, regno = REGNO (x);
6757 :
6758 3486877 : if (regno >= FIRST_PSEUDO_REGISTER || regno == STACK_POINTER_REGNUM
6759 902702 : || TEST_HARD_REG_BIT (eliminable_regset, regno)
6760 3504047 : || GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
6761 : return false; /* Only fixed non-eliminable non-CC hard regs qualify. */
6762 2 : nregs = hard_regno_nregs (regno, mode);
6763 2 : for (i = 0; i < nregs; i++)
6764 2 : if (! fixed_regs[regno + i]
/* A hard register may be clobbered in the current insn
but we can ignore this case because if the hard
register is used it should be set somewhere after the
clobber. */
6769 2 : || bitmap_bit_p (&invalid_invariant_regs, regno + i))
6770 2 : return false;
6771 : }
6772 6294976 : fmt = GET_RTX_FORMAT (code); /* Recurse over all sub-rtxes of X. */
6773 11067075 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6774 : {
6775 8354139 : if (fmt[i] == 'e')
6776 : {
6777 5480071 : if (! invariant_p (XEXP (x, i)))
6778 : return false;
6779 : }
6780 2874068 : else if (fmt[i] == 'E')
6781 : {
6782 653514 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6783 542646 : if (! invariant_p (XVECEXP (x, i, j)))
6784 : return false;
6785 : }
6786 : }
6787 : return true;
6788 : }
6789 :
6790 : /* We have 'dest_reg <- invariant'. Let us try to make an invariant
6791 : inheritance transformation (using dest_reg instead of the invariant in a
6792 : subsequent insn). */
6793 : static bool
6794 171406 : process_invariant_for_inheritance (rtx dst_reg, rtx invariant_rtx)
6795 : {
6796 171406 : invariant_ptr_t invariant_ptr;
6797 171406 : rtx_insn *insn, *new_insns;
6798 171406 : rtx insn_set, insn_reg, new_reg;
6799 171406 : int insn_regno;
6800 171406 : bool succ_p = false;
6801 171406 : int dst_regno = REGNO (dst_reg);
6802 171406 : machine_mode dst_mode = GET_MODE (dst_reg);
6803 171406 : enum reg_class cl = lra_get_allocno_class (dst_regno), insn_reg_cl;
6804 :
6805 171406 : invariant_ptr = insert_invariant (invariant_rtx); /* Table entry remembers the last insn setting this invariant (recorded below). */
6806 171406 : if ((insn = invariant_ptr->insn) != NULL_RTX)
6807 : {
6808 : /* We have a subsequent insn using the invariant. */
6809 23114 : insn_set = single_set (insn);
6810 23114 : lra_assert (insn_set != NULL);
6811 23114 : insn_reg = SET_DEST (insn_set);
6812 23114 : lra_assert (REG_P (insn_reg));
6813 23114 : insn_regno = REGNO (insn_reg);
6814 23114 : insn_reg_cl = lra_get_allocno_class (insn_regno);
6815 :
6816 23114 : if (dst_mode == GET_MODE (insn_reg)
6817 : /* We should consider only result move reg insns which are
6818 : cheap. */
6819 23042 : && targetm.register_move_cost (dst_mode, cl, insn_reg_cl) == 2
6820 45565 : && targetm.register_move_cost (dst_mode, cl, cl) == 2)
6821 : {
6822 22451 : if (lra_dump_file != NULL)
6823 0 : fprintf (lra_dump_file,
6824 : " [[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[\n");
6825 22451 : new_reg = lra_create_new_reg (dst_mode, dst_reg, cl, NULL,
6826 : "invariant inheritance");
6827 22451 : bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
6828 22451 : bitmap_set_bit (&check_only_regs, REGNO (new_reg));
6829 22451 : lra_reg_info[REGNO (new_reg)].restore_rtx = PATTERN (insn); /* For undoing failed inheritance later. */
6830 22451 : start_sequence ();
6831 22451 : lra_emit_move (new_reg, dst_reg);
6832 22451 : new_insns = end_sequence ();
6833 22451 : lra_process_new_insns (curr_insn, NULL, new_insns,
6834 : "Add invariant inheritance<-original");
6835 22451 : start_sequence ();
6836 22451 : lra_emit_move (SET_DEST (insn_set), new_reg);
6837 22451 : new_insns = end_sequence ();
6838 22451 : lra_process_new_insns (insn, NULL, new_insns,
6839 : "Changing reload<-inheritance");
6840 22451 : lra_set_insn_deleted (insn); /* The later recomputation of the invariant is gone. */
6841 22451 : succ_p = true;
6842 22451 : if (lra_dump_file != NULL)
6843 : {
6844 0 : fprintf (lra_dump_file,
6845 : " Invariant inheritance reuse change %d (bb%d):\n",
6846 0 : REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
6847 0 : dump_insn_slim (lra_dump_file, insn);
6848 0 : fprintf (lra_dump_file,
6849 : " ]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]\n");
6850 : }
6851 : }
6852 : }
6853 171406 : invariant_ptr->insn = curr_insn; /* CURR_INSN now defines the invariant value. */
6854 171406 : return succ_p;
6855 : }
6856 :
6857 : /* Check only registers living at the current program point in the
6858 : current EBB. */
6859 : static bitmap_head live_regs;
6860 :
6861 : /* Update live info in EBB given by its HEAD and TAIL insns after
6862 : inheritance/split transformation. The function removes dead moves
6863 : too. */
6864 : static void
6865 746631 : update_ebb_live_info (rtx_insn *head, rtx_insn *tail)
6866 : {
6867 746631 : unsigned int j;
6868 746631 : int i, regno;
6869 746631 : bool live_p;
6870 746631 : rtx_insn *prev_insn;
6871 746631 : rtx set;
6872 746631 : bool remove_p;
6873 746631 : basic_block last_bb, prev_bb, curr_bb;
6874 746631 : bitmap_iterator bi;
6875 746631 : struct lra_insn_reg *reg;
6876 746631 : edge e;
6877 746631 : edge_iterator ei;
6878 :
6879 746631 : last_bb = BLOCK_FOR_INSN (tail);
6880 746631 : prev_bb = NULL;
6881 746631 : for (curr_insn = tail; /* Walk the EBB insns backward from TAIL to HEAD. */
6882 37335147 : curr_insn != PREV_INSN (head);
6883 36588516 : curr_insn = prev_insn)
6884 : {
6885 36588516 : prev_insn = PREV_INSN (curr_insn);
6886 : /* We need to process empty blocks too. They contain
6887 : NOTE_INSN_BASIC_BLOCK referring to the basic block. */
6888 36588516 : if (NOTE_P (curr_insn) && NOTE_KIND (curr_insn) != NOTE_INSN_BASIC_BLOCK)
6889 1413988 : continue;
6890 35174528 : curr_bb = BLOCK_FOR_INSN (curr_insn);
6891 35174528 : if (curr_bb != prev_bb) /* Crossed a BB boundary inside the EBB. */
6892 : {
6893 1484125 : if (prev_bb != NULL)
6894 : {
6895 : /* Update df_get_live_in (prev_bb): */
6896 54277604 : EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
6897 53540110 : if (bitmap_bit_p (&live_regs, j))
6898 1612360 : bitmap_set_bit (df_get_live_in (prev_bb), j);
6899 : else
6900 51927750 : bitmap_clear_bit (df_get_live_in (prev_bb), j);
6901 : }
6902 1484125 : if (curr_bb != last_bb)
6903 : {
6904 : /* Update df_get_live_out (curr_bb): */
6905 54277604 : EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
6906 : {
6907 53540110 : live_p = bitmap_bit_p (&live_regs, j);
6908 53540110 : if (! live_p) /* Also live if live into any successor. */
6909 155687327 : FOR_EACH_EDGE (e, ei, curr_bb->succs)
6910 103813853 : if (bitmap_bit_p (df_get_live_in (e->dest), j))
6911 : {
6912 : live_p = true;
6913 : break;
6914 : }
6915 51927750 : if (live_p)
6916 1666636 : bitmap_set_bit (df_get_live_out (curr_bb), j);
6917 : else
6918 51873474 : bitmap_clear_bit (df_get_live_out (curr_bb), j);
6919 : }
6920 : }
6921 1484125 : prev_bb = curr_bb;
6922 1484125 : bitmap_and (&live_regs, &check_only_regs, df_get_live_out (curr_bb)); /* Restart live set from CURR_BB's live-out, restricted to the tracked regs. */
6923 : }
6924 35174528 : if (! NONDEBUG_INSN_P (curr_insn))
6925 12948750 : continue;
6926 22225778 : curr_id = lra_get_insn_recog_data (curr_insn);
6927 22225778 : curr_static_id = curr_id->insn_static_data;
6928 22225778 : remove_p = false;
6929 22225778 : if ((set = single_set (curr_insn)) != NULL_RTX /* A single set of a tracked dead pseudo is a removable dead store. */
6930 21540260 : && REG_P (SET_DEST (set))
6931 17209571 : && (regno = REGNO (SET_DEST (set))) >= FIRST_PSEUDO_REGISTER
6932 12795873 : && SET_DEST (set) != pic_offset_table_rtx
6933 12789252 : && bitmap_bit_p (&check_only_regs, regno)
6934 25467594 : && ! bitmap_bit_p (&live_regs, regno))
6935 : remove_p = true;
6936 : /* See which defined values die here. */
6937 61391690 : for (reg = curr_id->regs; reg != NULL; reg = reg->next)
6938 39165912 : if (reg->type == OP_OUT && ! reg->subreg_p)
6939 15329618 : bitmap_clear_bit (&live_regs, reg->regno);
6940 26433734 : for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
6941 4207956 : if (reg->type == OP_OUT && ! reg->subreg_p)
6942 3191111 : bitmap_clear_bit (&live_regs, reg->regno);
6943 22225778 : if (curr_id->arg_hard_regs != NULL)
6944 : /* Make clobbered argument hard registers die. */
6945 3359561 : for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
6946 2412488 : if (regno >= FIRST_PSEUDO_REGISTER)
6947 189780 : bitmap_clear_bit (&live_regs, regno - FIRST_PSEUDO_REGISTER);
6948 : /* Mark each used value as live. */
6949 61391690 : for (reg = curr_id->regs; reg != NULL; reg = reg->next)
6950 39165912 : if (reg->type != OP_OUT
6951 39165912 : && bitmap_bit_p (&check_only_regs, reg->regno))
6952 4550847 : bitmap_set_bit (&live_regs, reg->regno);
6953 26433734 : for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
6954 4207956 : if (reg->type != OP_OUT
6955 4207956 : && bitmap_bit_p (&check_only_regs, reg->regno))
6956 0 : bitmap_set_bit (&live_regs, reg->regno);
6957 22225778 : if (curr_id->arg_hard_regs != NULL)
6958 : /* Make used argument hard registers live. */
6959 3359561 : for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
6960 2412488 : if (regno < FIRST_PSEUDO_REGISTER
6961 2412488 : && bitmap_bit_p (&check_only_regs, regno))
6962 0 : bitmap_set_bit (&live_regs, regno);
6963 : /* It is quite important to remove dead move insns because it
6964 : means removing dead store. We don't need to process them for
6965 : constraints. */
6966 22225778 : if (remove_p)
6967 : {
6968 297210 : if (lra_dump_file != NULL)
6969 : {
6970 2 : fprintf (lra_dump_file, " Removing dead insn:\n ");
6971 2 : dump_insn_slim (lra_dump_file, curr_insn);
6972 : }
6973 297210 : lra_set_insn_deleted (curr_insn);
6974 : }
6975 : }
6976 746631 : }
6977 :
6978 : /* The structure describes info to do an inheritance for the current
6979 : insn. We need to collect such info first before doing the
6980 : transformations because the transformations change the insn
6981 : internal representation. */
6982 : struct to_inherit
6983 : {
6984 : /* Original regno. */
6985 : int regno;
6986 : /* Subsequent insns which can inherit original reg value. */
6987 : rtx insns;
6988 : };
6989 :
6990 : /* Array containing all info for doing inheritance from the current
6991 : insn. */
6992 : static struct to_inherit to_inherit[LRA_MAX_INSN_RELOADS];
6993 :
6994 : /* Number of elements in the previous array. */
6995 : static int to_inherit_num;
6996 :
6997 : /* Add inheritance info REGNO and INSNS. Their meaning is described in
6998 : structure to_inherit. */
6999 : static void
7000 312153 : add_to_inherit (int regno, rtx insns)
7001 : {
7002 312153 : int i;
7003 :
7004 312235 : for (i = 0; i < to_inherit_num; i++) /* Ignore duplicate REGNO entries. */
7005 82 : if (to_inherit[i].regno == regno)
7006 : return;
7007 312153 : lra_assert (to_inherit_num < LRA_MAX_INSN_RELOADS); /* Array capacity check. */
7008 312153 : to_inherit[to_inherit_num].regno = regno;
7009 312153 : to_inherit[to_inherit_num++].insns = insns;
7010 : }
7011 :
7012 : /* Return the last non-debug insn in basic block BB, or the block begin
7013 : note if none. */
7014 : static rtx_insn *
7015 29926579 : get_last_insertion_point (basic_block bb)
7016 : {
7017 29926579 : rtx_insn *insn;
7018 :
7019 32262643 : FOR_BB_INSNS_REVERSE (bb, insn)
7020 32262643 : if (NONDEBUG_INSN_P (insn) || NOTE_INSN_BASIC_BLOCK_P (insn))
7021 29926579 : return insn;
7022 0 : gcc_unreachable (); /* Every BB contains at least its basic block note. */
7023 : }
7024 :
7025 : /* Set up RES by registers living on edges FROM except the edge (FROM,
7026 : TO) or by registers set up in a jump insn in BB FROM. */
7027 : static void
7028 11496241 : get_live_on_other_edges (basic_block from, basic_block to, bitmap res)
7029 : {
7030 11496241 : rtx_insn *last;
7031 11496241 : struct lra_insn_reg *reg;
7032 11496241 : edge e;
7033 11496241 : edge_iterator ei;
7034 :
7035 11496241 : lra_assert (to != NULL);
7036 11496241 : bitmap_clear (res);
7037 34215301 : FOR_EACH_EDGE (e, ei, from->succs) /* Union live-in of all successors but TO. */
7038 22719060 : if (e->dest != to)
7039 11222819 : bitmap_ior_into (res, df_get_live_in (e->dest));
7040 11496241 : last = get_last_insertion_point (from);
7041 11496241 : if (! JUMP_P (last))
7042 1868123 : return; /* No final jump => no regs can be set in a jump insn. */
7043 9628118 : curr_id = lra_get_insn_recog_data (last);
7044 19256058 : for (reg = curr_id->regs; reg != NULL; reg = reg->next)
7045 9627940 : if (reg->type != OP_IN)
7046 76 : bitmap_set_bit (res, reg->regno); /* Regs written by the jump are live too. */
7047 : }
7048 :
7049 : /* Used as a temporary results of some bitmap calculations. */
7050 : static bitmap_head temp_bitmap;
7051 :
7052 : /* We split for reloads of small class of hard regs. The following
7053 : defines how many hard regs the class should have to be qualified as
7054 : small. The code is mostly oriented to x86/x86-64 architecture
7055 : where some insns need to use only specific register or pair of
7056 : registers and these register can live in RTL explicitly, e.g. for
7057 : parameter passing. */
7058 : static const int max_small_class_regs_num = 2;
7059 :
7060 : /* Do inheritance/split transformations in EBB starting with HEAD and
7061 : finishing on TAIL. We process EBB insns in the reverse order.
7062 : Return true if we did any inheritance/split transformation in the
7063 : EBB.
7064 :
7065 : We should avoid excessive splitting which results in worse code
7066 : because of inaccurate cost calculations for spilling new split
7067 : pseudos in such case. To achieve this we do splitting only if
7068 : register pressure is high in given basic block and there are reload
7069 : pseudos requiring hard registers. We could do more register
7070 : pressure calculations at any given program point to avoid unnecessary
7071 : splitting even more but it is too expensive and the current approach
7072 : works well enough. */
7073 : static bool
7074 12682338 : inherit_in_ebb (rtx_insn *head, rtx_insn *tail)
7075 : {
7076 12682338 : int i, src_regno, dst_regno, nregs;
7077 12682338 : bool change_p, succ_p, update_reloads_num_p;
7078 12682338 : rtx_insn *prev_insn, *last_insn;
7079 12682338 : rtx next_usage_insns, curr_set;
7080 12682338 : enum reg_class cl;
7081 12682338 : struct lra_insn_reg *reg;
7082 12682338 : basic_block last_processed_bb, curr_bb = NULL;
7083 12682338 : HARD_REG_SET potential_reload_hard_regs, live_hard_regs;
7084 12682338 : bitmap to_process;
7085 12682338 : unsigned int j;
7086 12682338 : bitmap_iterator bi;
7087 12682338 : bool head_p, after_p;
7088 :
7089 12682338 : change_p = false;
7090 12682338 : curr_usage_insns_check++;
7091 12682338 : clear_invariants ();
7092 12682338 : reloads_num = calls_num = 0;
7093 164870394 : for (unsigned int i = 0; i < NUM_ABI_IDS; ++i)
7094 152188056 : last_call_for_abi[i] = 0;
7095 12682338 : CLEAR_HARD_REG_SET (full_and_partial_call_clobbers);
7096 12682338 : bitmap_clear (&check_only_regs);
7097 12682338 : bitmap_clear (&invalid_invariant_regs);
7098 12682338 : last_processed_bb = NULL;
7099 12682338 : CLEAR_HARD_REG_SET (potential_reload_hard_regs);
7100 12682338 : live_hard_regs = eliminable_regset | lra_no_alloc_regs;
7101 : /* We don't process new insns generated in the loop. */
7102 234450752 : for (curr_insn = tail; curr_insn != PREV_INSN (head); curr_insn = prev_insn)
7103 : {
7104 221768414 : prev_insn = PREV_INSN (curr_insn);
7105 221768414 : if (BLOCK_FOR_INSN (curr_insn) != NULL)
7106 221768173 : curr_bb = BLOCK_FOR_INSN (curr_insn);
7107 221768414 : if (last_processed_bb != curr_bb)
7108 : {
7109 : /* We are at the end of BB. Add qualified living
7110 : pseudos for potential splitting. */
7111 18430338 : to_process = df_get_live_out (curr_bb);
7112 18430338 : if (last_processed_bb != NULL)
7113 : {
7114 : /* We are somewhere in the middle of EBB. */
7115 5748000 : get_live_on_other_edges (curr_bb, last_processed_bb,
7116 : &temp_bitmap);
7117 5748000 : to_process = &temp_bitmap;
7118 : }
7119 18430338 : last_processed_bb = curr_bb;
7120 18430338 : last_insn = get_last_insertion_point (curr_bb);
7121 36860676 : after_p = (! JUMP_P (last_insn)
7122 18430338 : && (! CALL_P (last_insn)
7123 2257618 : || (find_reg_note (last_insn,
7124 : REG_NORETURN, NULL_RTX) == NULL_RTX
7125 1340911 : && ! SIBLING_CALL_P (last_insn))));
7126 18430338 : CLEAR_HARD_REG_SET (potential_reload_hard_regs);
7127 199576856 : EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
7128 : {
7129 181146524 : if ((int) j >= lra_constraint_new_regno_start)
7130 : break;
7131 181146518 : if (j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
7132 : {
7133 118627321 : if (j < FIRST_PSEUDO_REGISTER)
7134 69333262 : SET_HARD_REG_BIT (live_hard_regs, j);
7135 : else
7136 49294059 : add_to_hard_reg_set (&live_hard_regs,
7137 49294059 : PSEUDO_REGNO_MODE (j),
7138 49294059 : reg_renumber[j]);
7139 118627321 : setup_next_usage_insn (j, last_insn, reloads_num, after_p);
7140 : }
7141 : }
7142 : }
7143 221768414 : src_regno = dst_regno = -1;
7144 221768414 : curr_set = single_set (curr_insn);
7145 221768414 : if (curr_set != NULL_RTX && REG_P (SET_DEST (curr_set)))
7146 83602267 : dst_regno = REGNO (SET_DEST (curr_set));
7147 113429855 : if (curr_set != NULL_RTX && REG_P (SET_SRC (curr_set)))
7148 39278003 : src_regno = REGNO (SET_SRC (curr_set));
7149 221768414 : update_reloads_num_p = true;
7150 221768414 : if (src_regno < lra_constraint_new_regno_start
7151 215421669 : && src_regno >= FIRST_PSEUDO_REGISTER
7152 27729267 : && reg_renumber[src_regno] < 0
7153 3683948 : && dst_regno >= lra_constraint_new_regno_start
7154 224379239 : && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS)
7155 : {
7156 : /* 'reload_pseudo <- original_pseudo'. */
7157 2610825 : if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
7158 22064 : reloads_num++;
7159 2610825 : update_reloads_num_p = false;
7160 2610825 : succ_p = false;
7161 2610825 : if (usage_insns[src_regno].check == curr_usage_insns_check
7162 2610825 : && (next_usage_insns = usage_insns[src_regno].insns) != NULL_RTX)
7163 474816 : succ_p = inherit_reload_reg (false, src_regno, cl,
7164 : curr_insn, next_usage_insns);
7165 474816 : if (succ_p)
7166 : change_p = true;
7167 : else
7168 2159284 : setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
7169 5221650 : if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
7170 623434991 : potential_reload_hard_regs |= reg_class_contents[cl];
7171 : }
7172 219157589 : else if (src_regno < 0
7173 182490411 : && dst_regno >= lra_constraint_new_regno_start
7174 5516898 : && invariant_p (SET_SRC (curr_set))
7175 272259 : && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS
7176 271717 : && ! bitmap_bit_p (&invalid_invariant_regs, dst_regno)
7177 219390455 : && ! bitmap_bit_p (&invalid_invariant_regs,
7178 232866 : ORIGINAL_REGNO(regno_reg_rtx[dst_regno])))
7179 : {
7180 : /* 'reload_pseudo <- invariant'. */
7181 171406 : if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
7182 8363 : reloads_num++;
7183 171406 : update_reloads_num_p = false;
7184 171406 : if (process_invariant_for_inheritance (SET_DEST (curr_set), SET_SRC (curr_set)))
7185 22451 : change_p = true;
7186 342812 : if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
7187 623434991 : potential_reload_hard_regs |= reg_class_contents[cl];
7188 : }
7189 218986183 : else if (src_regno >= lra_constraint_new_regno_start
7190 6346745 : && dst_regno < lra_constraint_new_regno_start
7191 5541914 : && dst_regno >= FIRST_PSEUDO_REGISTER
7192 3722160 : && reg_renumber[dst_regno] < 0
7193 1446738 : && (cl = lra_get_allocno_class (src_regno)) != NO_REGS
7194 1446738 : && usage_insns[dst_regno].check == curr_usage_insns_check
7195 218986183 : && (next_usage_insns
7196 477356 : = usage_insns[dst_regno].insns) != NULL_RTX)
7197 : {
7198 477356 : if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
7199 8003 : reloads_num++;
7200 477356 : update_reloads_num_p = false;
7201 : /* 'original_pseudo <- reload_pseudo'. */
7202 477356 : if (! JUMP_P (curr_insn)
7203 477356 : && inherit_reload_reg (true, dst_regno, cl,
7204 : curr_insn, next_usage_insns))
7205 : change_p = true;
7206 : /* Invalidate. */
7207 477356 : usage_insns[dst_regno].check = 0;
7208 954712 : if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
7209 623434991 : potential_reload_hard_regs |= reg_class_contents[cl];
7210 : }
7211 218508827 : else if (INSN_P (curr_insn))
7212 : {
7213 183084846 : int iter;
7214 183084846 : int max_uid = get_max_uid ();
7215 :
7216 183084846 : curr_id = lra_get_insn_recog_data (curr_insn);
7217 183084846 : curr_static_id = curr_id->insn_static_data;
7218 183084846 : to_inherit_num = 0;
7219 : /* Process insn definitions. */
7220 549254538 : for (iter = 0; iter < 2; iter++)
7221 366169692 : for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
7222 593113334 : reg != NULL;
7223 226943642 : reg = reg->next)
7224 226943642 : if (reg->type != OP_IN
7225 226943642 : && (dst_regno = reg->regno) < lra_constraint_new_regno_start)
7226 : {
7227 45866262 : if (dst_regno >= FIRST_PSEUDO_REGISTER && reg->type == OP_OUT
7228 43743174 : && reg_renumber[dst_regno] < 0 && ! reg->subreg_p
7229 1778553 : && usage_insns[dst_regno].check == curr_usage_insns_check
7230 90812340 : && (next_usage_insns
7231 128177 : = usage_insns[dst_regno].insns) != NULL_RTX)
7232 : {
7233 128177 : struct lra_insn_reg *r;
7234 :
7235 382145 : for (r = curr_id->regs; r != NULL; r = r->next)
7236 253968 : if (r->type != OP_OUT && r->regno == dst_regno)
7237 : break;
7238 : /* Don't do inheritance if the pseudo is also
7239 : used in the insn. */
7240 128177 : if (r == NULL)
7241 : /* We cannot do inheritance right now
7242 : because the current insn reg info (chain
7243 : regs) can change after that. */
7244 128177 : add_to_inherit (dst_regno, next_usage_insns);
7245 : }
7246 : /* We cannot process one reg twice here because of
7247 : usage_insns invalidation. */
7248 90812340 : if ((dst_regno < FIRST_PSEUDO_REGISTER
7249 45866262 : || reg_renumber[dst_regno] >= 0)
7250 88894681 : && ! reg->subreg_p && reg->type != OP_IN)
7251 : {
7252 88613519 : HARD_REG_SET s;
7253 :
7254 88613519 : if (split_if_necessary (dst_regno, reg->biggest_mode,
7255 : potential_reload_hard_regs,
7256 : false, curr_insn, max_uid))
7257 58441 : change_p = true;
7258 88613519 : CLEAR_HARD_REG_SET (s);
7259 88613519 : if (dst_regno < FIRST_PSEUDO_REGISTER)
7260 44946078 : add_to_hard_reg_set (&s, reg->biggest_mode, dst_regno);
7261 : else
7262 43667441 : add_to_hard_reg_set (&s, PSEUDO_REGNO_MODE (dst_regno),
7263 43667441 : reg_renumber[dst_regno]);
7264 88613519 : live_hard_regs &= ~s;
7265 177227038 : potential_reload_hard_regs &= ~s;
7266 : }
7267 : /* We should invalidate potential inheritance or
7268 : splitting for the current insn usages to the next
7269 : usage insns (see code below) as the output pseudo
7270 : prevents this. */
7271 90812340 : if ((dst_regno >= FIRST_PSEUDO_REGISTER
7272 45866262 : && reg_renumber[dst_regno] < 0)
7273 88894681 : || (reg->type == OP_OUT && ! reg->subreg_p
7274 80911327 : && (dst_regno < FIRST_PSEUDO_REGISTER
7275 41552913 : || reg_renumber[dst_regno] >= 0)))
7276 : {
7277 : /* Invalidate and mark definitions. */
7278 43470572 : if (dst_regno >= FIRST_PSEUDO_REGISTER)
7279 43470572 : usage_insns[dst_regno].check = -(int) INSN_UID (curr_insn);
7280 : else
7281 : {
7282 39358414 : nregs = hard_regno_nregs (dst_regno,
7283 39358414 : reg->biggest_mode);
7284 78975720 : for (i = 0; i < nregs; i++)
7285 79234612 : usage_insns[dst_regno + i].check
7286 39617306 : = -(int) INSN_UID (curr_insn);
7287 : }
7288 : }
7289 : }
7290 : /* Process clobbered call regs. */
7291 183084846 : if (curr_id->arg_hard_regs != NULL)
7292 19633409 : for (i = 0; (dst_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
7293 14017069 : if (dst_regno >= FIRST_PSEUDO_REGISTER)
7294 1611174 : usage_insns[dst_regno - FIRST_PSEUDO_REGISTER].check
7295 805587 : = -(int) INSN_UID (curr_insn);
7296 183084846 : if (! JUMP_P (curr_insn))
7297 171657881 : for (i = 0; i < to_inherit_num; i++)
7298 128177 : if (inherit_reload_reg (true, to_inherit[i].regno,
7299 : ALL_REGS, curr_insn,
7300 : to_inherit[i].insns))
7301 103094 : change_p = true;
7302 183084846 : if (CALL_P (curr_insn))
7303 : {
7304 7220750 : rtx cheap, pat, dest;
7305 7220750 : rtx_insn *restore;
7306 7220750 : int regno, hard_regno;
7307 :
7308 7220750 : calls_num++;
7309 7220750 : function_abi callee_abi = insn_callee_abi (curr_insn);
7310 7220750 : last_call_for_abi[callee_abi.id ()] = calls_num;
7311 7220750 : full_and_partial_call_clobbers
7312 7220750 : |= callee_abi.full_and_partial_reg_clobbers ();
7313 7220750 : first_call_insn = curr_insn;
7314 7220750 : if ((cheap = find_reg_note (curr_insn,
7315 : REG_RETURNED, NULL_RTX)) != NULL_RTX
7316 40663 : && ((cheap = XEXP (cheap, 0)), true)
7317 40663 : && (regno = REGNO (cheap)) >= FIRST_PSEUDO_REGISTER
7318 40663 : && (hard_regno = reg_renumber[regno]) >= 0
7319 29953 : && usage_insns[regno].check == curr_usage_insns_check
7320 : /* If there are pending saves/restores, the
7321 : optimization is not worth. */
7322 25782 : && usage_insns[regno].calls_num == calls_num - 1
7323 7244783 : && callee_abi.clobbers_reg_p (GET_MODE (cheap), hard_regno))
7324 : {
7325 : /* Restore the pseudo from the call result as
7326 : REG_RETURNED note says that the pseudo value is
7327 : in the call result and the pseudo is an argument
7328 : of the call. */
7329 10562 : pat = PATTERN (curr_insn);
7330 10562 : if (GET_CODE (pat) == PARALLEL)
7331 0 : pat = XVECEXP (pat, 0, 0);
7332 10562 : dest = SET_DEST (pat);
7333 : /* For multiple return values dest is PARALLEL.
7334 : Currently we handle only single return value case. */
7335 10562 : if (REG_P (dest))
7336 : {
7337 10562 : start_sequence ();
7338 10562 : emit_move_insn (cheap, copy_rtx (dest));
7339 10562 : restore = end_sequence ();
7340 10562 : lra_process_new_insns (curr_insn, NULL, restore,
7341 : "Inserting call parameter restore");
7342 : /* We don't need to save/restore of the pseudo from
7343 : this call. */
7344 10562 : usage_insns[regno].calls_num = calls_num;
7345 10562 : remove_from_hard_reg_set
7346 10562 : (&full_and_partial_call_clobbers,
7347 10562 : GET_MODE (cheap), hard_regno);
7348 10562 : bitmap_set_bit (&check_only_regs, regno);
7349 : }
7350 : }
7351 : }
7352 183084846 : to_inherit_num = 0;
7353 : /* Process insn usages. */
7354 549254538 : for (iter = 0; iter < 2; iter++)
7355 366169692 : for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
7356 593113334 : reg != NULL;
7357 226943642 : reg = reg->next)
7358 226943642 : if ((reg->type != OP_OUT
7359 89528737 : || (reg->type == OP_OUT && reg->subreg_p))
7360 227490673 : && (src_regno = reg->regno) < lra_constraint_new_regno_start)
7361 : {
7362 126588772 : if (src_regno >= FIRST_PSEUDO_REGISTER
7363 73554690 : && reg_renumber[src_regno] < 0 && reg->type == OP_IN)
7364 : {
7365 2430669 : if (usage_insns[src_regno].check == curr_usage_insns_check
7366 771160 : && (next_usage_insns
7367 771160 : = usage_insns[src_regno].insns) != NULL_RTX
7368 3201829 : && NONDEBUG_INSN_P (curr_insn))
7369 183976 : add_to_inherit (src_regno, next_usage_insns);
7370 4493386 : else if (usage_insns[src_regno].check
7371 2246693 : != -(int) INSN_UID (curr_insn))
7372 : /* Add usages but only if the reg is not set up
7373 : in the same insn. */
7374 2246693 : add_next_usage_insn (src_regno, curr_insn, reloads_num);
7375 : }
7376 71124021 : else if (src_regno < FIRST_PSEUDO_REGISTER
7377 71124021 : || reg_renumber[src_regno] >= 0)
7378 : {
7379 124018997 : bool before_p;
7380 124018997 : rtx_insn *use_insn = curr_insn;
7381 124018997 : rtx_insn *prev_insn = PREV_INSN (curr_insn);
7382 :
7383 248037994 : before_p = (JUMP_P (curr_insn)
7384 124018997 : || (CALL_P (curr_insn) && reg->type == OP_IN));
7385 124018997 : if (NONDEBUG_INSN_P (curr_insn)
7386 110916422 : && (! JUMP_P (curr_insn) || reg->type == OP_IN)
7387 234935314 : && split_if_necessary (src_regno, reg->biggest_mode,
7388 : potential_reload_hard_regs,
7389 : before_p, curr_insn, max_uid))
7390 : {
7391 219654 : if (reg->subreg_p)
7392 3152 : check_and_force_assignment_correctness_p = true;
7393 219654 : change_p = true;
7394 : /* Invalidate. */
7395 219654 : usage_insns[src_regno].check = 0;
7396 219654 : if (before_p && PREV_INSN (curr_insn) != prev_insn)
7397 : use_insn = PREV_INSN (curr_insn);
7398 : }
7399 124018997 : if (NONDEBUG_INSN_P (curr_insn))
7400 : {
7401 110916422 : if (src_regno < FIRST_PSEUDO_REGISTER)
7402 48579864 : add_to_hard_reg_set (&live_hard_regs,
7403 48579864 : reg->biggest_mode, src_regno);
7404 : else
7405 62336558 : add_to_hard_reg_set (&live_hard_regs,
7406 62336558 : PSEUDO_REGNO_MODE (src_regno),
7407 62336558 : reg_renumber[src_regno]);
7408 : }
7409 124018997 : if (src_regno >= FIRST_PSEUDO_REGISTER)
7410 70984915 : add_next_usage_insn (src_regno, use_insn, reloads_num);
7411 : else
7412 : {
7413 106164929 : for (i = 0; i < hard_regno_nregs (src_regno, reg->biggest_mode); i++)
7414 53130847 : add_next_usage_insn (src_regno + i, use_insn, reloads_num);
7415 : }
7416 : }
7417 : }
7418 : /* Process used call regs. */
7419 183084846 : if (curr_id->arg_hard_regs != NULL)
7420 19633409 : for (i = 0; (src_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
7421 14017069 : if (src_regno < FIRST_PSEUDO_REGISTER)
7422 : {
7423 13211482 : SET_HARD_REG_BIT (live_hard_regs, src_regno);
7424 13211482 : add_next_usage_insn (src_regno, curr_insn, reloads_num);
7425 : }
7426 183268822 : for (i = 0; i < to_inherit_num; i++)
7427 : {
7428 183976 : src_regno = to_inherit[i].regno;
7429 183976 : if (inherit_reload_reg (false, src_regno, ALL_REGS,
7430 : curr_insn, to_inherit[i].insns))
7431 : change_p = true;
7432 : else
7433 23202 : setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
7434 : }
7435 : }
7436 183157750 : if (update_reloads_num_p
7437 218508827 : && NONDEBUG_INSN_P (curr_insn) && curr_set != NULL_RTX)
7438 : {
7439 110170268 : int regno = -1;
7440 110170268 : if ((REG_P (SET_DEST (curr_set))
7441 80342680 : && (regno = REGNO (SET_DEST (curr_set))) >= lra_constraint_new_regno_start
7442 8141984 : && reg_renumber[regno] < 0
7443 5211476 : && (cl = lra_get_allocno_class (regno)) != NO_REGS)
7444 185585558 : || (REG_P (SET_SRC (curr_set))
7445 34931803 : && (regno = REGNO (SET_SRC (curr_set))) >= lra_constraint_new_regno_start
7446 5938327 : && reg_renumber[regno] < 0
7447 3491269 : && (cl = lra_get_allocno_class (regno)) != NO_REGS))
7448 : {
7449 8146886 : if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
7450 212725 : reloads_num++;
7451 16293772 : if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
7452 221768414 : potential_reload_hard_regs |= reg_class_contents[cl];
7453 : }
7454 : }
7455 221768414 : if (NONDEBUG_INSN_P (curr_insn))
7456 : {
7457 119718797 : int regno;
7458 :
7459 : /* Invalidate invariants with changed regs. */
7460 119718797 : curr_id = lra_get_insn_recog_data (curr_insn);
7461 307166079 : for (reg = curr_id->regs; reg != NULL; reg = reg->next)
7462 187447282 : if (reg->type != OP_IN)
7463 : {
7464 80166560 : bitmap_set_bit (&invalid_invariant_regs, reg->regno);
7465 160333120 : bitmap_set_bit (&invalid_invariant_regs,
7466 80166560 : ORIGINAL_REGNO (regno_reg_rtx[reg->regno]));
7467 : }
7468 119718797 : curr_static_id = curr_id->insn_static_data;
7469 151250897 : for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
7470 31532100 : if (reg->type != OP_IN)
7471 22577955 : bitmap_set_bit (&invalid_invariant_regs, reg->regno);
7472 119718797 : if (curr_id->arg_hard_regs != NULL)
7473 19633409 : for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
7474 14017069 : if (regno >= FIRST_PSEUDO_REGISTER)
7475 805587 : bitmap_set_bit (&invalid_invariant_regs,
7476 : regno - FIRST_PSEUDO_REGISTER);
7477 : }
7478 : /* We reached the start of the current basic block. */
7479 221768406 : if (prev_insn == NULL_RTX || prev_insn == PREV_INSN (head)
7480 430854490 : || BLOCK_FOR_INSN (prev_insn) != curr_bb)
7481 : {
7482 : /* We reached the beginning of the current block -- do
7483 : rest of spliting in the current BB. */
7484 18430579 : to_process = df_get_live_in (curr_bb);
7485 18430579 : if (BLOCK_FOR_INSN (head) != curr_bb)
7486 : {
7487 : /* We are somewhere in the middle of EBB. */
7488 5748241 : get_live_on_other_edges (EDGE_PRED (curr_bb, 0)->src,
7489 : curr_bb, &temp_bitmap);
7490 5748241 : to_process = &temp_bitmap;
7491 : }
7492 18430579 : head_p = true;
7493 192856347 : EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
7494 : {
7495 174425775 : if ((int) j >= lra_constraint_new_regno_start)
7496 : break;
7497 110806733 : if (((int) j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
7498 110819700 : && usage_insns[j].check == curr_usage_insns_check
7499 283364973 : && (next_usage_insns = usage_insns[j].insns) != NULL_RTX)
7500 : {
7501 108939205 : if (need_for_split_p (potential_reload_hard_regs, j))
7502 : {
7503 384761 : if (lra_dump_file != NULL && head_p)
7504 : {
7505 0 : fprintf (lra_dump_file,
7506 : " ----------------------------------\n");
7507 0 : head_p = false;
7508 : }
7509 384761 : if (split_reg (false, j, bb_note (curr_bb),
7510 : next_usage_insns, NULL))
7511 384761 : change_p = true;
7512 : }
7513 108939205 : usage_insns[j].check = 0;
7514 : }
7515 : }
7516 : }
7517 : }
7518 12682338 : first_call_insn = NULL;
7519 12682338 : return change_p;
7520 : }
7521 :
7522 : /* This value affects EBB forming. If probability of edge from EBB to
7523 : a BB is not greater than the following value, we don't add the BB
7524 : to EBB. */
7525 : #define EBB_PROBABILITY_CUTOFF \
7526 : ((REG_BR_PROB_BASE * param_lra_inheritance_ebb_probability_cutoff) / 100)
7527 :
7528 : /* Current number of inheritance/split iteration. */
7529 : int lra_inheritance_iter;
7530 :
7531 : /* Entry function for inheritance/split pass. Walk all basic blocks,
 : group each run of fall-through blocks (bounded by labels, exit, and
 : EBB_PROBABILITY_CUTOFF) into an extended basic block, and run
 : inherit_in_ebb on each EBB; if it changed anything, refresh the EBB
 : live info. Bails out after LRA_MAX_INHERITANCE_PASSES iterations. */
7532 : void
7533 1547407 : lra_inheritance (void)
7534 : {
7535 1547407 : int i;
7536 1547407 : basic_block bb, start_bb;
7537 1547407 : edge e;
7538 :
7539 1547407 : lra_inheritance_iter++;
7540 1547407 : if (lra_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
7541 : return;
7542 1544421 : timevar_push (TV_LRA_INHERITANCE);
7543 1544421 : if (lra_dump_file != NULL)
7544 97 : fprintf (lra_dump_file, "\n********** Inheritance #%d: **********\n\n",
7545 : lra_inheritance_iter);
7546 1544421 : curr_usage_insns_check = 0;
7547 1544421 : usage_insns = XNEWVEC (struct usage_insns, lra_constraint_new_regno_start);
7548 229037102 : for (i = 0; i < lra_constraint_new_regno_start; i++)
7549 227492681 : usage_insns[i].check = 0;
7550 1544421 : bitmap_initialize (&check_only_regs, &reg_obstack);
7551 1544421 : bitmap_initialize (&invalid_invariant_regs, &reg_obstack);
7552 1544421 : bitmap_initialize (&live_regs, &reg_obstack);
7553 1544421 : bitmap_initialize (&temp_bitmap, &reg_obstack);
7554 1544421 : bitmap_initialize (&ebb_global_regs, &reg_obstack);
7555 14226759 : FOR_EACH_BB_FN (bb, cfun)
7556 : {
7557 12682338 : start_bb = bb;
7558 12682338 : if (lra_dump_file != NULL)
7559 347 : fprintf (lra_dump_file, "EBB");
7560 : /* Form an EBB starting with BB. */
7561 12682338 : bitmap_clear (&ebb_global_regs);
7562 12682338 : bitmap_ior_into (&ebb_global_regs, df_get_live_in (bb));
7563 18430338 : for (;;)
7564 : {
7565 18430338 : if (lra_dump_file != NULL)
7566 477 : fprintf (lra_dump_file, " %d", bb->index);
7567 18430338 : if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
7568 16885917 : || LABEL_P (BB_HEAD (bb->next_bb)))
7569 : break;
7570 8160261 : e = find_fallthru_edge (bb->succs);
7571 8160261 : if (! e)
7572 : break;
7573 8160261 : if (e->probability.initialized_p ()
7574 8160261 : && e->probability.to_reg_br_prob_base () < EBB_PROBABILITY_CUTOFF)
7575 : break;
7576 : bb = bb->next_bb;
7577 : }
7578 12682338 : bitmap_ior_into (&ebb_global_regs, df_get_live_out (bb));
7579 12682338 : if (lra_dump_file != NULL)
7580 347 : fprintf (lra_dump_file, "\n");
7581 12682338 : if (inherit_in_ebb (BB_HEAD (start_bb), BB_END (bb)))
7582 : /* Remember that the EBB head and tail can change in
7583 : inherit_in_ebb. */
7584 746631 : update_ebb_live_info (BB_HEAD (start_bb), BB_END (bb));
7585 : }
7586 1544421 : bitmap_release (&ebb_global_regs);
7587 1544421 : bitmap_release (&temp_bitmap);
7588 1544421 : bitmap_release (&live_regs);
7589 1544421 : bitmap_release (&invalid_invariant_regs);
7590 1544421 : bitmap_release (&check_only_regs);
7591 1544421 : free (usage_insns);
7592 1544421 : lra_dump_insns_if_possible ("func after inheritance");
7593 1544421 : timevar_pop (TV_LRA_INHERITANCE);
7594 : }
7595 :
7596 :
7597 :
7598 : /* This page contains code to undo failed inheritance/split
7599 : transformations. */
7600 :
7601 : /* Current number of iteration undoing inheritance/split. */
7602 : int lra_undo_inheritance_iter;
7603 :
7604 : /* Fix BB live info LIVE after removing pseudos created on pass doing
7605 : inheritance/split which are REMOVED_PSEUDOS. When a removed pseudo
 : had a register restore_rtx, the original register it was created
 : from takes its place in the live set. */
7606 : static void
7607 36860676 : fix_bb_live_info (bitmap live, bitmap removed_pseudos)
7608 : {
7609 36860676 : unsigned int regno;
7610 36860676 : bitmap_iterator bi;
7611 :
7612 210257944 : EXECUTE_IF_SET_IN_BITMAP (removed_pseudos, 0, regno, bi)
7613 173397268 : if (bitmap_clear_bit (live, regno)
7614 173397268 : && REG_P (lra_reg_info[regno].restore_rtx))
7615 1207828 : bitmap_set_bit (live, REGNO (lra_reg_info[regno].restore_rtx));
7616 36860676 : }
7617 :
7618 : /* Return regno of the (subreg of) REG. Otherwise, return a negative
7619 : number. A SUBREG is looked through to its inner reg; any non-REG
 : (e.g. MEM or constant) yields -1. */
7620 : static int
7621 67365657 : get_regno (rtx reg)
7622 : {
7623 1086439 : if (GET_CODE (reg) == SUBREG)
7624 1028564 : reg = SUBREG_REG (reg);
7625 67365657 : if (REG_P (reg))
7626 43564712 : return REGNO (reg);
7627 : return -1;
7628 : }
7629 :
7630 : /* Delete a move INSN with destination reg DREGNO and a previous
7631 : clobber insn with the same regno. The inheritance/split code can
7632 : generate moves with preceding clobber and when we delete such moves
7633 : we should delete the clobber insn too to keep the correct life
7634 : info. */
7635 : static void
7636 746178 : delete_move_and_clobber (rtx_insn *insn, int dregno)
7637 : {
7638 746178 : rtx_insn *prev_insn = PREV_INSN (insn);
7639 :
7640 746178 : lra_set_insn_deleted (insn);
7641 746178 : lra_assert (dregno >= 0);
 : /* Only delete the previous insn when it is a CLOBBER of exactly
 : the same regno as the move's destination. */
7642 746178 : if (prev_insn != NULL && NONDEBUG_INSN_P (prev_insn)
7643 318241 : && GET_CODE (PATTERN (prev_insn)) == CLOBBER
7644 746534 : && dregno == get_regno (XEXP (PATTERN (prev_insn), 0)))
7645 0 : lra_set_insn_deleted (prev_insn);
7646 746178 : }
7647 :
7648 : /* Remove inheritance/split pseudos which are in REMOVE_PSEUDOS and
7649 : return true if we did any change. The undo transformations for
7650 : inheritance looks like
7651 : i <- i2
7652 : p <- i => p <- i2
7653 : or removing
7654 : p <- i, i <- p, and i <- i3
7655 : where p is original pseudo from which inheritance pseudo i was
7656 : created, i and i3 are removed inheritance pseudos, i2 is another
7657 : not removed inheritance pseudo. All split pseudos or other
7658 : occurrences of removed inheritance pseudos are changed on the
7659 : corresponding original pseudos.
7660 :
7661 : The function also schedules insns changed and created during
7662 : inheritance/split pass for processing by the subsequent constraint
7663 : pass. */
7664 : static bool
7665 1544421 : remove_inheritance_pseudos (bitmap remove_pseudos)
7666 : {
7667 1544421 : basic_block bb;
7668 1544421 : int regno, sregno, prev_sregno, dregno;
7669 1544421 : rtx restore_rtx;
7670 1544421 : rtx set, prev_set;
7671 1544421 : rtx_insn *prev_insn;
7672 1544421 : bool change_p, done_p;
7673 :
7674 1544421 : change_p = ! bitmap_empty_p (remove_pseudos);
7675 : /* We cannot finish the function right away if CHANGE_P is true
7676 : because we need to mark insns affected by previous
7677 : inheritance/split pass for processing by the subsequent
7678 : constraint pass. */
7679 19974759 : FOR_EACH_BB_FN (bb, cfun)
7680 : {
7681 18430338 : fix_bb_live_info (df_get_live_in (bb), remove_pseudos);
7682 18430338 : fix_bb_live_info (df_get_live_out (bb), remove_pseudos);
7683 242734336 : FOR_BB_INSNS_REVERSE (bb, curr_insn)
7684 : {
7685 224303998 : if (! INSN_P (curr_insn))
7686 35769510 : continue;
7687 188534488 : done_p = false;
7688 188534488 : sregno = dregno = -1;
7689 46464149 : if (change_p && NONDEBUG_INSN_P (curr_insn)
7690 220376276 : && (set = single_set (curr_insn)) != NULL_RTX)
7691 : {
7692 30833497 : dregno = get_regno (SET_DEST (set));
7693 61666994 : sregno = get_regno (SET_SRC (set));
7694 : }
7695 :
7696 188534488 : if (sregno >= 0 && dregno >= 0)
7697 : {
7698 10827932 : if (bitmap_bit_p (remove_pseudos, dregno)
7699 10827932 : && ! REG_P (lra_reg_info[dregno].restore_rtx))
7700 : {
7701 : /* invariant inheritance pseudo <- original pseudo */
7702 7102 : if (lra_dump_file != NULL)
7703 : {
7704 0 : fprintf (lra_dump_file, "    Removing invariant inheritance:\n");
7705 0 : dump_insn_slim (lra_dump_file, curr_insn);
7706 0 : fprintf (lra_dump_file, "\n");
7707 : }
7708 7102 : delete_move_and_clobber (curr_insn, dregno);
7709 7102 : done_p = true;
7710 : }
7711 10820830 : else if (bitmap_bit_p (remove_pseudos, sregno)
7712 10820830 : && ! REG_P (lra_reg_info[sregno].restore_rtx))
7713 : {
7714 : /* reload pseudo <- invariant inheritance pseudo */
7715 7102 : start_sequence ();
7716 : /* We cannot just change the source. It might be
7717 : an insn different from the move. */
7718 7102 : emit_insn (lra_reg_info[sregno].restore_rtx);
7719 7102 : rtx_insn *new_insns = end_sequence ();
7720 7102 : lra_assert (single_set (new_insns) != NULL
7721 : && SET_DEST (set) == SET_DEST (single_set (new_insns)));
7722 7102 : lra_process_new_insns (curr_insn, NULL, new_insns,
7723 : "Changing reload<-invariant inheritance");
7724 7102 : delete_move_and_clobber (curr_insn, dregno);
7725 7102 : done_p = true;
7726 : }
7727 10813728 : else if ((bitmap_bit_p (remove_pseudos, sregno)
7728 1215434 : && (get_regno (lra_reg_info[sregno].restore_rtx) == dregno
7729 572566 : || (bitmap_bit_p (remove_pseudos, dregno)
7730 188488 : && get_regno (lra_reg_info[sregno].restore_rtx) >= 0
7731 188488 : && (get_regno (lra_reg_info[sregno].restore_rtx)
7732 188488 : == get_regno (lra_reg_info[dregno].restore_rtx)))))
7733 11292050 : || (bitmap_bit_p (remove_pseudos, dregno)
7734 650348 : && get_regno (lra_reg_info[dregno].restore_rtx) == sregno))
7735 : /* One of the following cases:
7736 : original <- removed inheritance pseudo
7737 : removed inherit pseudo <- another removed inherit pseudo
7738 : removed inherit pseudo <- original pseudo
7739 : Or
7740 : removed_split_pseudo <- original_reg
7741 : original_reg <- removed_split_pseudo */
7742 : {
7743 178585 : if (lra_dump_file != NULL)
7744 : {
7745 0 : fprintf (lra_dump_file, "    Removing %s:\n",
7746 0 : bitmap_bit_p (&lra_split_regs, sregno)
7747 0 : || bitmap_bit_p (&lra_split_regs, dregno)
7748 : ? "split" : "inheritance");
7749 0 : dump_insn_slim (lra_dump_file, curr_insn);
7750 : }
7751 178585 : delete_move_and_clobber (curr_insn, dregno);
7752 178585 : done_p = true;
7753 : }
7754 10635143 : else if (bitmap_bit_p (remove_pseudos, sregno)
7755 10635143 : && bitmap_bit_p (&lra_inheritance_pseudos, sregno))
7756 : {
7757 : /* Search the following pattern:
7758 : inherit_or_split_pseudo1 <- inherit_or_split_pseudo2
7759 : original_pseudo <- inherit_or_split_pseudo1
7760 : where the 2nd insn is the current insn and
7761 : inherit_or_split_pseudo2 is not removed. If it is found,
7762 : change the current insn onto:
7763 : original_pseudo <- inherit_or_split_pseudo2. */
7764 728616 : for (prev_insn = PREV_INSN (curr_insn);
7765 728616 : prev_insn != NULL_RTX && ! NONDEBUG_INSN_P (prev_insn);
7766 250294 : prev_insn = PREV_INSN (prev_insn))
7767 : ;
7768 478322 : if (prev_insn != NULL_RTX && BLOCK_FOR_INSN (prev_insn) == bb
7769 465534 : && (prev_set = single_set (prev_insn)) != NULL_RTX
7770 : /* There should be no subregs in insn we are
7771 : searching because only the original reg might
7772 : be in subreg when we changed the mode of
7773 : load/store for splitting. */
7774 459632 : && REG_P (SET_DEST (prev_set))
7775 353285 : && REG_P (SET_SRC (prev_set))
7776 271916 : && (int) REGNO (SET_DEST (prev_set)) == sregno
7777 185469 : && ((prev_sregno = REGNO (SET_SRC (prev_set)))
7778 : >= FIRST_PSEUDO_REGISTER)
7779 185469 : && (lra_reg_info[prev_sregno].restore_rtx == NULL_RTX
7780 143746 : ||
7781 : /* As we consider chain of inheritance or
7782 : splitting described in above comment we should
7783 : check that sregno and prev_sregno were
7784 : inheritance/split pseudos created from the
7785 : same original regno. */
7786 287492 : (get_regno (lra_reg_info[sregno].restore_rtx) >= 0
7787 287492 : && (get_regno (lra_reg_info[sregno].restore_rtx)
7788 287492 : == get_regno (lra_reg_info[prev_sregno].restore_rtx))))
7789 663791 : && ! bitmap_bit_p (remove_pseudos, prev_sregno))
7790 : {
7791 102854 : int restore_regno = get_regno (lra_reg_info[sregno].restore_rtx);
7792 102854 : if (restore_regno < 0)
7793 0 : restore_regno = prev_sregno;
7794 102854 : lra_assert (GET_MODE (SET_SRC (prev_set))
7795 : == GET_MODE (regno_reg_rtx[restore_regno]));
7796 : /* Although we have a single set, the insn can
7797 : contain more than one sregno register occurrence
7798 : as a source. Change all occurrences. */
7799 102854 : lra_substitute_pseudo_within_insn (curr_insn, sregno,
7800 : regno_reg_rtx[restore_regno],
7801 : false);
7802 : /* As we are finishing with processing the insn
7803 : here, check the destination too as it might be
7804 : an inheritance pseudo for another pseudo. */
7805 102854 : if (bitmap_bit_p (remove_pseudos, dregno)
7806 0 : && bitmap_bit_p (&lra_inheritance_pseudos, dregno)
7807 102854 : && (restore_rtx
7808 0 : = lra_reg_info[dregno].restore_rtx) != NULL_RTX)
7809 : {
7810 0 : if (GET_CODE (SET_DEST (set)) == SUBREG)
7811 0 : SUBREG_REG (SET_DEST (set)) = restore_rtx;
7812 : else
7813 0 : SET_DEST (set) = restore_rtx;
7814 : }
7815 102854 : lra_push_insn_and_update_insn_regno_info (curr_insn);
7816 102854 : lra_set_used_insn_alternative_by_uid
7817 102854 : (INSN_UID (curr_insn), LRA_UNKNOWN_ALT);
7818 102854 : done_p = true;
7819 102854 : if (lra_dump_file != NULL)
7820 : {
7821 0 : fprintf (lra_dump_file, "    Change reload insn:\n");
7822 0 : dump_insn_slim (lra_dump_file, curr_insn);
7823 : }
7824 : }
7825 : }
7826 : }
7827 192789 : if (! done_p)
7828 : {
7829 188238845 : struct lra_insn_reg *reg;
7830 188238845 : bool restored_regs_p = false;
7831 188238845 : bool kept_regs_p = false;
7832 :
 : /* No special-case handling applied: restore any removed
 : pseudos occurring anywhere in the insn in place. */
7833 188238845 : curr_id = lra_get_insn_recog_data (curr_insn);
7834 393827947 : for (reg = curr_id->regs; reg != NULL; reg = reg->next)
7835 : {
7836 205589102 : regno = reg->regno;
7837 205589102 : restore_rtx = lra_reg_info[regno].restore_rtx;
7838 205589102 : if (restore_rtx != NULL_RTX)
7839 : {
7840 5913471 : if (change_p && bitmap_bit_p (remove_pseudos, regno))
7841 : {
7842 828079 : lra_substitute_pseudo_within_insn
7843 828079 : (curr_insn, regno, restore_rtx, false);
7844 828079 : restored_regs_p = true;
7845 : }
7846 : else
7847 : kept_regs_p = true;
7848 : }
7849 : }
7850 188238845 : if (NONDEBUG_INSN_P (curr_insn) && kept_regs_p)
7851 : {
7852 : /* The instruction has changed since the previous
7853 : constraints pass. */
7854 4461456 : lra_push_insn_and_update_insn_regno_info (curr_insn);
7855 4461456 : lra_set_used_insn_alternative_by_uid
7856 4461456 : (INSN_UID (curr_insn), LRA_UNKNOWN_ALT);
7857 : }
7858 183777389 : else if (restored_regs_p)
7859 : /* The instruction has been restored to the form that
7860 : it had during the previous constraints pass. */
7861 665350 : lra_update_insn_regno_info (curr_insn);
7862 5126806 : if (restored_regs_p && lra_dump_file != NULL)
7863 : {
7864 0 : fprintf (lra_dump_file, "   Insn after restoring regs:\n");
7865 0 : dump_insn_slim (lra_dump_file, curr_insn);
7866 : }
7867 : }
7868 : }
7869 : }
7870 1544421 : return change_p;
7871 : }
7872 :
7873 : /* If optional reload pseudos failed to get a hard register or were not
7874 : inherited, it is better to remove optional reloads. We do this
7875 : transformation after undoing inheritance to figure out necessity to
7876 : remove optional reloads easier. Return true if we do any
7877 : change. */
7878 : static bool
7879 1544421 : undo_optional_reloads (void)
7880 : {
7881 1544421 : bool change_p, keep_p;
7882 1544421 : unsigned int regno, uid;
7883 1544421 : bitmap_iterator bi, bi2;
7884 1544421 : rtx_insn *insn;
7885 1544421 : rtx set, src, dest;
7886 1544421 : auto_bitmap removed_optional_reload_pseudos (&reg_obstack);
7887 :
 : /* First decide which optional reload pseudos to keep: those whose
 : original pseudo got a hard register, or which were inherited. */
7888 1544421 : bitmap_copy (removed_optional_reload_pseudos, &lra_optional_reload_pseudos);
7889 2549741 : EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
7890 : {
7891 1005320 : keep_p = false;
7892 : /* Keep optional reloads from previous subpasses. */
7893 1005320 : if (lra_reg_info[regno].restore_rtx == NULL_RTX
7894 : /* If the original pseudo changed its allocation, just
7895 : removing the optional pseudo is dangerous as the original
7896 : pseudo will have longer live range. */
7897 1005320 : || reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] >= 0)
7898 : keep_p = true;
7899 619825 : else if (reg_renumber[regno] >= 0)
7900 1775302 : EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi2)
7901 : {
7902 1294057 : insn = lra_insn_recog_data[uid]->insn;
7903 1294057 : if ((set = single_set (insn)) == NULL_RTX)
7904 6634 : continue;
7905 1287423 : src = SET_SRC (set);
7906 1287423 : dest = SET_DEST (set);
7907 1287423 : if ((! REG_P (src) && ! SUBREG_P (src))
7908 689586 : || (! REG_P (dest) && ! SUBREG_P (dest)))
7909 597869 : continue;
7910 689554 : if (get_regno (dest) == (int) regno
7911 : /* Ignore insn for optional reloads itself. */
7912 1161566 : && (get_regno (lra_reg_info[regno].restore_rtx)
7913 580783 : != get_regno (src))
7914 : /* Check only inheritance on last inheritance pass. */
7915 123258 : && get_regno (src) >= new_regno_start
7916 : /* Check that the optional reload was inherited. */
7917 812812 : && bitmap_bit_p (&lra_inheritance_pseudos, get_regno (src)))
7918 : {
7919 : keep_p = true;
7920 : break;
7921 : }
7922 : }
7923 989998 : if (keep_p)
7924 : {
7925 508753 : bitmap_clear_bit (removed_optional_reload_pseudos, regno);
7926 508753 : if (lra_dump_file != NULL)
7927 3 : fprintf (lra_dump_file, "Keep optional reload reg %d\n", regno);
7928 : }
7929 : }
7930 1544421 : change_p = ! bitmap_empty_p (removed_optional_reload_pseudos);
7931 1544421 : auto_bitmap insn_bitmap (&reg_obstack);
 : /* Now remove the rest: delete the optional reload moves and
 : substitute the original pseudo back into the remaining insns. */
7932 2040988 : EXECUTE_IF_SET_IN_BITMAP (removed_optional_reload_pseudos, 0, regno, bi)
7933 : {
7934 496567 : if (lra_dump_file != NULL)
7935 2 : fprintf (lra_dump_file, "Remove optional reload reg %d\n", regno);
7936 496567 : bitmap_copy (insn_bitmap, &lra_reg_info[regno].insn_bitmap);
7937 1577059 : EXECUTE_IF_SET_IN_BITMAP (insn_bitmap, 0, uid, bi2)
7938 : {
7939 : /* We may have already removed a clobber. */
7940 1080492 : if (!lra_insn_recog_data[uid])
7941 0 : continue;
7942 1080492 : insn = lra_insn_recog_data[uid]->insn;
7943 1080492 : if ((set = single_set (insn)) != NULL_RTX)
7944 : {
7945 1074581 : src = SET_SRC (set);
7946 1074581 : dest = SET_DEST (set);
7947 490636 : if ((REG_P (src) || SUBREG_P (src))
7948 583957 : && (REG_P (dest) || SUBREG_P (dest))
7949 1658506 : && ((get_regno (src) == (int) regno
7950 223774 : && (get_regno (lra_reg_info[regno].restore_rtx)
7951 111887 : == get_regno (dest)))
7952 502514 : || (get_regno (dest) == (int) regno
7953 472038 : && (get_regno (lra_reg_info[regno].restore_rtx)
7954 472038 : == get_regno (src)))))
7955 : {
7956 553389 : if (lra_dump_file != NULL)
7957 : {
7958 0 : fprintf (lra_dump_file, "  Deleting move %u\n",
7959 0 : INSN_UID (insn));
7960 0 : dump_insn_slim (lra_dump_file, insn);
7961 : }
7962 1106778 : delete_move_and_clobber (insn, get_regno (dest));
7963 553389 : continue;
7964 : }
7965 : /* We should not worry about generating memory-memory
7966 : moves here as if the corresponding inheritance did
7967 : not work (inheritance pseudo did not get a hard reg),
7968 : we remove the inheritance pseudo and the optional
7969 : reload. */
7970 : }
7971 527103 : if (GET_CODE (PATTERN (insn)) == CLOBBER
7972 0 : && REG_P (SET_DEST (insn))
7973 527103 : && get_regno (SET_DEST (insn)) == (int) regno)
7974 : /* Refuse to remap clobbers to preexisting pseudos. */
7975 0 : gcc_unreachable ();
7976 527103 : lra_substitute_pseudo_within_insn
7977 527103 : (insn, regno, lra_reg_info[regno].restore_rtx, false);
7978 527103 : lra_update_insn_regno_info (insn);
7979 527103 : if (lra_dump_file != NULL)
7980 : {
7981 4 : fprintf (lra_dump_file,
7982 : "  Restoring original insn:\n");
7983 4 : dump_insn_slim (lra_dump_file, insn);
7984 : }
7985 : }
7986 : }
7987 : /* Clear restore_regnos. */
7988 2549741 : EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
7989 1005320 : lra_reg_info[regno].restore_rtx = NULL_RTX;
7990 1544421 : return change_p;
7991 1544421 : }
7992 :
7993 : /* Entry function for undoing inheritance/split transformation. Return true
7994 : if we did any RTL change in this pass. Collects the set of
 : inheritance/split pseudos whose transformation did not pay off
 : (no hard reg assigned, or split into the same hard reg) and removes
 : them via remove_inheritance_pseudos and undo_optional_reloads. */
7995 : bool
7996 1547407 : lra_undo_inheritance (void)
7997 : {
7998 1547407 : unsigned int regno;
7999 1547407 : int hard_regno;
8000 1547407 : int n_all_inherit, n_inherit, n_all_split, n_split;
8001 1547407 : rtx restore_rtx;
8002 1547407 : bitmap_iterator bi;
8003 1547407 : bool change_p;
8004 :
8005 1547407 : lra_undo_inheritance_iter++;
8006 1547407 : if (lra_undo_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
8007 : return false;
8008 1544421 : if (lra_dump_file != NULL)
8009 97 : fprintf (lra_dump_file,
8010 : "\n********** Undoing inheritance #%d: **********\n\n",
8011 : lra_undo_inheritance_iter);
8012 1544421 : auto_bitmap remove_pseudos (&reg_obstack);
8013 1544421 : n_inherit = n_all_inherit = 0;
8014 3404394 : EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
8015 1859973 : if (lra_reg_info[regno].restore_rtx != NULL_RTX)
8016 : {
8017 1177100 : n_all_inherit++;
8018 1177100 : if (reg_renumber[regno] < 0
8019 : /* If the original pseudo changed its allocation, just
8020 : removing inheritance is dangerous as for changing
8021 : allocation we used shorter live-ranges. */
8022 1177100 : && (! REG_P (lra_reg_info[regno].restore_rtx)
8023 420603 : || reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] < 0))
8024 427705 : bitmap_set_bit (remove_pseudos, regno);
8025 : else
8026 749395 : n_inherit++;
8027 : }
8028 1544421 : if (lra_dump_file != NULL && n_all_inherit != 0)
8029 2 : fprintf (lra_dump_file, "Inherit %d out of %d (%.2f%%)\n",
8030 : n_inherit, n_all_inherit,
8031 2 : (double) n_inherit / n_all_inherit * 100);
8032 1544421 : n_split = n_all_split = 0;
8033 2509355 : EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
8034 964934 : if ((restore_rtx = lra_reg_info[regno].restore_rtx) != NULL_RTX)
8035 : {
8036 662891 : int restore_regno = REGNO (restore_rtx);
8037 :
8038 662891 : n_all_split++;
8039 1325700 : hard_regno = (restore_regno >= FIRST_PSEUDO_REGISTER
8040 662891 : ? reg_renumber[restore_regno] : restore_regno);
 : /* A split pseudo that landed in the same hard reg as its
 : origin (or got none) brought no benefit -- remove it. */
8041 662891 : if (hard_regno < 0 || reg_renumber[regno] == hard_regno)
8042 2207 : bitmap_set_bit (remove_pseudos, regno);
8043 : else
8044 : {
8045 660684 : n_split++;
8046 660684 : if (lra_dump_file != NULL)
8047 0 : fprintf (lra_dump_file, "      Keep split r%d (orig=r%d)\n",
8048 : regno, restore_regno);
8049 : }
8050 : }
8051 1544421 : if (lra_dump_file != NULL && n_all_split != 0)
8052 0 : fprintf (lra_dump_file, "Split %d out of %d (%.2f%%)\n",
8053 : n_split, n_all_split,
8054 0 : (double) n_split / n_all_split * 100);
8055 1544421 : change_p = remove_inheritance_pseudos (remove_pseudos);
8056 : /* Clear restore_regnos. */
8057 3404394 : EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
8058 1859973 : lra_reg_info[regno].restore_rtx = NULL_RTX;
8059 2509355 : EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
8060 964934 : lra_reg_info[regno].restore_rtx = NULL_RTX;
8061 1544421 : change_p = undo_optional_reloads () || change_p;
8062 : if (change_p)
8063 111038 : lra_dump_insns_if_possible ("changed func after undoing inheritance");
8064 1544421 : return change_p;
8065 1544421 : }
|