Line data Source code
1 : /* Code for RTL transformations to satisfy insn constraints.
2 : Copyright (C) 2010-2026 Free Software Foundation, Inc.
3 : Contributed by Vladimir Makarov <vmakarov@redhat.com>.
4 :
5 : This file is part of GCC.
6 :
7 : GCC is free software; you can redistribute it and/or modify it under
8 : the terms of the GNU General Public License as published by the Free
9 : Software Foundation; either version 3, or (at your option) any later
10 : version.
11 :
12 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : for more details.
16 :
17 : You should have received a copy of the GNU General Public License
18 : along with GCC; see the file COPYING3. If not see
19 : <http://www.gnu.org/licenses/>. */
20 :
21 :
22 : /* This file contains code for 3 passes: constraint pass,
23 : inheritance/split pass, and pass for undoing failed inheritance and
24 : split.
25 :
26 : The major goal of constraint pass is to transform RTL to satisfy
27 : insn and address constraints by:
28 : o choosing insn alternatives;
29 : o generating *reload insns* (or reloads in brief) and *reload
30 : pseudos* which will get necessary hard registers later;
31 : o substituting pseudos with equivalent values and removing the
32 : instructions that initialized those pseudos.
33 :
34 : The constraint pass has biggest and most complicated code in LRA.
35 : There are a lot of important details like:
36 : o reuse of input reload pseudos to simplify reload pseudo
37 : allocations;
38 : o some heuristics to choose insn alternative to improve the
39 : inheritance;
40 : o early clobbers etc.
41 :
42 : The pass is mimicking former reload pass in alternative choosing
43 : because the reload pass is oriented to current machine description
44 : model. It might be changed if the machine description model is
45 : changed.
46 :
47 : There is special code for preventing all LRA and this pass cycling
48 : in case of bugs.
49 :
50 : On the first iteration of the pass we process every instruction and
51 : choose an alternative for each one. On subsequent iterations we try
52 : to avoid reprocessing instructions if we can be sure that the old
53 : choice is still valid.
54 :
 55 :    The inheritance/split pass is to transform code to achieve
 56 :    inheritance and live range splitting.  It is done on backward
 57 :    traversal of EBBs.
58 :
59 : The inheritance optimization goal is to reuse values in hard
60 : registers. There is analogous optimization in old reload pass. The
61 : inheritance is achieved by following transformation:
62 :
63 : reload_p1 <- p reload_p1 <- p
64 : ... new_p <- reload_p1
65 : ... => ...
66 : reload_p2 <- p reload_p2 <- new_p
67 :
68 : where p is spilled and not changed between the insns. Reload_p1 is
69 : also called *original pseudo* and new_p is called *inheritance
70 : pseudo*.
71 :
72 : The subsequent assignment pass will try to assign the same (or
73 : another if it is not possible) hard register to new_p as to
74 : reload_p1 or reload_p2.
75 :
76 : If the assignment pass fails to assign a hard register to new_p,
77 : this file will undo the inheritance and restore the original code.
78 : This is because implementing the above sequence with a spilled
79 : new_p would make the code much worse. The inheritance is done in
80 : EBB scope. The above is just a simplified example to get an idea
81 : of the inheritance as the inheritance is also done for non-reload
82 : insns.
83 :
84 : Splitting (transformation) is also done in EBB scope on the same
85 : pass as the inheritance:
86 :
87 : r <- ... or ... <- r r <- ... or ... <- r
88 : ... s <- r (new insn -- save)
89 : ... =>
90 : ... r <- s (new insn -- restore)
91 : ... <- r ... <- r
92 :
93 : The *split pseudo* s is assigned to the hard register of the
94 : original pseudo or hard register r.
95 :
96 : Splitting is done:
97 : o In EBBs with high register pressure for global pseudos (living
98 : in at least 2 BBs) and assigned to hard registers when there
 99 :      are more than one reload needing the hard registers;
100 : o for pseudos needing save/restore code around calls.
101 :
102 : If the split pseudo still has the same hard register as the
103 : original pseudo after the subsequent assignment pass or the
104 : original pseudo was split, the opposite transformation is done on
105 : the same pass for undoing inheritance. */
106 :
107 : #undef REG_OK_STRICT
108 :
109 : #include "config.h"
110 : #include "system.h"
111 : #include "coretypes.h"
112 : #include "backend.h"
113 : #include "hooks.h"
114 : #include "target.h"
115 : #include "rtl.h"
116 : #include "tree.h"
117 : #include "stmt.h"
118 : #include "predict.h"
119 : #include "df.h"
120 : #include "memmodel.h"
121 : #include "tm_p.h"
122 : #include "expmed.h"
123 : #include "optabs.h"
124 : #include "regs.h"
125 : #include "ira.h"
126 : #include "recog.h"
127 : #include "output.h"
128 : #include "addresses.h"
129 : #include "expr.h"
130 : #include "cfgrtl.h"
131 : #include "rtl-error.h"
132 : #include "lra.h"
133 : #include "lra-int.h"
134 : #include "print-rtl.h"
135 : #include "function-abi.h"
136 : #include "rtl-iter.h"
137 : #include "hash-set.h"
138 :
139 : /* Value of LRA_CURR_RELOAD_NUM at the beginning of BB of the current
140 : insn. Remember that LRA_CURR_RELOAD_NUM is the number of emitted
141 : reload insns. */
142 : static int bb_reload_num;
143 :
144 : /* The current insn being processed and corresponding its single set
145 : (NULL otherwise), its data (basic block, the insn data, the insn
146 : static data, and the mode of each operand). */
147 : static rtx_insn *curr_insn;
148 : static rtx curr_insn_set;
149 : static basic_block curr_bb;
150 : static lra_insn_recog_data_t curr_id;
151 : static struct lra_static_insn_data *curr_static_id;
152 : static machine_mode curr_operand_mode[MAX_RECOG_OPERANDS];
153 : /* Mode of the register substituted by its equivalence with VOIDmode
154 : (e.g. constant) and whose subreg is given operand of the current
155 : insn. VOIDmode in all other cases. */
156 : static machine_mode original_subreg_reg_mode[MAX_RECOG_OPERANDS];
157 : /* The first call insn after curr_insn within the EBB during inherit_in_ebb
158 : or NULL outside of that function. */
159 : static rtx_insn *first_call_insn;
160 :
161 :
162 :
163 : /* Start numbers for new registers and insns at the current constraints
164 : pass start. */
165 : static int new_regno_start;
166 : static int new_insn_uid_start;
167 :
168 : /* If LOC is nonnull, strip any outer subreg from it. */
169 : static inline rtx *
170 228567800 : strip_subreg (rtx *loc)
171 : {
172 101137445 : return loc && GET_CODE (*loc) == SUBREG ? &SUBREG_REG (*loc) : loc;
173 : }
174 :
175 : /* Return hard regno of REGNO or if it is was not assigned to a hard
176 : register, use a hard register from its allocno class. */
177 : static int
178 71596 : get_try_hard_regno (int regno)
179 : {
180 71596 : int hard_regno;
181 71596 : enum reg_class rclass;
182 :
183 71596 : if ((hard_regno = regno) >= FIRST_PSEUDO_REGISTER)
184 71596 : hard_regno = lra_get_regno_hard_regno (regno);
185 71596 : if (hard_regno >= 0)
186 : return hard_regno;
187 39097 : rclass = lra_get_allocno_class (regno);
188 39097 : if (rclass == NO_REGS)
189 : return -1;
190 37578 : return ira_class_hard_regs[rclass][0];
191 : }
192 :
193 : /* Return the hard regno of X after removing its subreg. If X is not a
194 : register or a subreg of a register, return -1. If X is a pseudo, use its
195 : assignment. If X is a hard regno, return the final hard regno which will be
196 : after elimination. */
197 : static int
198 288377536 : get_hard_regno (rtx x)
199 : {
200 288377536 : rtx reg;
201 288377536 : int hard_regno;
202 :
203 288377536 : reg = x;
204 288377536 : if (SUBREG_P (x))
205 5121671 : reg = SUBREG_REG (x);
206 288377536 : if (! REG_P (reg))
207 : return -1;
208 199225366 : int regno = REGNO (reg);
209 199225366 : if (HARD_REGISTER_NUM_P (regno))
210 34822373 : hard_regno = lra_get_elimination_hard_regno (regno);
211 : else
212 164402993 : hard_regno = lra_get_regno_hard_regno (regno);
213 199225366 : if (hard_regno < 0)
214 : return -1;
215 181335209 : if (SUBREG_P (x))
216 4430314 : hard_regno += subreg_regno_offset (hard_regno, GET_MODE (reg),
217 4430314 : SUBREG_BYTE (x), GET_MODE (x));
218 : return hard_regno;
219 : }
220 :
221 : /* If REGNO is a hard register or has been allocated a hard register,
222 : return the class of that register. If REGNO is a reload pseudo
223 : created by the current constraints pass, return its allocno class.
224 : Return NO_REGS otherwise. */
225 : static enum reg_class
226 514386615 : get_reg_class (int regno)
227 : {
228 514386615 : int hard_regno;
229 :
230 514386615 : if (HARD_REGISTER_NUM_P (regno))
231 64821314 : hard_regno = lra_get_elimination_hard_regno (regno);
232 : else
233 449565301 : hard_regno = lra_get_regno_hard_regno (regno);
234 514386615 : if (hard_regno >= 0)
235 321281281 : return REGNO_REG_CLASS (hard_regno);
236 193105334 : if (regno >= new_regno_start)
237 62274546 : return lra_get_allocno_class (regno);
238 : return NO_REGS;
239 : }
240 :
241 : /* Return true if REG_CLASS has enough allocatable hard regs to keep value of
242 : REG_MODE. */
243 : static bool
244 18737890 : enough_allocatable_hard_regs_p (enum reg_class reg_class,
245 : enum machine_mode reg_mode)
246 : {
247 18737890 : int i, j, hard_regno, class_size, nregs;
248 :
249 37475780 : if (hard_reg_set_subset_p (reg_class_contents[reg_class], lra_no_alloc_regs))
250 : return false;
251 6373849 : class_size = ira_class_hard_regs_num[reg_class];
252 6373849 : for (i = 0; i < class_size; i++)
253 : {
254 6373849 : hard_regno = ira_class_hard_regs[reg_class][i];
255 6373849 : nregs = hard_regno_nregs (hard_regno, reg_mode);
256 6373849 : if (nregs == 1)
257 : return true;
258 254757 : for (j = 0; j < nregs; j++)
259 169838 : if (TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno + j)
260 169838 : || ! TEST_HARD_REG_BIT (reg_class_contents[reg_class],
261 : hard_regno + j))
262 : break;
263 84919 : if (j >= nregs)
264 : return true;
265 : }
266 : return false;
267 : }
268 :
269 : /* True if C is a non-empty register class that has too few registers
270 : to be safely used as a reload target class. */
271 : #define SMALL_REGISTER_CLASS_P(C) \
272 : (ira_class_hard_regs_num [(C)] == 1 \
273 : || (ira_class_hard_regs_num [(C)] >= 1 \
274 : && targetm.class_likely_spilled_p (C)))
275 :
276 : /* Return true if REG satisfies (or will satisfy) reg class constraint
277 : CL. Use elimination first if REG is a hard register. If REG is a
278 : reload pseudo created by this constraints pass, assume that it will
279 : be allocated a hard register from its allocno class, but allow that
280 : class to be narrowed to CL if it is currently a superset of CL and
281 : if either:
282 :
283 : - ALLOW_ALL_RELOAD_CLASS_CHANGES_P is true or
284 : - the instruction we're processing is not a reload move.
285 :
286 : If NEW_CLASS is nonnull, set *NEW_CLASS to the new allocno class of
287 : REGNO (reg), or NO_REGS if no change in its class was needed. */
288 : static bool
289 218954172 : in_class_p (rtx reg, enum reg_class cl, enum reg_class *new_class,
290 : bool allow_all_reload_class_changes_p = false)
291 : {
292 218954172 : enum reg_class rclass, common_class;
293 218954172 : machine_mode reg_mode;
294 218954172 : rtx src;
295 218954172 : int regno = REGNO (reg);
296 :
297 218954172 : if (new_class != NULL)
298 112406363 : *new_class = NO_REGS;
299 218954172 : if (regno < FIRST_PSEUDO_REGISTER)
300 : {
301 27466267 : rtx final_reg = reg;
302 27466267 : rtx *final_loc = &final_reg;
303 :
304 27466267 : lra_eliminate_reg_if_possible (final_loc);
305 27466267 : return TEST_HARD_REG_BIT (reg_class_contents[cl], REGNO (*final_loc));
306 : }
307 191487905 : reg_mode = GET_MODE (reg);
308 191487905 : rclass = get_reg_class (regno);
309 191487905 : src = curr_insn_set != NULL ? SET_SRC (curr_insn_set) : NULL;
310 191487905 : if (regno < new_regno_start
311 : /* Do not allow the constraints for reload instructions to
312 : influence the classes of new pseudos. These reloads are
313 : typically moves that have many alternatives, and restricting
314 : reload pseudos for one alternative may lead to situations
315 : where other reload pseudos are no longer allocatable. */
316 191487905 : || (!allow_all_reload_class_changes_p
317 15089139 : && INSN_UID (curr_insn) >= new_insn_uid_start
318 14554420 : && src != NULL
319 14554420 : && ((REG_P (src) || MEM_P (src))
320 1397230 : || (GET_CODE (src) == SUBREG
321 621072 : && (REG_P (SUBREG_REG (src)) || MEM_P (SUBREG_REG (src)))))))
322 : /* When we don't know what class will be used finally for reload
323 : pseudos, we use ALL_REGS. */
324 13778262 : return ((regno >= new_regno_start && rclass == ALL_REGS)
325 186525885 : || (rclass != NO_REGS && ira_class_subset_p[rclass][cl]
326 200874914 : && ! hard_reg_set_subset_p (reg_class_contents[cl],
327 : lra_no_alloc_regs)));
328 : else
329 : {
330 18737890 : common_class = ira_reg_class_subset[rclass][cl];
331 18737890 : if (new_class != NULL)
332 5274134 : *new_class = common_class;
333 18737890 : return (enough_allocatable_hard_regs_p (common_class, reg_mode)
334 : /* Do not permit reload insn operand matching (new_class == NULL
335 : case) if the new class is too small. */
336 18737890 : && (new_class != NULL || common_class == rclass
337 1015428 : || !SMALL_REGISTER_CLASS_P (common_class)));
338 : }
339 : }
340 :
341 : /* Return true if REGNO satisfies a memory constraint. */
342 : static bool
343 63709089 : in_mem_p (int regno)
344 : {
345 0 : return get_reg_class (regno) == NO_REGS;
346 : }
347 :
348 : /* Return true if ADDR is a valid memory address for mode MODE in address
349 : space AS, and check that each pseudo has the proper kind of hard
350 : reg. */
351 : static bool
352 35597631 : valid_address_p (machine_mode mode ATTRIBUTE_UNUSED,
353 : rtx addr, addr_space_t as)
354 : {
355 : #ifdef GO_IF_LEGITIMATE_ADDRESS
356 : lra_assert (ADDR_SPACE_GENERIC_P (as));
357 : GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
358 : return false;
359 :
360 : win:
361 : return true;
362 : #else
363 0 : return targetm.addr_space.legitimate_address_p (mode, addr, 0, as,
364 35597631 : ERROR_MARK);
365 : #endif
366 : }
367 :
368 : namespace {
369 : /* Temporarily eliminates registers in an address (for the lifetime of
370 : the object). */
371 : class address_eliminator {
372 : public:
373 : address_eliminator (struct address_info *ad);
374 : ~address_eliminator ();
375 :
376 : private:
377 : struct address_info *m_ad;
378 : rtx *m_base_loc;
379 : rtx m_base_reg;
380 : rtx *m_index_loc;
381 : rtx m_index_reg;
382 : };
383 : }
384 :
385 74878776 : address_eliminator::address_eliminator (struct address_info *ad)
386 74878776 : : m_ad (ad),
387 74878776 : m_base_loc (strip_subreg (ad->base_term)),
388 74878776 : m_base_reg (NULL_RTX),
389 74878776 : m_index_loc (strip_subreg (ad->index_term)),
390 74878776 : m_index_reg (NULL_RTX)
391 : {
392 74878776 : if (m_base_loc != NULL)
393 : {
394 62347160 : m_base_reg = *m_base_loc;
395 : /* If we have non-legitimate address which is decomposed not in
396 : the way we expected, don't do elimination here. In such case
397 : the address will be reloaded and elimination will be done in
398 : reload insn finally. */
399 62347160 : if (REG_P (m_base_reg))
400 62347160 : lra_eliminate_reg_if_possible (m_base_loc);
401 62347160 : if (m_ad->base_term2 != NULL)
402 0 : *m_ad->base_term2 = *m_ad->base_term;
403 : }
404 74878776 : if (m_index_loc != NULL)
405 : {
406 3714016 : m_index_reg = *m_index_loc;
407 3714016 : if (REG_P (m_index_reg))
408 3714016 : lra_eliminate_reg_if_possible (m_index_loc);
409 : }
410 74878776 : }
411 :
412 74878776 : address_eliminator::~address_eliminator ()
413 : {
414 74878776 : if (m_base_loc && *m_base_loc != m_base_reg)
415 : {
416 43867744 : *m_base_loc = m_base_reg;
417 43867744 : if (m_ad->base_term2 != NULL)
418 0 : *m_ad->base_term2 = *m_ad->base_term;
419 : }
420 74878776 : if (m_index_loc && *m_index_loc != m_index_reg)
421 0 : *m_index_loc = m_index_reg;
422 74878776 : }
423 :
424 : /* Return true if the eliminated form of AD is a legitimate target address.
425 : If OP is a MEM, AD is the address within OP, otherwise OP should be
426 : ignored. CONSTRAINT is one constraint that the operand may need
427 : to meet. */
428 : static bool
429 35575203 : valid_address_p (rtx op, struct address_info *ad,
430 : enum constraint_num constraint)
431 : {
432 35575203 : address_eliminator eliminator (ad);
433 :
434 : /* Allow a memory OP if it matches CONSTRAINT, even if CONSTRAINT is more
435 : forgiving than "m".
436 : Need to extract memory from op for special memory constraint,
437 : i.e. bcst_mem_operand in i386 backend. */
438 35575203 : if (MEM_P (extract_mem_from_operand (op))
439 : && insn_extra_relaxed_memory_constraint (constraint)
440 : && constraint_satisfied_p (op, constraint))
441 : return true;
442 :
443 35575203 : return valid_address_p (ad->mode, *ad->outer, ad->as);
444 35575203 : }
445 :
446 : /* For special_memory_operand, it could be false for MEM_P (op),
447 : i.e. bcst_mem_operand in i386 backend.
448 : Extract and return real memory operand or op. */
449 : rtx
450 622743759 : extract_mem_from_operand (rtx op)
451 : {
452 624421746 : for (rtx x = op;; x = XEXP (x, 0))
453 : {
454 624421746 : if (MEM_P (x))
455 : return x;
456 443474446 : if (GET_RTX_LENGTH (GET_CODE (x)) != 1
457 363663881 : || GET_RTX_FORMAT (GET_CODE (x))[0] != 'e')
458 : break;
459 : }
460 : return op;
461 : }
462 :
463 : /* Return true if the eliminated form of memory reference OP satisfies
464 : extra (special) memory constraint CONSTRAINT. */
465 : static bool
466 36858932 : satisfies_memory_constraint_p (rtx op, enum constraint_num constraint)
467 : {
468 36858932 : struct address_info ad;
469 36858932 : rtx mem = extract_mem_from_operand (op);
470 36858932 : if (!MEM_P (mem))
471 : return false;
472 :
473 35867351 : decompose_mem_address (&ad, mem);
474 35867351 : address_eliminator eliminator (&ad);
475 35867351 : return constraint_satisfied_p (op, constraint);
476 35867351 : }
477 :
478 : /* Return true if the eliminated form of address AD satisfies extra
479 : address constraint CONSTRAINT. */
480 : static bool
481 3436222 : satisfies_address_constraint_p (struct address_info *ad,
482 : enum constraint_num constraint)
483 : {
484 3436222 : address_eliminator eliminator (ad);
485 3436222 : return constraint_satisfied_p (*ad->outer, constraint);
486 3436222 : }
487 :
488 : /* Return true if the eliminated form of address OP satisfies extra
489 : address constraint CONSTRAINT. */
490 : static bool
491 1686045 : satisfies_address_constraint_p (rtx op, enum constraint_num constraint)
492 : {
493 1686045 : struct address_info ad;
494 :
495 1686045 : decompose_lea_address (&ad, &op);
496 1686045 : return satisfies_address_constraint_p (&ad, constraint);
497 : }
498 :
499 : /* Set of equivalences whose original targets have set up pointer flag. */
500 : static hash_set <rtx> *pointer_equiv_set;
501 :
502 : /* Add x to pointer_equiv_set. */
503 : void
504 1906026 : lra_pointer_equiv_set_add (rtx x)
505 : {
506 1906026 : pointer_equiv_set->add (x);
507 1906026 : }
508 :
509 : /* Return true if x is in pointer_equiv_set. */
510 : bool
511 9755038 : lra_pointer_equiv_set_in (rtx x)
512 : {
513 9755038 : return pointer_equiv_set->contains (x);
514 : }
515 :
516 : /* Initiate equivalences for LRA. As we keep original equivalences
517 : before any elimination, we need to make copies otherwise any change
518 : in insns might change the equivalences. */
519 : void
520 1471362 : lra_init_equiv (void)
521 : {
522 1471362 : ira_expand_reg_equiv ();
523 69620813 : for (int i = FIRST_PSEUDO_REGISTER; i < max_reg_num (); i++)
524 : {
525 68149451 : rtx res;
526 :
527 68149451 : if ((res = ira_reg_equiv[i].memory) != NULL_RTX)
528 3062115 : ira_reg_equiv[i].memory = copy_rtx (res);
529 68149451 : if ((res = ira_reg_equiv[i].invariant) != NULL_RTX)
530 868459 : ira_reg_equiv[i].invariant = copy_rtx (res);
531 : }
532 1471362 : pointer_equiv_set = new hash_set <rtx>;
533 1471362 : }
534 :
535 : /* Finish equivalence data for LRA. */
536 : void
537 1471362 : lra_finish_equiv (void)
538 : {
539 2942724 : delete pointer_equiv_set;
540 1471362 : }
541 :
542 : static rtx loc_equivalence_callback (rtx, const_rtx, void *);
543 :
544 : /* Update equivalence for REGNO. We need to this as the equivalence
545 : might contain other pseudos which are changed by their
546 : equivalences. */
547 : static void
548 202380568 : update_equiv (int regno)
549 : {
550 202380568 : rtx x;
551 :
552 202380568 : if ((x = ira_reg_equiv[regno].memory) != NULL_RTX)
553 9311578 : ira_reg_equiv[regno].memory
554 9311578 : = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
555 : NULL_RTX);
556 202380568 : if ((x = ira_reg_equiv[regno].invariant) != NULL_RTX)
557 2696642 : ira_reg_equiv[regno].invariant
558 2696642 : = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
559 : NULL_RTX);
560 202380568 : }
561 :
562 : /* If we have decided to substitute X with another value, return that
563 : value, otherwise return X. */
564 : static rtx
565 436202216 : get_equiv (rtx x)
566 : {
567 436202216 : int regno;
568 436202216 : rtx res;
569 :
570 295280886 : if (! REG_P (x) || (regno = REGNO (x)) < FIRST_PSEUDO_REGISTER
571 196310341 : || regno >= ira_reg_equiv_len
572 196310341 : || ! ira_reg_equiv[regno].defined_p
573 25658069 : || ! ira_reg_equiv[regno].profitable_p
574 461806187 : || lra_get_regno_hard_regno (regno) >= 0)
575 431250364 : return x;
576 4951852 : if ((res = ira_reg_equiv[regno].memory) != NULL_RTX)
577 : {
578 2236752 : if (targetm.cannot_substitute_mem_equiv_p (res))
579 : return x;
580 : return res;
581 : }
582 2715100 : if ((res = ira_reg_equiv[regno].constant) != NULL_RTX)
583 : return res;
584 1872193 : if ((res = ira_reg_equiv[regno].invariant) != NULL_RTX)
585 : return res;
586 0 : gcc_unreachable ();
587 : }
588 :
589 : /* If we have decided to substitute X with the equivalent value, return that
590 : value after elimination for INSN, otherwise return X. Add the result to
591 : pointer_equiv_set if X has set up pointer flag. */
592 : static rtx
593 245286955 : get_equiv_with_elimination (rtx x, rtx_insn *insn)
594 : {
595 245286955 : rtx res = get_equiv (x);
596 :
597 245286955 : if (x == res || CONSTANT_P (res))
598 : return res;
599 1504149 : res = lra_eliminate_regs_1 (insn, res, GET_MODE (res),
600 : false, false, 0, true);
601 1504149 : if (REG_POINTER (x))
602 1027587 : lra_pointer_equiv_set_add (res);
603 : return res;
604 : }
605 :
606 : /* Set up curr_operand_mode. */
607 : static void
608 105876116 : init_curr_operand_mode (void)
609 : {
610 105876116 : int nop = curr_static_id->n_operands;
611 329987053 : for (int i = 0; i < nop; i++)
612 : {
613 224110937 : machine_mode mode = GET_MODE (*curr_id->operand_loc[i]);
614 224110937 : if (mode == VOIDmode)
615 : {
616 : /* The .md mode for address operands is the mode of the
617 : addressed value rather than the mode of the address itself. */
618 43401640 : if (curr_id->icode >= 0 && curr_static_id->operand[i].is_address)
619 95 : mode = Pmode;
620 : else
621 43401545 : mode = curr_static_id->operand[i].mode;
622 : }
623 224110937 : curr_operand_mode[i] = mode;
624 : }
625 105876116 : }
626 :
627 :
628 :
629 : /* The page contains code to reuse input reloads. */
630 :
631 : /* Structure describes input reload of the current insns. */
632 : struct input_reload
633 : {
634 : /* True for input reload of matched operands. */
635 : bool match_p;
636 : /* True for input reload of inout earlyclobber operand. */
637 : bool early_clobber_p;
638 : /* Reloaded value. */
639 : rtx input;
640 : /* Reload pseudo used. */
641 : rtx reg;
642 : };
643 :
644 : /* The number of elements in the following array. */
645 : static int curr_insn_input_reloads_num;
646 : /* Array containing info about input reloads. It is used to find the
647 : same input reload and reuse the reload pseudo in this case. */
648 : static struct input_reload curr_insn_input_reloads[LRA_MAX_INSN_RELOADS];
649 :
650 : /* Initiate data concerning reuse of input reloads for the current
651 : insn. */
652 : static void
653 105876116 : init_curr_insn_input_reloads (void)
654 : {
655 105876116 : curr_insn_input_reloads_num = 0;
656 0 : }
657 :
658 : /* The canonical form of an rtx inside a MEM is not necessarily the same as the
659 : canonical form of the rtx outside the MEM. Fix this up in the case that
660 : we're reloading an address (and therefore pulling it outside a MEM). */
661 : static rtx
662 72 : canonicalize_reload_addr (rtx addr)
663 : {
664 72 : subrtx_var_iterator::array_type array;
665 246 : FOR_EACH_SUBRTX_VAR (iter, array, addr, NONCONST)
666 : {
667 174 : rtx x = *iter;
668 174 : if (GET_CODE (x) == MULT && CONST_INT_P (XEXP (x, 1)))
669 : {
670 14 : const HOST_WIDE_INT ci = INTVAL (XEXP (x, 1));
671 188 : const int pwr2 = exact_log2 (ci);
672 14 : if (pwr2 > 0)
673 : {
674 : /* Rewrite this to use a shift instead, which is canonical when
675 : outside of a MEM. */
676 14 : PUT_CODE (x, ASHIFT);
677 14 : XEXP (x, 1) = GEN_INT (pwr2);
678 : }
679 : }
680 : }
681 :
682 72 : return addr;
683 72 : }
684 :
685 : /* Return rtx accessing reload REG of RCLASS matching another reload reg in
686 : MODE. */
687 : static rtx
688 123270 : get_matching_reload_reg_subreg (machine_mode mode, rtx reg,
689 : enum reg_class rclass)
690 : {
691 123270 : int hard_regno = ira_class_hard_regs[rclass][0];
692 123270 : if (subreg_regno_offset (hard_regno,
693 123270 : GET_MODE (reg),
694 123270 : subreg_lowpart_offset (mode, GET_MODE (reg)),
695 : mode) == 0)
696 : /* For matching scalar int modes generate the right subreg byte offset for
697 : BE targets -- see call of reload.cc:operands_match_p in
698 : recog.cc:constrain_operands. */
699 123270 : return lowpart_subreg (mode, reg, GET_MODE (reg));
700 0 : int offset = (lra_constraint_offset (hard_regno, GET_MODE (reg))
701 0 : - lra_constraint_offset (hard_regno, mode)) * UNITS_PER_WORD;
702 0 : lra_assert (offset >= 0);
703 0 : return gen_rtx_SUBREG (mode, reg, offset);
704 : }
705 :
706 : /* Create a new pseudo using MODE, RCLASS, EXCLUDE_START_HARD_REGS, ORIGINAL or
707 : reuse an existing reload pseudo. Don't reuse an existing reload pseudo if
708 : IN_SUBREG_P is true and the reused pseudo should be wrapped up in a SUBREG.
709 : EARLY_CLOBBER_P is true for input reload of inout early clobber operand.
710 : The result pseudo is returned through RESULT_REG. Return TRUE if we created
711 : a new pseudo, FALSE if we reused an existing reload pseudo. Use TITLE to
712 : describe new registers for debug purposes. */
713 : static bool
714 3810329 : get_reload_reg (enum op_type type, machine_mode mode, rtx original,
715 : enum reg_class rclass, HARD_REG_SET *exclude_start_hard_regs,
716 : bool in_subreg_p, bool early_clobber_p,
717 : const char *title, rtx *result_reg)
718 : {
719 3810329 : int i, regno;
720 3810329 : enum reg_class new_class;
721 :
722 3810329 : if (type == OP_OUT)
723 : {
724 : /* Output reload registers tend to start out with a conservative
725 : choice of register class. Usually this is ALL_REGS, although
726 : a target might narrow it (for performance reasons) through
727 : targetm.preferred_reload_class. It's therefore quite common
728 : for a reload instruction to require a more restrictive class
729 : than the class that was originally assigned to the reload register.
730 :
731 : In these situations, it's more efficient to refine the choice
732 : of register class rather than create a second reload register.
733 : This also helps to avoid cycling for registers that are only
734 : used by reload instructions. */
735 966947 : if (REG_P (original)
736 706891 : && (int) REGNO (original) >= new_regno_start
737 7151 : && (INSN_UID (curr_insn) >= new_insn_uid_start
738 250 : || ira_former_scratch_p (REGNO (original)))
739 7151 : && in_class_p (original, rclass, &new_class, true)
740 967197 : && (exclude_start_hard_regs == nullptr
741 250 : || hard_reg_set_intersect_p (
742 967197 : ~lra_reg_info[REGNO (original)].exclude_start_hard_regs,
743 250 : ~*exclude_start_hard_regs)))
744 : {
745 250 : unsigned int regno = REGNO (original);
746 250 : if (lra_dump_file != NULL)
747 : {
748 0 : fprintf (lra_dump_file, " Reuse r%d for output ", regno);
749 0 : dump_value_slim (lra_dump_file, original, 1);
750 : }
751 500 : if (new_class != lra_get_allocno_class (regno))
752 250 : lra_change_class (regno, new_class, ", change to", false);
753 250 : if (lra_dump_file != NULL)
754 0 : fprintf (lra_dump_file, "\n");
755 250 : if (exclude_start_hard_regs)
756 250 : lra_reg_info[regno].exclude_start_hard_regs
757 250 : |= *exclude_start_hard_regs;
758 250 : *result_reg = original;
759 250 : return false;
760 : }
761 966697 : *result_reg
762 966697 : = lra_create_new_reg_with_unique_value (mode, original, rclass,
763 : exclude_start_hard_regs, title);
764 966697 : return true;
765 : }
766 :
767 2843382 : bool unique_p = early_clobber_p;
768 : /* Prevent reuse value of expression with side effects,
769 : e.g. volatile memory. */
770 2843382 : if (! side_effects_p (original))
771 3061648 : for (i = 0; i < curr_insn_input_reloads_num; i++)
772 : {
773 236755 : if (! curr_insn_input_reloads[i].match_p
774 99218 : && ! curr_insn_input_reloads[i].early_clobber_p
775 99217 : && rtx_equal_p (curr_insn_input_reloads[i].input, original)
776 245216 : && in_class_p (curr_insn_input_reloads[i].reg, rclass, &new_class))
777 : {
778 8448 : rtx reg = curr_insn_input_reloads[i].reg;
779 8448 : regno = REGNO (reg);
780 : /* If input is equal to original and both are VOIDmode,
781 : GET_MODE (reg) might be still different from mode.
782 : Ensure we don't return *result_reg with wrong mode. */
783 8448 : if (GET_MODE (reg) != mode)
784 : {
785 0 : if (in_subreg_p)
786 0 : continue;
787 0 : if (maybe_lt (GET_MODE_SIZE (GET_MODE (reg)),
788 0 : GET_MODE_SIZE (mode)))
789 0 : continue;
790 0 : reg = get_matching_reload_reg_subreg (mode, reg, new_class);
791 0 : if (reg == NULL_RTX || GET_CODE (reg) != SUBREG)
792 0 : continue;
793 : }
794 : /* If the existing reload and this have no start hard register in
795 : common, then skip. Otherwise update exclude_start_hard_regs. */
796 8448 : if (exclude_start_hard_regs
797 9708 : && ! hard_reg_set_empty_p (*exclude_start_hard_regs))
798 : {
799 1 : HARD_REG_SET r = lra_reg_info[regno].exclude_start_hard_regs
800 1 : | *exclude_start_hard_regs;
801 2 : if (hard_reg_set_empty_p (~r))
802 0 : continue;
803 : else
804 1 : lra_reg_info[regno].exclude_start_hard_regs = r;
805 : }
806 8448 : *result_reg = reg;
807 8448 : if (lra_dump_file != NULL)
808 : {
809 0 : fprintf (lra_dump_file, " Reuse r%d for reload ", regno);
810 0 : dump_value_slim (lra_dump_file, original, 1);
811 : }
812 16896 : if (new_class != lra_get_allocno_class (regno))
813 4396 : lra_change_class (regno, new_class, ", change to", false);
814 8448 : if (lra_dump_file != NULL)
815 0 : fprintf (lra_dump_file, "\n");
816 8448 : return false;
817 : }
818 : /* If we have an input reload with a different mode, make sure it
819 : will get a different hard reg. */
820 228307 : else if (REG_P (original)
821 180671 : && REG_P (curr_insn_input_reloads[i].input)
822 147788 : && REGNO (original) == REGNO (curr_insn_input_reloads[i].input)
823 228307 : && (GET_MODE (original)
824 2027 : != GET_MODE (curr_insn_input_reloads[i].input)))
825 : unique_p = true;
826 : }
827 5669868 : *result_reg = (unique_p
828 2834934 : ? lra_create_new_reg_with_unique_value
829 2834934 : : lra_create_new_reg) (mode, original, rclass,
830 : exclude_start_hard_regs, title);
831 2834934 : lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
832 2834934 : curr_insn_input_reloads[curr_insn_input_reloads_num].input = original;
833 2834934 : curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = false;
834 2834934 : curr_insn_input_reloads[curr_insn_input_reloads_num].early_clobber_p
835 2834934 : = early_clobber_p;
836 2834934 : curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = *result_reg;
837 2834934 : return true;
838 : }
839 :
840 :
841 : /* The page contains major code to choose the current insn alternative
842 : and generate reloads for it. */
843 :
844 : /* Return the offset from REGNO of the least significant register
845 : in (reg:MODE REGNO).
846 :
847 : This function is used to tell whether two registers satisfy
848 : a matching constraint. (reg:MODE1 REGNO1) matches (reg:MODE2 REGNO2) if:
849 :
850 : REGNO1 + lra_constraint_offset (REGNO1, MODE1)
851 : == REGNO2 + lra_constraint_offset (REGNO2, MODE2) */
852 : int
853 43345158 : lra_constraint_offset (int regno, machine_mode mode)
854 : {
855 43345158 : lra_assert (regno < FIRST_PSEUDO_REGISTER);
856 :
857 43345158 : scalar_int_mode int_mode;
858 43345158 : if (WORDS_BIG_ENDIAN
859 : && is_a <scalar_int_mode> (mode, &int_mode)
860 : && GET_MODE_SIZE (int_mode) > UNITS_PER_WORD)
861 : return hard_regno_nregs (regno, mode) - 1;
862 43345158 : return 0;
863 : }
864 :
/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
   if they are the same hard reg, and has special hacks for
   auto-increment and auto-decrement.  This is specifically intended for
   process_alt_operands to use in determining whether two operands
   match.  X is the operand whose number is the lower of the two.

   It is supposed that X is the output operand and Y is the input
   operand.  Y_HARD_REGNO is the final hard regno of register Y or
   register in subreg Y as we know it now.  Otherwise, it is a
   negative value.  */
static bool
operands_match_p (rtx x, rtx y, int y_hard_regno)
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;

  /* Identical rtx objects trivially match.  */
  if (x == y)
    return true;
  /* Both operands are (subregs of) registers: try to compare the hard
     registers they resolve to, adjusting for the big-endian offset of
     the least significant register (see lra_constraint_offset).  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y)))))
    {
      int j;

      i = get_hard_regno (x);
      if (i < 0)
	goto slow;

      /* Y's final hard regno was precomputed by the caller; negative
	 means it is not known yet.  */
      if ((j = y_hard_regno) < 0)
	goto slow;

      i += lra_constraint_offset (i, GET_MODE (x));
      j += lra_constraint_offset (j, GET_MODE (y));

      return i == j;
    }

  /* If two operands must match, because they are really a single
     operand of an assembler insn, then two post-increments are invalid
     because the assembler insn would increment only once.  On the
     other hand, a post-increment matches ordinary indexing if the
     post-increment is the output operand.  */
  if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
    return operands_match_p (XEXP (x, 0), y, y_hard_regno);

  /* Two pre-increments are invalid because the assembler insn would
     increment only once.  On the other hand, a pre-increment matches
     ordinary indexing if the pre-increment is the input operand.  */
  if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
      || GET_CODE (y) == PRE_MODIFY)
    return operands_match_p (x, XEXP (y, 0), -1);

 slow:

  if (code == REG && REG_P (y))
    return REGNO (x) == REGNO (y);

  /* A pseudo and a same-pseudo subreg are considered to match.  */
  if (code == REG && GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y))
      && x == SUBREG_REG (y))
    return true;
  if (GET_CODE (y) == REG && code == SUBREG && REG_P (SUBREG_REG (x))
      && SUBREG_REG (x) == y)
    return true;

  /* Now we have disposed of all the cases in which different rtx
     codes can match.  */
  if (code != GET_CODE (y))
    return false;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return false;

  switch (code)
    {
    CASE_CONST_UNIQUE:
      /* Unique constants are shared, so the pointer-equality test at
	 the top already covered the equal case.  */
      return false;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
	return false;
      break;

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);
    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements fail
     to match, return false for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int val, j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return false;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return false;
	  break;

	case 'L':
	  if (XLOC (x, i) != XLOC (y, i))
	    return false;
	  break;

	case 'p':
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return false;
	  break;

	case 'e':
	  val = operands_match_p (XEXP (x, i), XEXP (y, i), -1);
	  if (val == 0)
	    return false;
	  break;

	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return false;
	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
	    {
	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j), -1);
	      if (val == 0)
		return false;
	    }
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's, except for
	     within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return true;
}
1015 :
/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.  HIGH is
   excluded because it denotes only the high part of an address; a
   non-constant mode size means the value cannot be spilled to a fixed
   pool slot.  The target may veto via cannot_force_const_mem.  */
#define CONST_POOL_OK_P(MODE, X)		\
  ((MODE) != VOIDmode				\
   && CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && GET_MODE_SIZE (MODE).is_constant ()	\
   && !targetm.cannot_force_const_mem (MODE, X))
1024 :
1025 : /* If REG is a reload pseudo, try to make its class satisfying CL. */
1026 : static void
1027 3393136 : narrow_reload_pseudo_class (rtx reg, enum reg_class cl)
1028 : {
1029 3393136 : enum reg_class rclass;
1030 :
1031 : /* Do not make more accurate class from reloads generated. They are
1032 : mostly moves with a lot of constraints. Making more accurate
1033 : class may results in very narrow class and impossibility of find
1034 : registers for several reloads of one insn. */
1035 3393136 : if (INSN_UID (curr_insn) >= new_insn_uid_start)
1036 3393104 : return;
1037 3393024 : if (GET_CODE (reg) == SUBREG)
1038 163913 : reg = SUBREG_REG (reg);
1039 3393024 : if (! REG_P (reg) || (int) REGNO (reg) < new_regno_start)
1040 : return;
1041 32 : if (in_class_p (reg, cl, &rclass) && rclass != cl)
1042 13 : lra_change_class (REGNO (reg), rclass, " Change to", true);
1043 : }
1044 :
1045 : /* Searches X for any reference to a reg with the same value as REGNO,
1046 : returning the rtx of the reference found if any. Otherwise,
1047 : returns NULL_RTX. */
1048 : static rtx
1049 525805 : regno_val_use_in (unsigned int regno, rtx x)
1050 : {
1051 525805 : const char *fmt;
1052 525805 : int i, j;
1053 525805 : rtx tem;
1054 :
1055 525805 : if (REG_P (x) && lra_reg_info[REGNO (x)].val == lra_reg_info[regno].val)
1056 : return x;
1057 :
1058 525442 : fmt = GET_RTX_FORMAT (GET_CODE (x));
1059 1057228 : for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
1060 : {
1061 531786 : if (fmt[i] == 'e')
1062 : {
1063 7621 : if ((tem = regno_val_use_in (regno, XEXP (x, i))))
1064 : return tem;
1065 : }
1066 524165 : else if (fmt[i] == 'E')
1067 0 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1068 0 : if ((tem = regno_val_use_in (regno , XVECEXP (x, i, j))))
1069 : return tem;
1070 : }
1071 :
1072 : return NULL_RTX;
1073 : }
1074 :
1075 : /* Return true if all current insn non-output operands except INS (it
1076 : has a negaitve end marker) do not use pseudos with the same value
1077 : as REGNO. */
1078 : static bool
1079 2 : check_conflict_input_operands (int regno, signed char *ins)
1080 : {
1081 2 : int in;
1082 2 : int n_operands = curr_static_id->n_operands;
1083 :
1084 8 : for (int nop = 0; nop < n_operands; nop++)
1085 7 : if (! curr_static_id->operand[nop].is_operator
1086 7 : && curr_static_id->operand[nop].type != OP_OUT)
1087 : {
1088 5 : for (int i = 0; (in = ins[i]) >= 0; i++)
1089 4 : if (in == nop)
1090 : break;
1091 3 : if (in < 0
1092 3 : && regno_val_use_in (regno, *curr_id->operand_loc[nop]) != NULL_RTX)
1093 : return false;
1094 : }
1095 : return true;
1096 : }
1097 :
/* Generate reloads for matching OUT and INS (array of input operand numbers
   with end marker -1) with reg class GOAL_CLASS and EXCLUDE_START_HARD_REGS,
   considering output operands OUTS (similar array to INS) needing to be in
   different registers.  Add input and output reloads correspondingly to the
   lists *BEFORE and *AFTER.  OUT might be negative.  In this case we generate
   input reloads for matched input operands INS.  EARLY_CLOBBER_P is a flag
   that the output operand is early clobbered for chosen alternative.  */
static void
match_reload (signed char out, signed char *ins, signed char *outs,
	      enum reg_class goal_class, HARD_REG_SET *exclude_start_hard_regs,
	      rtx_insn **before, rtx_insn **after, bool early_clobber_p)
{
  bool out_conflict;
  int i, in;
  rtx new_in_reg, new_out_reg, reg;
  machine_mode inmode, outmode;
  rtx in_rtx = *curr_id->operand_loc[ins[0]];
  rtx out_rtx = out < 0 ? in_rtx : *curr_id->operand_loc[out];

  inmode = curr_operand_mode[ins[0]];
  outmode = out < 0 ? inmode : curr_operand_mode[out];
  push_to_sequence (*before);
  if (inmode != outmode)
    {
      /* The input and output operands have different modes: the reload
	 register gets the wider mode and the narrower operand is
	 accessed through a subreg of it.  process_alt_operands has
	 already checked that the mode sizes are ordered.  */
      if (partial_subreg_p (outmode, inmode))
	{
	  /* Input is wider: REG has the input mode, the output uses a
	     (lowpart) subreg of it.  */
	  bool asm_p = asm_noperands (PATTERN (curr_insn)) >= 0;
	  int hr;
	  HARD_REG_SET temp_hard_reg_set;

	  if (asm_p && (hr = get_hard_regno (out_rtx)) >= 0
	      && hard_regno_nregs (hr, inmode) > 1)
	    {
	      /* See gcc.c-torture/execute/20030222-1.c.
		 Consider the code for 32-bit (e.g. BE) target:
		   int i, v; long x; x = v; asm ("" : "=r" (i) : "0" (x));
		 We generate the following RTL with reload insns:
		   1. subreg:si(x:di, 0) = 0;
		   2. subreg:si(x:di, 4) = v:si;
		   3. t:di = x:di, dead x;
		   4. asm ("" : "=r" (subreg:si(t:di,4)) : "0" (t:di))
		   5. i:si = subreg:si(t:di,4);
		 If we assign hard reg of x to t, dead code elimination
		 will remove insn #2 and we will use uninitialized hard reg.
		 So exclude the hard reg of x for t.  We could ignore this
		 problem for non-empty asm using all x value but it is hard to
		 check that the asm are expanded into insn really using x
		 and setting r.  */
	      CLEAR_HARD_REG_SET (temp_hard_reg_set);
	      if (exclude_start_hard_regs != NULL)
		temp_hard_reg_set = *exclude_start_hard_regs;
	      SET_HARD_REG_BIT (temp_hard_reg_set, hr);
	      exclude_start_hard_regs = &temp_hard_reg_set;
	    }
	  reg = new_in_reg
	    = lra_create_new_reg_with_unique_value (inmode, in_rtx, goal_class,
						    exclude_start_hard_regs,
						    "");
	  new_out_reg = get_matching_reload_reg_subreg (outmode, reg, goal_class);
	  LRA_SUBREG_P (new_out_reg) = 1;
	  /* If the input reg is dying here, we can use the same hard
	     register for REG and IN_RTX.  We do it only for original
	     pseudos as reload pseudos can die although original
	     pseudos still live where reload pseudos die.  */
	  if (REG_P (in_rtx) && (int) REGNO (in_rtx) < lra_new_regno_start
	      && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx))
	      && (!early_clobber_p
		  || check_conflict_input_operands(REGNO (in_rtx), ins)))
	    lra_assign_reg_val (REGNO (in_rtx), REGNO (reg));
	}
      else
	{
	  /* Output is wider: REG has the output mode, the input uses a
	     subreg of it.  */
	  reg = new_out_reg
	    = lra_create_new_reg_with_unique_value (outmode, out_rtx,
						    goal_class,
						    exclude_start_hard_regs,
						    "");
	  new_in_reg = get_matching_reload_reg_subreg (inmode, reg, goal_class);
	  /* NEW_IN_REG is non-paradoxical subreg.  We don't want
	     NEW_OUT_REG living above.  We add clobber clause for
	     this.  This is just a temporary clobber.  We can remove
	     it at the end of LRA work.  */
	  rtx_insn *clobber = emit_clobber (new_out_reg);
	  LRA_TEMP_CLOBBER_P (PATTERN (clobber)) = 1;
	  LRA_SUBREG_P (new_in_reg) = 1;
	  if (GET_CODE (in_rtx) == SUBREG)
	    {
	      rtx subreg_reg = SUBREG_REG (in_rtx);

	      /* If SUBREG_REG is dying here and sub-registers IN_RTX
		 and NEW_IN_REG are similar, we can use the same hard
		 register for REG and SUBREG_REG.  */
	      if (REG_P (subreg_reg)
		  && (int) REGNO (subreg_reg) < lra_new_regno_start
		  && GET_MODE (subreg_reg) == outmode
		  && known_eq (SUBREG_BYTE (in_rtx), SUBREG_BYTE (new_in_reg))
		  && find_regno_note (curr_insn, REG_DEAD, REGNO (subreg_reg))
		  && (! early_clobber_p
		      || check_conflict_input_operands (REGNO (subreg_reg),
							ins)))
		lra_assign_reg_val (REGNO (subreg_reg), REGNO (reg));
	    }
	}
    }
  else
    {
      /* Pseudos have values -- see comments for lra_reg_info.
	 Different pseudos with the same value do not conflict even if
	 they live in the same place.  When we create a pseudo we
	 assign value of original pseudo (if any) from which we
	 created the new pseudo.  If we create the pseudo from the
	 input pseudo, the new pseudo will have no conflict with the
	 input pseudo which is wrong when the input pseudo lives after
	 the insn and as the new pseudo value is changed by the insn
	 output.  Therefore we create the new pseudo from the output
	 except the case when we have single matched dying input
	 pseudo.

	 We cannot reuse the current output register because we might
	 have a situation like "a <- a op b", where the constraints
	 force the second input operand ("b") to match the output
	 operand ("a").  "b" must then be copied into a new register
	 so that it doesn't clobber the current value of "a".

	 We cannot use the same value if the output pseudo is
	 early clobbered or the input pseudo is mentioned in the
	 output, e.g. as an address part in memory, because
	 output reload will actually extend the pseudo liveness.
	 We don't care about eliminable hard regs here as we are
	 interested only in pseudos.  */

      /* Matching input's register value is the same as one of the other
	 output operand.  Output operands in a parallel insn must be in
	 different registers.  */
      out_conflict = false;
      if (REG_P (in_rtx))
	{
	  for (i = 0; outs[i] >= 0; i++)
	    {
	      rtx other_out_rtx = *curr_id->operand_loc[outs[i]];
	      if (outs[i] != out && REG_P (other_out_rtx)
		  && (regno_val_use_in (REGNO (in_rtx), other_out_rtx)
		      != NULL_RTX))
		{
		  out_conflict = true;
		  break;
		}
	    }
	}

      /* Reuse the input pseudo's value only for a single, dying,
	 original, non-early-clobbered input that is not referenced by
	 the output or by another output operand; otherwise create a
	 fresh unique value from the output.  */
      new_in_reg = new_out_reg
	= (! early_clobber_p && ins[1] < 0 && REG_P (in_rtx)
	   && (int) REGNO (in_rtx) < lra_new_regno_start
	   && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx))
	   && (! early_clobber_p
	       || check_conflict_input_operands (REGNO (in_rtx), ins))
	   && (out < 0
	       || regno_val_use_in (REGNO (in_rtx), out_rtx) == NULL_RTX)
	   && !out_conflict
	   ? lra_create_new_reg (inmode, in_rtx, goal_class,
				 exclude_start_hard_regs, "")
	   : lra_create_new_reg_with_unique_value (outmode, out_rtx, goal_class,
						   exclude_start_hard_regs,
						   ""));
    }
  /* In operand can be got from transformations before processing insn
     constraints.  One example of such transformations is subreg
     reloading (see function simplify_operand_subreg).  The new
     pseudos created by the transformations might have inaccurate
     class (ALL_REGS) and we should make their classes more
     accurate.  */
  narrow_reload_pseudo_class (in_rtx, goal_class);
  lra_emit_move (copy_rtx (new_in_reg), in_rtx);
  *before = end_sequence ();
  /* Add the new pseudo to consider values of subsequent input reload
     pseudos.  */
  lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
  curr_insn_input_reloads[curr_insn_input_reloads_num].input = in_rtx;
  curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = true;
  curr_insn_input_reloads[curr_insn_input_reloads_num].early_clobber_p = false;
  curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = new_in_reg;
  /* Substitute the reload register for every matched input operand;
     a VOIDmode operand location or one whose mode matches gets
     NEW_IN_REG, otherwise it must have the output mode and gets
     NEW_OUT_REG (the subreg pair built above).  */
  for (i = 0; (in = ins[i]) >= 0; i++)
    if (GET_MODE (*curr_id->operand_loc[in]) == VOIDmode
	|| GET_MODE (new_in_reg) == GET_MODE (*curr_id->operand_loc[in]))
      *curr_id->operand_loc[in] = new_in_reg;
    else
      {
	lra_assert
	  (GET_MODE (new_out_reg) == GET_MODE (*curr_id->operand_loc[in]));
	*curr_id->operand_loc[in] = new_out_reg;
      }
  lra_update_dups (curr_id, ins);
  if (out < 0)
    return;
  /* See a comment for the input operand above.  */
  narrow_reload_pseudo_class (out_rtx, goal_class);
  reg = SUBREG_P (out_rtx) ? SUBREG_REG (out_rtx) : out_rtx;
  /* No output reload is needed for an unused result or a former
     scratch.  */
  if (find_reg_note (curr_insn, REG_UNUSED, reg) == NULL_RTX
      && (!REG_P (reg) || !ira_former_scratch_p (REGNO (reg))))
    {
      start_sequence ();
      /* If we had strict_low_part, use it also in reload to keep other
	 parts unchanged but do it only for regs as strict_low_part
	 has no sense for memory and probably there is no insn pattern
	 to match the reload insn in memory case.  */
      if (out >= 0 && curr_static_id->operand[out].strict_low && REG_P (reg))
	out_rtx = gen_rtx_STRICT_LOW_PART (VOIDmode, out_rtx);
      lra_emit_move (out_rtx, copy_rtx (new_out_reg));
      emit_insn (*after);
      *after = end_sequence ();
    }
  *curr_id->operand_loc[out] = new_out_reg;
  lra_update_dup (curr_id, out);
}
1313 : }
1314 :
/* Return register class which is union of all reg classes in insn
   constraint alternative string starting with P.  */
static enum reg_class
reg_class_from_constraints (const char *p)
{
  int c, len;
  enum reg_class op_class = NO_REGS;

  do
    /* The comma expression reads the current constraint letter and its
       length before dispatching on the letter.  */
    switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
      {
      case '#':
      case ',':
	/* '#' starts a disparaged part and ',' ends the alternative:
	   stop accumulating.  */
	return op_class;

      case 'g':
	op_class = reg_class_subunion[op_class][GENERAL_REGS];
	break;

      default:
	enum constraint_num cn = lookup_constraint (p);
	enum reg_class cl = reg_class_for_constraint (cn);
	if (cl == NO_REGS)
	  {
	    /* Address constraints contribute the class of registers
	       valid as a base; other non-register constraints add
	       nothing.  */
	    if (insn_extra_address_constraint (cn))
	      op_class
		= (reg_class_subunion
		   [op_class][base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					      ADDRESS, SCRATCH)]);
	    break;
	  }

	op_class = reg_class_subunion[op_class][cl];
	break;
      }
  /* Advance past the constraint just processed; a NUL byte (c == 0)
     terminates the loop.  */
  while ((p += len), c);
  return op_class;
}
1353 :
1354 : /* If OP is a register, return the class of the register as per
1355 : get_reg_class, otherwise return NO_REGS. */
1356 : static inline enum reg_class
1357 162684440 : get_op_class (rtx op)
1358 : {
1359 135085616 : return REG_P (op) ? get_reg_class (REGNO (op)) : NO_REGS;
1360 : }
1361 :
1362 : /* Return generated insn mem_pseudo:=val if TO_P or val:=mem_pseudo
1363 : otherwise. If modes of MEM_PSEUDO and VAL are different, use
1364 : SUBREG for VAL to make them equal. */
1365 : static rtx_insn *
1366 1332527 : emit_spill_move (bool to_p, rtx mem_pseudo, rtx val)
1367 : {
1368 1332527 : if (GET_MODE (mem_pseudo) != GET_MODE (val))
1369 : {
1370 : /* Usually size of mem_pseudo is greater than val size but in
1371 : rare cases it can be less as it can be defined by target
1372 : dependent macro HARD_REGNO_CALLER_SAVE_MODE. */
1373 3028 : if (! MEM_P (val))
1374 : {
1375 3028 : val = gen_lowpart_SUBREG (GET_MODE (mem_pseudo),
1376 : GET_CODE (val) == SUBREG
1377 : ? SUBREG_REG (val) : val);
1378 3028 : LRA_SUBREG_P (val) = 1;
1379 : }
1380 : else
1381 : {
1382 0 : mem_pseudo = gen_lowpart_SUBREG (GET_MODE (val), mem_pseudo);
1383 0 : LRA_SUBREG_P (mem_pseudo) = 1;
1384 : }
1385 : }
1386 1332527 : return to_p ? gen_move_insn (mem_pseudo, val)
1387 672857 : : gen_move_insn (val, mem_pseudo);
1388 : }
1389 :
/* Process a special case insn (register move), return true if we
   don't need to process it anymore.  INSN should be a single set
   insn.  Set up that RTL was changed through CHANGE_P and that hook
   TARGET_SECONDARY_MEMORY_NEEDED says to use secondary memory through
   SEC_MEM_P.  */
static bool
check_and_process_move (bool *change_p, bool *sec_mem_p ATTRIBUTE_UNUSED)
{
  int sregno, dregno;
  rtx dest, src, dreg, sreg, new_reg, scratch_reg;
  rtx_insn *before;
  enum reg_class dclass, sclass, secondary_class;
  secondary_reload_info sri;

  lra_assert (curr_insn_set != NULL_RTX);
  dreg = dest = SET_DEST (curr_insn_set);
  sreg = src = SET_SRC (curr_insn_set);
  /* Look through subregs to the underlying reg or mem.  */
  if (GET_CODE (dest) == SUBREG)
    dreg = SUBREG_REG (dest);
  if (GET_CODE (src) == SUBREG)
    sreg = SUBREG_REG (src);
  if (! (REG_P (dreg) || MEM_P (dreg)) || ! (REG_P (sreg) || MEM_P (sreg)))
    return false;
  sclass = dclass = NO_REGS;
  if (REG_P (dreg))
    dclass = get_reg_class (REGNO (dreg));
  gcc_assert (dclass < LIM_REG_CLASSES && dclass >= NO_REGS);
  if (dclass == ALL_REGS)
    /* ALL_REGS is used for new pseudos created by transformations
       like reload of SUBREG_REG (see function
       simplify_operand_subreg).  We don't know their class yet.  We
       should figure out the class from processing the insn
       constraints not in this fast path function.  Even if ALL_REGS
       were a right class for the pseudo, secondary_... hooks usually
       are not define for ALL_REGS.  */
    return false;
  if (REG_P (sreg))
    sclass = get_reg_class (REGNO (sreg));
  gcc_assert (sclass < LIM_REG_CLASSES && sclass >= NO_REGS);
  if (sclass == ALL_REGS)
    /* See comments above.  */
    return false;
  if (sclass == NO_REGS && dclass == NO_REGS)
    return false;
  /* Ask the target whether this move between classes must go through
     memory; if so, report it via SEC_MEM_P and let the caller handle
     the transformation.  */
  if (targetm.secondary_memory_needed (GET_MODE (src), sclass, dclass)
      && ((sclass != NO_REGS && dclass != NO_REGS)
	  || (GET_MODE (src)
	      != targetm.secondary_memory_needed_mode (GET_MODE (src)))))
    {
      *sec_mem_p = true;
      return false;
    }
  if (! REG_P (dreg) || ! REG_P (sreg))
    return false;
  sri.prev_sri = NULL;
  sri.icode = CODE_FOR_nothing;
  sri.extra_cost = 0;
  secondary_class = NO_REGS;
  /* Set up hard register for a reload pseudo for hook
     secondary_reload because some targets just ignore unassigned
     pseudos in the hook.  The temporary assignment is undone below.  */
  if (dclass != NO_REGS && lra_get_regno_hard_regno (REGNO (dreg)) < 0)
    {
      dregno = REGNO (dreg);
      reg_renumber[dregno] = ira_class_hard_regs[dclass][0];
    }
  else
    dregno = -1;
  if (sclass != NO_REGS && lra_get_regno_hard_regno (REGNO (sreg)) < 0)
    {
      sregno = REGNO (sreg);
      reg_renumber[sregno] = ira_class_hard_regs[sclass][0];
    }
  else
    sregno = -1;
  /* First query the hook for the output direction ...  */
  if (sclass != NO_REGS)
    secondary_class
      = (enum reg_class) targetm.secondary_reload (false, dest,
						   (reg_class_t) sclass,
						   GET_MODE (src), &sri);
  /* ... then for the input direction, when needed.  */
  if (sclass == NO_REGS
      || ((secondary_class != NO_REGS || sri.icode != CODE_FOR_nothing)
	  && dclass != NO_REGS))
    {
      enum reg_class old_sclass = secondary_class;
      secondary_reload_info old_sri = sri;

      sri.prev_sri = NULL;
      sri.icode = CODE_FOR_nothing;
      sri.extra_cost = 0;
      secondary_class
	= (enum reg_class) targetm.secondary_reload (true, src,
						     (reg_class_t) dclass,
						     GET_MODE (src), &sri);
      /* Check the target hook consistency.  */
      lra_assert
	((secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
	 || (old_sclass == NO_REGS && old_sri.icode == CODE_FOR_nothing)
	 || (secondary_class == old_sclass && sri.icode == old_sri.icode));
    }
  /* Undo the temporary hard register assignments made above.  */
  if (sregno >= 0)
    reg_renumber [sregno] = -1;
  if (dregno >= 0)
    reg_renumber [dregno] = -1;
  if (secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
    return false;
  *change_p = true;
  new_reg = NULL_RTX;
  if (secondary_class != NO_REGS)
    new_reg = lra_create_new_reg_with_unique_value (GET_MODE (src), NULL_RTX,
						    secondary_class, NULL,
						    "secondary");
  start_sequence ();
  if (sri.icode == CODE_FOR_nothing)
    lra_emit_move (new_reg, src);
  else
    {
      /* The target supplied an intermediate-move pattern; build its
	 scratch operand from the pattern's own constraint.  */
      enum reg_class scratch_class;

      scratch_class = (reg_class_from_constraints
		       (insn_data[sri.icode].operand[2].constraint));
      scratch_reg = (lra_create_new_reg_with_unique_value
		     (insn_data[sri.icode].operand[2].mode, NULL_RTX,
		      scratch_class, NULL, "scratch"));
      emit_insn (GEN_FCN (sri.icode) (new_reg != NULL_RTX ? new_reg : dest,
				      src, scratch_reg));
    }
  before = end_sequence ();
  lra_process_new_insns (curr_insn, before, NULL, "Inserting the move");
  if (new_reg != NULL_RTX)
    SET_SRC (curr_insn_set) = new_reg;
  else
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file, "Deleting move %u\n", INSN_UID (curr_insn));
	  dump_insn_slim (lra_dump_file, curr_insn);
	}
      lra_set_insn_deleted (curr_insn);
      return true;
    }
  return false;
}
1533 :
/* The following data describe the result of process_alt_operands.
   The data are used in curr_insn_transform to generate reloads.  */

/* The chosen reg classes which should be used for the corresponding
   operands.  */
static enum reg_class goal_alt[MAX_RECOG_OPERANDS];
/* Hard registers which cannot be a start hard register for the corresponding
   operands.  */
static HARD_REG_SET goal_alt_exclude_start_hard_regs[MAX_RECOG_OPERANDS];
/* True if the operand should be the same as another operand and that
   other operand does not need a reload.  */
static bool goal_alt_match_win[MAX_RECOG_OPERANDS];
/* True if the operand does not need a reload.  */
static bool goal_alt_win[MAX_RECOG_OPERANDS];
/* True if the operand can be offsetable memory.  */
static bool goal_alt_offmemok[MAX_RECOG_OPERANDS];
/* The number of an operand to which a given operand can be matched.  */
static int goal_alt_matches[MAX_RECOG_OPERANDS];
/* The number of elements in the following array.  */
static int goal_alt_dont_inherit_ops_num;
/* Numbers of operands whose reload pseudos should not be inherited.  */
static int goal_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
/* True if we should try only this alternative for the next constraint sub-pass
   to speed up the sub-pass.  */
static bool goal_reuse_alt_p;
/* True if the insn commutative operands should be swapped.  */
static bool goal_alt_swapped;
/* The chosen insn alternative.  */
static int goal_alt_number;
/* True if output reload of the stack pointer should be generated.  */
static bool goal_alt_out_sp_reload_p;

/* True if the corresponding operand is the result of an equivalence
   substitution.  */
static bool equiv_substition_p[MAX_RECOG_OPERANDS];

/* The following five variables are used to choose the best insn
   alternative.  They reflect final characteristics of the best
   alternative.  */

/* Number of necessary reloads and overall cost reflecting the
   previous value and other unpleasantness of the best alternative.  */
static int best_losers, best_overall;
/* Overall number of hard registers used for reloads.  For example, on
   some targets we need 2 general registers to reload DFmode and only
   one floating point register.  */
static int best_reload_nregs;
/* Overall number reflecting distances of previous reloading the same
   value.  The distances are counted from the current BB start.  It is
   used to improve inheritance chances.  */
static int best_reload_sum;

/* True if the current insn should have no correspondingly input or
   output reloads.  */
static bool no_input_reloads_p, no_output_reloads_p;

/* True if we swapped the commutative operands in the current
   insn.  */
static int curr_swapped;
1593 :
/* If CHECK_ONLY_P is false, arrange for address element *LOC to be a
   register of class CL.  Add any input reloads to list BEFORE.  AFTER
   is nonnull if *LOC is an automodified value; handle that case by
   adding the required output reloads to list AFTER.  Return true if
   the RTL was changed.

   If CHECK_ONLY_P is true, check that *LOC is a correct address
   register.  Return false if the address register is correct.  */
static bool
process_addr_reg (rtx *loc, bool check_only_p, rtx_insn **before, rtx_insn **after,
		  enum reg_class cl)
{
  int regno;
  enum reg_class rclass, new_class;
  rtx reg;
  rtx new_reg;
  machine_mode mode;
  bool subreg_p, before_p = false;

  subreg_p = GET_CODE (*loc) == SUBREG;
  if (subreg_p)
    {
      reg = SUBREG_REG (*loc);
      mode = GET_MODE (reg);

      /* For a mode with size bigger than ptr_mode, there is unlikely to be
	 a "mov" between two registers with different classes, but there
	 normally will be a "mov" which transfers an element of a vector
	 register into the general register, and this normally will be a
	 subreg which should be reloaded as a whole.  This is particularly
	 likely to be triggered when -fno-split-wide-types is specified.
	 In the other cases reload the inner register instead of the whole
	 subreg.  */
      if (!REG_P (reg)
	  || in_class_p (reg, cl, &new_class)
	  || known_le (GET_MODE_SIZE (mode), GET_MODE_SIZE (ptr_mode)))
       loc = &SUBREG_REG (*loc);
    }

  reg = *loc;
  mode = GET_MODE (reg);
  if (! REG_P (reg))
    {
      if (check_only_p)
	return true;
      /* Always reload memory in an address even if the target supports
	 such addresses.  */
      new_reg = lra_create_new_reg_with_unique_value (mode, reg, cl, NULL,
						      "address");
      before_p = true;
    }
  else
    {
      regno = REGNO (reg);
      rclass = get_reg_class (regno);
      /* First try substituting the register by its equivalence (after
	 elimination); the equiv may be a cheaper form of the address.  */
      if (! check_only_p
	  && (*loc = get_equiv_with_elimination (reg, curr_insn)) != reg)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "Changing pseudo %d in address of insn %u on equiv ",
		       REGNO (reg), INSN_UID (curr_insn));
	      dump_value_slim (lra_dump_file, *loc, 1);
	      fprintf (lra_dump_file, "\n");
	    }
	  /* Use a private copy of the equivalence so later transformations
	     of this use do not corrupt the shared equiv RTL.  */
	  rtx new_equiv = copy_rtx (*loc);
	  if (lra_pointer_equiv_set_in (*loc))
	    lra_pointer_equiv_set_add (new_equiv);
	  *loc = new_equiv;
	}
      if (*loc != reg || ! in_class_p (reg, cl, &new_class))
	{
	  /* The value changed or is in the wrong class: an input reload
	     (and an output reload too for automodified values, signalled
	     by AFTER != NULL) is needed.  */
	  if (check_only_p)
	    return true;
	  reg = *loc;
	  if (get_reload_reg (after == NULL ? OP_IN : OP_INOUT,
			      mode, reg, cl, NULL,
			      subreg_p, false, "address", &new_reg))
	    before_p = true;
	}
      else if (new_class != NO_REGS && rclass != new_class)
	{
	  /* No reload is needed; narrowing the pseudo's class to the
	     intersection computed by in_class_p is enough.  */
	  if (check_only_p)
	    return true;
	  lra_change_class (regno, new_class, "	   Change to", true);
	  return false;
	}
      else
	return false;
    }
  if (before_p)
    {
      push_to_sequence (*before);
      lra_emit_move (new_reg, reg);
      *before = end_sequence ();
    }
  *loc = new_reg;
  if (after != NULL)
    {
      /* Automodified value: store the reload register back.  If an input
	 reload was emitted too, REG is already used there, so copy it.  */
      start_sequence ();
      lra_emit_move (before_p ? copy_rtx (reg) : reg, new_reg);
      emit_insn (*after);
      *after = end_sequence ();
    }
  return true;
}
1699 :
1700 : /* Insert move insn in simplify_operand_subreg. BEFORE returns
1701 : the insn to be inserted before curr insn. AFTER returns the
1702 : the insn to be inserted after curr insn. ORIGREG and NEWREG
1703 : are the original reg and new reg for reload. */
1704 : static void
1705 458 : insert_move_for_subreg (rtx_insn **before, rtx_insn **after, rtx origreg,
1706 : rtx newreg)
1707 : {
1708 458 : if (before)
1709 : {
1710 458 : push_to_sequence (*before);
1711 458 : lra_emit_move (newreg, origreg);
1712 458 : *before = end_sequence ();
1713 : }
1714 458 : if (after)
1715 : {
1716 0 : start_sequence ();
1717 0 : lra_emit_move (origreg, newreg);
1718 0 : emit_insn (*after);
1719 0 : *after = end_sequence ();
1720 : }
1721 458 : }
1722 :
1723 : static bool valid_address_p (machine_mode mode, rtx addr, addr_space_t as);
1724 : static bool process_address (int, bool, rtx_insn **, rtx_insn **);
1725 :
/* Make reloads for subreg in operand NOP with internal subreg mode
   REG_MODE, add new reloads for further processing.  Return true if
   any change was done.  */
static bool
simplify_operand_subreg (int nop, machine_mode reg_mode)
{
  int hard_regno, inner_hard_regno;
  rtx_insn *before, *after;
  machine_mode mode, innermode;
  rtx reg, new_reg;
  rtx operand = *curr_id->operand_loc[nop];
  enum reg_class regclass;
  enum op_type type;

  before = after = NULL;

  if (GET_CODE (operand) != SUBREG)
    return false;

  mode = GET_MODE (operand);
  reg = SUBREG_REG (operand);
  innermode = GET_MODE (reg);
  type = curr_static_id->operand[nop].type;
  /* Case 1: subreg of memory.  Try to fold the subreg into the MEM and
     decide whether the folded access is acceptable or must be reloaded.  */
  if (MEM_P (reg))
    {
      const bool addr_was_valid
	= valid_address_p (innermode, XEXP (reg, 0), MEM_ADDR_SPACE (reg));
      alter_subreg (curr_id->operand_loc[nop], false);
      rtx subst = *curr_id->operand_loc[nop];
      lra_assert (MEM_P (subst));
      const bool addr_is_valid = valid_address_p (GET_MODE (subst),
						  XEXP (subst, 0),
						  MEM_ADDR_SPACE (subst));
      if (!addr_was_valid
	  || addr_is_valid
	  || ((get_constraint_type (lookup_constraint
				    (curr_static_id->operand[nop].constraint))
	       != CT_SPECIAL_MEMORY)
	      /* We still can reload address and if the address is
		 valid, we can remove subreg without reloading its
		 inner memory.  */
	      && valid_address_p (GET_MODE (subst),
				  regno_reg_rtx
				  [ira_class_hard_regs
				   [base_reg_class (GET_MODE (subst),
						    MEM_ADDR_SPACE (subst),
						    ADDRESS, SCRATCH)][0]],
				  MEM_ADDR_SPACE (subst))))
	{
	  /* If we change the address for a paradoxical subreg of memory, the
	     new address might violate the necessary alignment or the access
	     might be slow; take this into consideration.  We need not worry
	     about accesses beyond allocated memory for paradoxical memory
	     subregs as we don't substitute such equiv memory (see processing
	     equivalences in function lra_constraints) and because for spilled
	     pseudos we allocate stack memory enough for the biggest
	     corresponding paradoxical subreg.

	     However, do not blindly simplify a (subreg (mem ...)) for
	     WORD_REGISTER_OPERATIONS targets as this may lead to loading junk
	     data into a register when the inner is narrower than outer or
	     missing important data from memory when the inner is wider than
	     outer.  This rule only applies to modes that are no wider than
	     a word.

	     If valid memory becomes invalid after subreg elimination
	     and address might be different we still have to reload
	     memory.
	  */
	  if ((! addr_was_valid
	       || addr_is_valid
	       || known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (innermode)))
	      && !(maybe_ne (GET_MODE_PRECISION (mode),
			     GET_MODE_PRECISION (innermode))
		   && known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD)
		   && known_le (GET_MODE_SIZE (innermode), UNITS_PER_WORD)
		   && WORD_REGISTER_OPERATIONS)
	      && (!(MEM_ALIGN (subst) < GET_MODE_ALIGNMENT (mode)
		    && targetm.slow_unaligned_access (mode, MEM_ALIGN (subst)))
		  || (MEM_ALIGN (reg) < GET_MODE_ALIGNMENT (innermode)
		      && targetm.slow_unaligned_access (innermode,
							MEM_ALIGN (reg)))))
	    /* The folded MEM is fine: keep the substitution already done
	       by alter_subreg above.  */
	    return true;

	  /* The folded access would be slow or invalid: restore the
	     original (subreg (mem)) and reload the memory itself.  */
	  *curr_id->operand_loc[nop] = operand;

	  /* But if the address was not valid, we cannot reload the MEM without
	     reloading the address first.  */
	  if (!addr_was_valid)
	    process_address (nop, false, &before, &after);

	  /* INNERMODE is fast, MODE slow.  Reload the mem in INNERMODE.  */
	  enum reg_class rclass
	    = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
	  if (get_reload_reg (curr_static_id->operand[nop].type, innermode,
			      reg, rclass, NULL,
			      true, false, "slow/invalid mem", &new_reg))
	    {
	      bool insert_before, insert_after;
	      bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	      insert_before = (type != OP_OUT
			       || partial_subreg_p (mode, innermode));
	      insert_after = type != OP_IN;
	      insert_move_for_subreg (insert_before ? &before : NULL,
				      insert_after ? &after : NULL,
				      reg, new_reg);
	    }
	  SUBREG_REG (operand) = new_reg;

	  /* Convert to MODE.  */
	  reg = operand;
	  rclass
	    = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
	  if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
			      rclass, NULL,
			      true, false, "slow/invalid mem", &new_reg))
	    {
	      bool insert_before, insert_after;
	      bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	      insert_before = type != OP_OUT;
	      insert_after = type != OP_IN;
	      insert_move_for_subreg (insert_before ? &before : NULL,
				      insert_after ? &after : NULL,
				      reg, new_reg);
	    }
	  *curr_id->operand_loc[nop] = new_reg;
	  lra_process_new_insns (curr_insn, before, after,
				 "Inserting slow/invalid mem reload");
	  return true;
	}

      /* If the address was valid and became invalid, prefer to reload
	 the memory.  Typical case is when the index scale should
	 correspond the memory.  */
      *curr_id->operand_loc[nop] = operand;
      /* Do not return false here as the MEM_P (reg) will be processed
	 later in this function.  */
    }
  /* Case 2: subreg of an eliminable/assigned hard register: just fold it.  */
  else if (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER)
    {
      alter_subreg (curr_id->operand_loc[nop], false);
      return true;
    }
  /* Case 3: subreg of a constant.  */
  else if (CONSTANT_P (reg))
    {
      /* Try to simplify subreg of constant.  It is usually result of
	 equivalence substitution.  */
      if (innermode == VOIDmode
	  && (innermode = original_subreg_reg_mode[nop]) == VOIDmode)
	innermode = curr_static_id->operand[nop].mode;
      if ((new_reg = simplify_subreg (mode, reg, innermode,
				      SUBREG_BYTE (operand))) != NULL_RTX)
	{
	  *curr_id->operand_loc[nop] = new_reg;
	  return true;
	}
    }
  /* Put constant into memory when we have mixed modes.  It generates
     a better code in most cases as it does not need a secondary
     reload memory.  It also prevents LRA looping when LRA is using
     secondary reload memory again and again.  */
  if (CONSTANT_P (reg) && CONST_POOL_OK_P (reg_mode, reg)
      && SCALAR_INT_MODE_P (reg_mode) != SCALAR_INT_MODE_P (mode))
    {
      SUBREG_REG (operand) = force_const_mem (reg_mode, reg);
      alter_subreg (curr_id->operand_loc[nop], false);
      return true;
    }
  /* Helper: would (subreg (frame_pointer)) be simplifiable once reload has
     finished?  Temporarily pretends reload_completed so that
     simplify_subreg_regno does not veto the frame pointer.  */
  auto fp_subreg_can_be_simplified_after_reload_p = [] (machine_mode innermode,
							poly_uint64 offset,
							machine_mode mode) {
    reload_completed = 1;
    bool res = simplify_subreg_regno (FRAME_POINTER_REGNUM,
				      innermode,
				      offset, mode) >= 0;
    reload_completed = 0;
    return res;
  };
  /* Force a reload of the SUBREG_REG if this is a constant or PLUS or
     if there may be a problem accessing OPERAND in the outer
     mode.  */
  if ((REG_P (reg)
       && REGNO (reg) >= FIRST_PSEUDO_REGISTER
       && (hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
       /* Don't reload paradoxical subregs because we could be looping
	  having repeatedly final regno out of hard regs range.  */
       && (hard_regno_nregs (hard_regno, innermode)
	   >= hard_regno_nregs (hard_regno, mode))
       && simplify_subreg_regno (hard_regno, innermode,
				 SUBREG_BYTE (operand), mode) < 0
       /* Exclude reloading of frame pointer in subreg if frame pointer can not
	  be simplified here only because the reload is not finished yet.  */
       && (hard_regno != FRAME_POINTER_REGNUM
	   || !fp_subreg_can_be_simplified_after_reload_p (innermode,
							   SUBREG_BYTE (operand),
							   mode))
       /* Don't reload subreg for matching reload.  It is actually
	  valid subreg in LRA.  */
       && ! LRA_SUBREG_P (operand))
      || CONSTANT_P (reg) || GET_CODE (reg) == PLUS || MEM_P (reg))
    {
      enum reg_class rclass;

      if (REG_P (reg))
	/* There is a big probability that we will get the same class
	   for the new pseudo and we will get the same insn which
	   means infinite looping.  So spill the new pseudo.  */
	rclass = NO_REGS;
      else
	/* The class will be defined later in curr_insn_transform.  */
	rclass
	  = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);

      if (get_reload_reg (curr_static_id->operand[nop].type, reg_mode, reg,
			  rclass, NULL,
			  true, false, "subreg reg", &new_reg))
	{
	  bool insert_before, insert_after;
	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	  insert_before = (type != OP_OUT
			   || read_modify_subreg_p (operand));
	  insert_after = (type != OP_IN);
	  insert_move_for_subreg (insert_before ? &before : NULL,
				  insert_after ? &after : NULL,
				  reg, new_reg);
	}
      SUBREG_REG (operand) = new_reg;
      lra_process_new_insns (curr_insn, before, after,
			     "Inserting subreg reload");
      return true;
    }
  /* Force a reload for a paradoxical subreg.  For paradoxical subreg,
     IRA allocates hardreg to the inner pseudo reg according to its mode
     instead of the outermode, so the size of the hardreg may not be enough
     to contain the outermode operand, in that case we may need to insert
     reload for the reg.  For the following two types of paradoxical subreg,
     we need to insert reload:
     1. If the op_type is OP_IN, and the hardreg could not be paired with
        other hardreg to contain the outermode operand
        (checked by in_hard_reg_set_p), we need to insert the reload.
     2. If the op_type is OP_OUT or OP_INOUT.

     Here is a paradoxical subreg example showing how the reload is generated:

       (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
	  (subreg:TI (reg:DI 107 [ __comp ]) 0)) {*movti_internal_rex64}

     In IRA, reg107 is allocated to a DImode hardreg.  We use x86-64 as example
     here, if reg107 is assigned to hardreg R15, because R15 is the last
     hardreg, compiler cannot find another hardreg to pair with R15 to
     contain TImode data.  So we insert a TImode reload reg180 for it.
     After reload is inserted:

       (insn 283 0 0 (set (subreg:DI (reg:TI 180 [orig:107 __comp ] [107]) 0)
	  (reg:DI 107 [ __comp ])) -1
       (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
	  (subreg:TI (reg:TI 180 [orig:107 __comp ] [107]) 0)) {*movti_internal_rex64}

     Two reload hard registers will be allocated to reg180 to save TImode data
     in LRA_assign.

     For LRA pseudos this should normally be handled by the biggest_mode
     mechanism.  However, it's possible for new uses of an LRA pseudo
     to be introduced after we've allocated it, such as when undoing
     inheritance, and the allocated register might not then be appropriate
     for the new uses.  */
  else if (REG_P (reg)
	   && REGNO (reg) >= FIRST_PSEUDO_REGISTER
	   && paradoxical_subreg_p (operand)
	   && (inner_hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
	   && ((hard_regno
		= simplify_subreg_regno (inner_hard_regno, innermode,
					 SUBREG_BYTE (operand), mode)) < 0
	       || ((hard_regno_nregs (inner_hard_regno, innermode)
		    < hard_regno_nregs (hard_regno, mode))
		   && (regclass = lra_get_allocno_class (REGNO (reg)))
		   && (type != OP_IN
		       || !in_hard_reg_set_p (reg_class_contents[regclass],
					      mode, hard_regno)
		       || overlaps_hard_reg_set_p (lra_no_alloc_regs,
						   mode, hard_regno)))))
    {
      /* The class will be defined later in curr_insn_transform.  */
      enum reg_class rclass
	= (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);

      if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
			  rclass, NULL,
			  true, false, "paradoxical subreg", &new_reg))
	{
	  rtx subreg;
	  bool insert_before, insert_after;

	  PUT_MODE (new_reg, mode);
	  subreg = gen_lowpart_SUBREG (innermode, new_reg);
	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	  insert_before = (type != OP_OUT);
	  insert_after = (type != OP_IN);
	  insert_move_for_subreg (insert_before ? &before : NULL,
				  insert_after ? &after : NULL,
				  reg, subreg);
	}
      SUBREG_REG (operand) = new_reg;
      lra_process_new_insns (curr_insn, before, after,
			     "Inserting paradoxical subreg reload");
      return true;
    }
  return false;
}
2039 :
2040 : /* Return TRUE if X refers for a hard register from SET. */
2041 : static bool
2042 406420 : uses_hard_regs_p (rtx x, HARD_REG_SET set)
2043 : {
2044 406420 : int i, j, x_hard_regno;
2045 406420 : machine_mode mode;
2046 406420 : const char *fmt;
2047 406420 : enum rtx_code code;
2048 :
2049 406420 : if (x == NULL_RTX)
2050 : return false;
2051 406420 : code = GET_CODE (x);
2052 406420 : mode = GET_MODE (x);
2053 :
2054 406420 : if (code == SUBREG)
2055 : {
2056 : /* For all SUBREGs we want to check whether the full multi-register
2057 : overlaps the set. For normal SUBREGs this means 'get_hard_regno' of
2058 : the inner register, for paradoxical SUBREGs this means the
2059 : 'get_hard_regno' of the full SUBREG and for complete SUBREGs either is
2060 : fine. Use the wider mode for all cases. */
2061 2707 : rtx subreg = SUBREG_REG (x);
2062 2707 : mode = wider_subreg_mode (x);
2063 2707 : if (mode == GET_MODE (subreg))
2064 : {
2065 1671 : x = subreg;
2066 1671 : code = GET_CODE (x);
2067 : }
2068 : }
2069 :
2070 406420 : if (REG_P (x) || SUBREG_P (x))
2071 : {
2072 265058 : x_hard_regno = get_hard_regno (x);
2073 265058 : return (x_hard_regno >= 0
2074 265058 : && overlaps_hard_reg_set_p (set, mode, x_hard_regno));
2075 : }
2076 141362 : fmt = GET_RTX_FORMAT (code);
2077 366697 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2078 : {
2079 227337 : if (fmt[i] == 'e')
2080 : {
2081 109904 : if (uses_hard_regs_p (XEXP (x, i), set))
2082 : return true;
2083 : }
2084 117433 : else if (fmt[i] == 'E')
2085 : {
2086 4398 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2087 3980 : if (uses_hard_regs_p (XVECEXP (x, i, j), set))
2088 : return true;
2089 : }
2090 : }
2091 : return false;
2092 : }
2093 :
2094 : /* Return true if OP is a spilled pseudo. */
2095 : static inline bool
2096 80676160 : spilled_pseudo_p (rtx op)
2097 : {
2098 80676160 : return (REG_P (op)
2099 80676160 : && REGNO (op) >= FIRST_PSEUDO_REGISTER && in_mem_p (REGNO (op)));
2100 : }
2101 :
2102 : /* Return true if X is a general constant. */
2103 : static inline bool
2104 7810130 : general_constant_p (rtx x)
2105 : {
2106 7810130 : return CONSTANT_P (x) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (x));
2107 : }
2108 :
2109 : static bool
2110 24757451 : reg_in_class_p (rtx reg, enum reg_class cl)
2111 : {
2112 24757451 : if (cl == NO_REGS)
2113 1105033 : return get_reg_class (REGNO (reg)) == NO_REGS;
2114 23652418 : return in_class_p (reg, cl, NULL);
2115 : }
2116 :
2117 : /* Return true if SET of RCLASS contains no hard regs which can be
2118 : used in MODE. */
2119 : static bool
2120 3857799 : prohibited_class_reg_set_mode_p (enum reg_class rclass,
2121 : HARD_REG_SET &set,
2122 : machine_mode mode)
2123 : {
2124 3857799 : HARD_REG_SET temp;
2125 :
2126 7715598 : lra_assert (hard_reg_set_subset_p (reg_class_contents[rclass], set));
2127 3857799 : temp = set & ~lra_no_alloc_regs;
2128 3857799 : return (hard_reg_set_subset_p
2129 3857799 : (temp, ira_prohibited_class_mode_regs[rclass][mode]));
2130 : }
2131 :
2132 :
2133 : /* Used to check validity info about small class input operands. It
2134 : should be incremented at start of processing an insn
2135 : alternative. */
2136 : static unsigned int curr_small_class_check = 0;
2137 :
2138 : /* Update number of used inputs of class OP_CLASS for operand NOP
2139 : of alternative NALT. Return true if we have more such class operands
2140 : than the number of available regs. */
2141 : static bool
2142 393445072 : update_and_check_small_class_inputs (int nop, int nalt,
2143 : enum reg_class op_class)
2144 : {
2145 393445072 : static unsigned int small_class_check[LIM_REG_CLASSES];
2146 393445072 : static int small_class_input_nums[LIM_REG_CLASSES];
2147 :
2148 390575057 : if (SMALL_REGISTER_CLASS_P (op_class)
2149 : /* We are interesting in classes became small because of fixing
2150 : some hard regs, e.g. by an user through GCC options. */
2151 2977404 : && hard_reg_set_intersect_p (reg_class_contents[op_class],
2152 2977404 : ira_no_alloc_regs)
2153 393445111 : && (curr_static_id->operand[nop].type != OP_OUT
2154 33 : || TEST_BIT (curr_static_id->operand[nop].early_clobber_alts, nalt)))
2155 : {
2156 6 : if (small_class_check[op_class] == curr_small_class_check)
2157 0 : small_class_input_nums[op_class]++;
2158 : else
2159 : {
2160 6 : small_class_check[op_class] = curr_small_class_check;
2161 6 : small_class_input_nums[op_class] = 1;
2162 : }
2163 6 : if (small_class_input_nums[op_class] > ira_class_hard_regs_num[op_class])
2164 : return true;
2165 : }
2166 : return false;
2167 : }
2168 :
2169 : /* Print operand constraints for alternative ALT_NUMBER of the current
2170 : insn. */
2171 : static void
2172 4590 : print_curr_insn_alt (int alt_number)
2173 : {
2174 15917 : for (int i = 0; i < curr_static_id->n_operands; i++)
2175 : {
2176 11327 : const char *p = (curr_static_id->operand_alternative
2177 11327 : [alt_number * curr_static_id->n_operands + i].constraint);
2178 11327 : if (*p == '\0')
2179 220 : continue;
2180 11107 : fprintf (lra_dump_file, " (%d) ", i);
2181 39435 : for (; *p != '\0' && *p != ',' && *p != '#'; p++)
2182 17221 : fputc (*p, lra_dump_file);
2183 : }
2184 4590 : }
2185 :
2186 : /* Major function to choose the current insn alternative and what
2187 : operands should be reloaded and how. If ONLY_ALTERNATIVE is not
2188 : negative we should consider only this alternative. Return false if
2189 : we cannot choose the alternative or find how to reload the
2190 : operands. */
2191 : static bool
2192 89633892 : process_alt_operands (int only_alternative)
2193 : {
2194 89633892 : bool ok_p = false;
2195 89633892 : int nop, overall, nalt;
2196 89633892 : int n_alternatives = curr_static_id->n_alternatives;
2197 89633892 : int n_operands = curr_static_id->n_operands;
2198 : /* LOSERS counts the operands that don't fit this alternative and
2199 : would require loading. */
2200 89633892 : int losers;
2201 89633892 : int addr_losers;
2202 : /* REJECT is a count of how undesirable this alternative says it is
2203 : if any reloading is required. If the alternative matches exactly
2204 : then REJECT is ignored, but otherwise it gets this much counted
2205 : against it in addition to the reloading needed. */
2206 89633892 : int reject;
2207 : /* This is defined by '!' or '?' alternative constraint and added to
2208 : reject. But in some cases it can be ignored. */
2209 89633892 : int static_reject;
2210 89633892 : int op_reject;
2211 : /* The number of elements in the following array. */
2212 89633892 : int early_clobbered_regs_num;
2213 : /* Numbers of operands which are early clobber registers. */
2214 89633892 : int early_clobbered_nops[MAX_RECOG_OPERANDS];
2215 89633892 : enum reg_class curr_alt[MAX_RECOG_OPERANDS];
2216 89633892 : enum reg_class all_this_alternative;
2217 89633892 : int all_used_nregs, all_reload_nregs;
2218 89633892 : HARD_REG_SET curr_alt_set[MAX_RECOG_OPERANDS];
2219 89633892 : HARD_REG_SET curr_alt_exclude_start_hard_regs[MAX_RECOG_OPERANDS];
2220 89633892 : bool curr_alt_match_win[MAX_RECOG_OPERANDS];
2221 89633892 : bool curr_alt_win[MAX_RECOG_OPERANDS];
2222 89633892 : bool curr_alt_offmemok[MAX_RECOG_OPERANDS];
2223 89633892 : int curr_alt_matches[MAX_RECOG_OPERANDS];
2224 : /* The number of elements in the following array. */
2225 89633892 : int curr_alt_dont_inherit_ops_num;
2226 : /* Numbers of operands whose reload pseudos should not be inherited. */
2227 89633892 : int curr_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
2228 89633892 : bool curr_reuse_alt_p;
2229 : /* True if output stack pointer reload should be generated for the current
2230 : alternative. */
2231 89633892 : bool curr_alt_out_sp_reload_p;
2232 89633892 : bool curr_alt_class_change_p;
2233 89633892 : rtx op;
2234 : /* The register when the operand is a subreg of register, otherwise the
2235 : operand itself. */
2236 89633892 : rtx no_subreg_reg_operand[MAX_RECOG_OPERANDS];
2237 : /* The register if the operand is a register or subreg of register,
2238 : otherwise NULL. */
2239 89633892 : rtx operand_reg[MAX_RECOG_OPERANDS];
2240 89633892 : int hard_regno[MAX_RECOG_OPERANDS];
2241 89633892 : machine_mode biggest_mode[MAX_RECOG_OPERANDS];
2242 89633892 : int reload_nregs, reload_sum;
2243 89633892 : bool costly_p;
2244 89633892 : enum reg_class cl;
2245 89633892 : const HARD_REG_SET *cl_filter;
2246 89633892 : HARD_REG_SET hard_reg_constraint;
2247 :
2248 : /* Calculate some data common for all alternatives to speed up the
2249 : function. */
2250 297965428 : for (nop = 0; nop < n_operands; nop++)
2251 : {
2252 208331536 : rtx reg;
2253 :
2254 208331536 : op = no_subreg_reg_operand[nop] = *curr_id->operand_loc[nop];
2255 : /* The real hard regno of the operand after the allocation. */
2256 208331536 : hard_regno[nop] = get_hard_regno (op);
2257 :
2258 208331536 : operand_reg[nop] = reg = op;
2259 208331536 : biggest_mode[nop] = GET_MODE (op);
2260 208331536 : if (GET_CODE (op) == SUBREG)
2261 : {
2262 4069399 : biggest_mode[nop] = wider_subreg_mode (op);
2263 4069399 : operand_reg[nop] = reg = SUBREG_REG (op);
2264 : }
2265 208331536 : if (! REG_P (reg))
2266 88110303 : operand_reg[nop] = NULL_RTX;
2267 120221233 : else if (REGNO (reg) >= FIRST_PSEUDO_REGISTER
2268 141117333 : || ((int) REGNO (reg)
2269 20896100 : == lra_get_elimination_hard_regno (REGNO (reg))))
2270 117302269 : no_subreg_reg_operand[nop] = reg;
2271 : else
2272 2918964 : operand_reg[nop] = no_subreg_reg_operand[nop]
2273 : /* Just use natural mode for elimination result. It should
2274 : be enough for extra constraints hooks. */
2275 2918964 : = regno_reg_rtx[hard_regno[nop]];
2276 : }
2277 :
2278 : /* The constraints are made of several alternatives. Each operand's
2279 : constraint looks like foo,bar,... with commas separating the
2280 : alternatives. The first alternatives for all operands go
2281 : together, the second alternatives go together, etc.
2282 :
2283 : First loop over alternatives. */
2284 89633892 : alternative_mask preferred = curr_id->preferred_alternatives;
2285 89633892 : if (only_alternative >= 0)
2286 969661 : preferred &= ALTERNATIVE_BIT (only_alternative);
2287 :
2288 89633892 : bool prefer_memory_p = false;
2289 89633994 : repeat:
2290 368751587 : for (nalt = 0; nalt < n_alternatives; nalt++)
2291 : {
2292 : /* Loop over operands for one constraint alternative. */
2293 355033244 : if (!TEST_BIT (preferred, nalt))
2294 98248766 : continue;
2295 :
2296 256784478 : if (lra_dump_file != NULL)
2297 : {
2298 3403 : fprintf (lra_dump_file, " Considering alt=%d of insn %d: ",
2299 3403 : nalt, INSN_UID (curr_insn));
2300 3403 : print_curr_insn_alt (nalt);
2301 3403 : fprintf (lra_dump_file, "\n");
2302 : }
2303 :
2304 256784478 : bool matching_early_clobber[MAX_RECOG_OPERANDS];
2305 256784478 : curr_small_class_check++;
2306 256784478 : overall = losers = addr_losers = 0;
2307 256784478 : static_reject = reject = reload_nregs = reload_sum = 0;
2308 851551480 : for (nop = 0; nop < n_operands; nop++)
2309 : {
2310 594767002 : int inc = (curr_static_id
2311 594767002 : ->operand_alternative[nalt * n_operands + nop].reject);
2312 594767002 : if (lra_dump_file != NULL && inc != 0)
2313 53 : fprintf (lra_dump_file,
2314 : " Staticly defined alt reject+=%d\n", inc);
2315 594767002 : static_reject += inc;
2316 594767002 : matching_early_clobber[nop] = 0;
2317 : }
2318 : reject += static_reject;
2319 : early_clobbered_regs_num = 0;
2320 : curr_alt_out_sp_reload_p = false;
2321 : curr_reuse_alt_p = true;
2322 : curr_alt_class_change_p = false;
2323 : all_this_alternative = NO_REGS;
2324 : all_used_nregs = all_reload_nregs = 0;
2325 664797855 : for (nop = 0; nop < n_operands; nop++)
2326 : {
2327 530164865 : const char *p;
2328 530164865 : char *end;
2329 530164865 : int len, c, m, i, opalt_num, this_alternative_matches;
2330 530164865 : bool win, did_match, offmemok, early_clobber_p;
2331 : /* false => this operand can be reloaded somehow for this
2332 : alternative. */
2333 530164865 : bool badop;
2334 : /* true => this operand can be reloaded if the alternative
2335 : allows regs. */
2336 530164865 : bool winreg;
2337 : /* True if a constant forced into memory would be OK for
2338 : this operand. */
2339 530164865 : bool constmemok;
2340 530164865 : enum reg_class this_alternative, this_costly_alternative;
2341 530164865 : HARD_REG_SET this_alternative_set, this_costly_alternative_set;
2342 530164865 : HARD_REG_SET this_alternative_exclude_start_hard_regs;
2343 530164865 : bool this_alternative_match_win, this_alternative_win;
2344 530164865 : bool this_alternative_offmemok;
2345 530164865 : bool scratch_p;
2346 530164865 : machine_mode mode;
2347 530164865 : enum constraint_num cn;
2348 530164865 : bool class_change_p = false;
2349 :
2350 530164865 : opalt_num = nalt * n_operands + nop;
2351 530164865 : if (curr_static_id->operand_alternative[opalt_num].anything_ok)
2352 : {
2353 : /* Fast track for no constraints at all. */
2354 14568305 : curr_alt[nop] = NO_REGS;
2355 14568305 : CLEAR_HARD_REG_SET (curr_alt_set[nop]);
2356 14568305 : curr_alt_win[nop] = true;
2357 14568305 : curr_alt_match_win[nop] = false;
2358 14568305 : curr_alt_offmemok[nop] = false;
2359 14568305 : curr_alt_matches[nop] = -1;
2360 14568305 : continue;
2361 : }
2362 :
2363 515596560 : op = no_subreg_reg_operand[nop];
2364 515596560 : mode = curr_operand_mode[nop];
2365 :
2366 515596560 : win = did_match = winreg = offmemok = constmemok = false;
2367 515596560 : badop = true;
2368 :
2369 515596560 : early_clobber_p = false;
2370 515596560 : p = curr_static_id->operand_alternative[opalt_num].constraint;
2371 :
2372 515596560 : this_costly_alternative = this_alternative = NO_REGS;
2373 : /* We update set of possible hard regs besides its class
2374 : because reg class might be inaccurate. For example,
2375 : union of LO_REGS (l), HI_REGS(h), and STACK_REG(k) in ARM
2376 : is translated in HI_REGS because classes are merged by
2377 : pairs and there is no accurate intermediate class. */
2378 2062386240 : CLEAR_HARD_REG_SET (this_alternative_set);
2379 1546789680 : CLEAR_HARD_REG_SET (this_costly_alternative_set);
2380 515596560 : CLEAR_HARD_REG_SET (this_alternative_exclude_start_hard_regs);
2381 515596560 : this_alternative_win = false;
2382 515596560 : this_alternative_match_win = false;
2383 515596560 : this_alternative_offmemok = false;
2384 515596560 : this_alternative_matches = -1;
2385 :
2386 : /* An empty constraint should be excluded by the fast
2387 : track. */
2388 515596560 : lra_assert (*p != 0 && *p != ',');
2389 :
2390 : op_reject = 0;
2391 : /* Scan this alternative's specs for this operand; set WIN
2392 : if the operand fits any letter in this alternative.
2393 : Otherwise, clear BADOP if this operand could fit some
2394 : letter after reloads, or set WINREG if this operand could
2395 : fit after reloads provided the constraint allows some
2396 : registers. */
2397 : costly_p = false;
2398 1309325258 : do
2399 : {
2400 1309325258 : switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
2401 : {
2402 : case '\0':
2403 : len = 0;
2404 : break;
2405 492752189 : case ',':
2406 492752189 : c = '\0';
2407 492752189 : break;
2408 :
2409 176037 : case '&':
2410 176037 : early_clobber_p = true;
2411 176037 : break;
2412 :
2413 21771 : case '$':
2414 21771 : op_reject += LRA_MAX_REJECT;
2415 21771 : break;
2416 0 : case '^':
2417 0 : op_reject += LRA_LOSER_COST_FACTOR;
2418 0 : break;
2419 :
2420 0 : case '#':
2421 : /* Ignore rest of this alternative. */
2422 0 : c = '\0';
2423 0 : break;
2424 :
2425 56211127 : case '0': case '1': case '2': case '3': case '4':
2426 56211127 : case '5': case '6': case '7': case '8': case '9':
2427 56211127 : {
2428 56211127 : int m_hregno;
2429 56211127 : bool match_p;
2430 :
2431 56211127 : m = strtoul (p, &end, 10);
2432 56211127 : p = end;
2433 56211127 : len = 0;
2434 56211127 : lra_assert (nop > m);
2435 :
2436 : /* Reject matches if we don't know which operand is
2437 : bigger. This situation would arguably be a bug in
2438 : an .md pattern, but could also occur in a user asm. */
2439 168633381 : if (!ordered_p (GET_MODE_SIZE (biggest_mode[m]),
2440 56211127 : GET_MODE_SIZE (biggest_mode[nop])))
2441 : break;
2442 :
2443 : /* Don't match wrong asm insn operands for proper
2444 : diagnostic later. */
2445 56211127 : if (INSN_CODE (curr_insn) < 0
2446 32561 : && (curr_operand_mode[m] == BLKmode
2447 32560 : || curr_operand_mode[nop] == BLKmode)
2448 1 : && curr_operand_mode[m] != curr_operand_mode[nop])
2449 : break;
2450 :
2451 56211126 : m_hregno = get_hard_regno (*curr_id->operand_loc[m]);
2452 : /* We are supposed to match a previous operand.
2453 : If we do, we win if that one did. If we do
2454 : not, count both of the operands as losers.
2455 : (This is too conservative, since most of the
2456 : time only a single reload insn will be needed
2457 : to make the two operands win. As a result,
2458 : this alternative may be rejected when it is
2459 : actually desirable.) */
2460 56211126 : match_p = false;
2461 56211126 : if (operands_match_p (*curr_id->operand_loc[nop],
2462 56211126 : *curr_id->operand_loc[m], m_hregno))
2463 : {
2464 : /* We should reject matching of an early
2465 : clobber operand if the matching operand is
2466 : not dying in the insn. */
2467 14825351 : if (!TEST_BIT (curr_static_id->operand[m]
2468 : .early_clobber_alts, nalt)
2469 18786 : || operand_reg[nop] == NULL_RTX
2470 14844137 : || (find_regno_note (curr_insn, REG_DEAD,
2471 : REGNO (op))
2472 4654 : || REGNO (op) == REGNO (operand_reg[m])))
2473 14825351 : match_p = true;
2474 : }
2475 14825351 : if (match_p)
2476 : {
2477 : /* If we are matching a non-offsettable
2478 : address where an offsettable address was
2479 : expected, then we must reject this
2480 : combination, because we can't reload
2481 : it. */
2482 14825351 : if (curr_alt_offmemok[m]
2483 1483 : && MEM_P (*curr_id->operand_loc[m])
2484 0 : && curr_alt[m] == NO_REGS && ! curr_alt_win[m])
2485 0 : continue;
2486 : }
2487 : else
2488 : {
2489 : /* If the operands do not match and one
2490 : operand is INOUT, we can not match them.
2491 : Try other possibilities, e.g. other
2492 : alternatives or commutative operand
2493 : exchange. */
2494 41385775 : if (curr_static_id->operand[nop].type == OP_INOUT
2495 41385775 : || curr_static_id->operand[m].type == OP_INOUT)
2496 : break;
2497 : /* Operands don't match. For asm if the operands
2498 : are different user defined explicit hard
2499 : registers, then we cannot make them match
2500 : when one is early clobber operand. */
2501 41385360 : if ((REG_P (*curr_id->operand_loc[nop])
2502 25976617 : || SUBREG_P (*curr_id->operand_loc[nop]))
2503 15999821 : && (REG_P (*curr_id->operand_loc[m])
2504 204866 : || SUBREG_P (*curr_id->operand_loc[m]))
2505 15902973 : && INSN_CODE (curr_insn) < 0)
2506 : {
2507 590 : rtx nop_reg = *curr_id->operand_loc[nop];
2508 590 : if (SUBREG_P (nop_reg))
2509 0 : nop_reg = SUBREG_REG (nop_reg);
2510 590 : rtx m_reg = *curr_id->operand_loc[m];
2511 590 : if (SUBREG_P (m_reg))
2512 0 : m_reg = SUBREG_REG (m_reg);
2513 :
2514 590 : if (REG_P (nop_reg)
2515 590 : && HARD_REGISTER_P (nop_reg)
2516 0 : && REG_USERVAR_P (nop_reg)
2517 0 : && REG_P (m_reg)
2518 0 : && HARD_REGISTER_P (m_reg)
2519 590 : && REG_USERVAR_P (m_reg))
2520 : {
2521 : int i;
2522 :
2523 0 : for (i = 0; i < early_clobbered_regs_num; i++)
2524 0 : if (m == early_clobbered_nops[i])
2525 : break;
2526 0 : if (i < early_clobbered_regs_num
2527 0 : || early_clobber_p)
2528 : break;
2529 : }
2530 : }
2531 : /* Both operands must allow a reload register,
2532 : otherwise we cannot make them match. */
2533 41385360 : if (curr_alt[m] == NO_REGS)
2534 : break;
2535 : /* Retroactively mark the operand we had to
2536 : match as a loser, if it wasn't already and
2537 : it wasn't matched to a register constraint
2538 : (e.g it might be matched by memory). */
2539 41360434 : if (curr_alt_win[m]
2540 40519093 : && (operand_reg[m] == NULL_RTX
2541 40015692 : || hard_regno[m] < 0))
2542 : {
2543 1266569 : if (lra_dump_file != NULL)
2544 9 : fprintf
2545 9 : (lra_dump_file,
2546 : " %d Matched operand reload: "
2547 : "losers++\n", m);
2548 1266569 : losers++;
2549 1266569 : reload_nregs
2550 1266569 : += (ira_reg_class_max_nregs[curr_alt[m]]
2551 1266569 : [GET_MODE (*curr_id->operand_loc[m])]);
2552 : }
2553 :
2554 : /* Prefer matching earlyclobber alternative as
2555 : it results in less hard regs required for
2556 : the insn than a non-matching earlyclobber
2557 : alternative. */
2558 41360434 : if (TEST_BIT (curr_static_id->operand[m]
2559 : .early_clobber_alts, nalt))
2560 : {
2561 18099 : if (lra_dump_file != NULL)
2562 0 : fprintf
2563 0 : (lra_dump_file,
2564 : " %d Matching earlyclobber alt:"
2565 : " reject--\n",
2566 : nop);
2567 18099 : if (!matching_early_clobber[m])
2568 : {
2569 18099 : reject--;
2570 18099 : matching_early_clobber[m] = 1;
2571 : }
2572 : }
2573 : /* Otherwise we prefer no matching
2574 : alternatives because it gives more freedom
2575 : in RA. */
2576 41342335 : else if (operand_reg[nop] == NULL_RTX
2577 41342335 : || (find_regno_note (curr_insn, REG_DEAD,
2578 15973781 : REGNO (operand_reg[nop]))
2579 : == NULL_RTX))
2580 : {
2581 36419496 : if (lra_dump_file != NULL)
2582 912 : fprintf
2583 912 : (lra_dump_file,
2584 : " %d Matching alt: reject+=2\n",
2585 : nop);
2586 36419496 : reject += 2;
2587 : }
2588 : }
2589 : /* If we have to reload this operand and some
2590 : previous operand also had to match the same
2591 : thing as this operand, we don't know how to do
2592 : that. */
2593 56185785 : if (!match_p || !curr_alt_win[m])
2594 : {
2595 86866773 : for (i = 0; i < nop; i++)
2596 45399656 : if (curr_alt_matches[i] == m)
2597 : break;
2598 41467118 : if (i < nop)
2599 : break;
2600 : }
2601 : else
2602 : did_match = true;
2603 :
2604 56185784 : this_alternative_matches = m;
2605 : /* This can be fixed with reloads if the operand
2606 : we are supposed to match can be fixed with
2607 : reloads. */
2608 56185784 : badop = false;
2609 56185784 : this_alternative = curr_alt[m];
2610 56185784 : this_alternative_set = curr_alt_set[m];
2611 56185784 : this_alternative_exclude_start_hard_regs
2612 56185784 : = curr_alt_exclude_start_hard_regs[m];
2613 56185784 : winreg = this_alternative != NO_REGS;
2614 56185784 : break;
2615 : }
2616 :
2617 11656795 : case 'g':
2618 11656795 : if (MEM_P (op)
2619 7810130 : || general_constant_p (op)
2620 16207147 : || spilled_pseudo_p (op))
2621 : win = true;
2622 11656795 : if (REG_P (op) && prefer_memory_p)
2623 : {
2624 11656795 : badop = false;
2625 11656795 : offmemok = true;
2626 : }
2627 11656795 : cl = GENERAL_REGS;
2628 11656795 : cl_filter = nullptr;
2629 11656795 : goto reg;
2630 :
2631 1140 : case '{':
2632 1140 : {
2633 1140 : int regno = decode_hard_reg_constraint (p);
2634 1140 : gcc_assert (regno >= 0);
2635 1140 : cl = NO_REGS;
2636 1140 : int nregs = hard_regno_nregs (regno, mode);
2637 2280 : for (int i = 0; i < nregs; ++i)
2638 1140 : cl = reg_class_superunion[cl][REGNO_REG_CLASS (regno + i)];
2639 1140 : CLEAR_HARD_REG_SET (hard_reg_constraint);
2640 1140 : SET_HARD_REG_BIT (hard_reg_constraint, regno);
2641 1140 : cl_filter = &hard_reg_constraint;
2642 1140 : goto reg;
2643 : }
2644 :
2645 725661828 : default:
2646 725661828 : cn = lookup_constraint (p);
2647 725661828 : switch (get_constraint_type (cn))
2648 : {
2649 480118698 : case CT_REGISTER:
2650 480118698 : cl = reg_class_for_constraint (cn);
2651 353360294 : if (cl != NO_REGS)
2652 : {
2653 343915331 : cl_filter = get_register_filter (cn);
2654 343915331 : goto reg;
2655 : }
2656 : break;
2657 :
2658 2064002 : case CT_CONST_INT:
2659 2064002 : if (CONST_INT_P (op)
2660 2064002 : && insn_const_int_ok_for_constraint (INTVAL (op), cn))
2661 : win = true;
2662 : break;
2663 :
2664 110657317 : case CT_MEMORY:
2665 110657317 : case CT_RELAXED_MEMORY:
2666 110657317 : if (MEM_P (op)
2667 110657317 : && satisfies_memory_constraint_p (op, cn))
2668 : win = true;
2669 75097837 : else if (spilled_pseudo_p (op))
2670 45041967 : win = true;
2671 :
2672 : /* If we didn't already win, we can reload constants
2673 : via force_const_mem or put the pseudo value into
2674 : memory, or make other memory by reloading the
2675 : address like for 'o'. */
2676 115767380 : if (CONST_POOL_OK_P (mode, op)
2677 105547096 : || MEM_P (op) || REG_P (op)
2678 : /* We can restore the equiv insn by a
2679 : reload. */
2680 111230933 : || equiv_substition_p[nop])
2681 110622604 : badop = false;
2682 : constmemok = true;
2683 : offmemok = true;
2684 : break;
2685 :
2686 1686064 : case CT_ADDRESS:
2687 : /* An asm operand with an address constraint
2688 : that doesn't satisfy address_operand has
2689 : is_address cleared, so that we don't try to
2690 : make a non-address fit. */
2691 1686064 : if (!curr_static_id->operand[nop].is_address)
2692 : break;
2693 : /* If we didn't already win, we can reload the address
2694 : into a base register. */
2695 1686045 : if (satisfies_address_constraint_p (op, cn))
2696 1686045 : win = true;
2697 1686045 : cl = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2698 : ADDRESS, SCRATCH);
2699 1686045 : cl_filter = nullptr;
2700 1686045 : badop = false;
2701 1686045 : goto reg;
2702 :
2703 129935071 : case CT_FIXED_FORM:
2704 129935071 : if (constraint_satisfied_p (op, cn))
2705 1309325258 : win = true;
2706 : break;
2707 :
2708 1200676 : case CT_SPECIAL_MEMORY:
2709 1200676 : if (satisfies_memory_constraint_p (op, cn))
2710 : win = true;
2711 1027971 : else if (spilled_pseudo_p (op))
2712 : {
2713 1309325258 : curr_reuse_alt_p = false;
2714 1309325258 : win = true;
2715 : }
2716 : break;
2717 : }
2718 : break;
2719 :
2720 357259311 : reg:
2721 357259311 : if (mode == BLKmode)
2722 : break;
2723 357259293 : this_alternative = reg_class_subunion[this_alternative][cl];
2724 357259293 : if (hard_reg_set_subset_p (this_alternative_set,
2725 357259293 : reg_class_contents[cl]))
2726 357255668 : this_alternative_exclude_start_hard_regs
2727 357255668 : = ira_exclude_class_mode_regs[cl][mode];
2728 3625 : else if (!hard_reg_set_subset_p (reg_class_contents[cl],
2729 : this_alternative_set))
2730 3624 : this_alternative_exclude_start_hard_regs
2731 1071781503 : |= ira_exclude_class_mode_regs[cl][mode];
2732 357259293 : this_alternative_set |= reg_class_contents[cl];
2733 357259293 : if (cl_filter)
2734 2280 : this_alternative_exclude_start_hard_regs |= ~*cl_filter;
2735 357259293 : if (costly_p)
2736 : {
2737 21158574 : this_costly_alternative
2738 21158574 : = reg_class_subunion[this_costly_alternative][cl];
2739 21158574 : this_costly_alternative_set |= reg_class_contents[cl];
2740 : }
2741 357259293 : winreg = true;
2742 357259293 : if (REG_P (op))
2743 : {
2744 227447418 : tree decl;
2745 227447418 : if (hard_regno[nop] >= 0
2746 191628208 : && in_hard_reg_set_p (this_alternative_set,
2747 : mode, hard_regno[nop])
2748 173609464 : && (!cl_filter
2749 590 : || TEST_HARD_REG_BIT (*cl_filter,
2750 : hard_regno[nop]))
2751 401056876 : && ((REG_ATTRS (op) && (decl = REG_EXPR (op)) != NULL
2752 97073030 : && VAR_P (decl) && DECL_HARD_REGISTER (decl))
2753 173606201 : || !(TEST_HARD_REG_BIT
2754 173606201 : (this_alternative_exclude_start_hard_regs,
2755 : hard_regno[nop]))))
2756 : win = true;
2757 53837990 : else if (hard_regno[nop] < 0 && !prefer_memory_p)
2758 : {
2759 35819074 : if (in_class_p (op, this_alternative, NULL))
2760 : win = true;
2761 26923774 : else if (in_class_p (op, this_alternative, NULL, true))
2762 : {
2763 1309325258 : class_change_p = true;
2764 1309325258 : win = true;
2765 : }
2766 : }
2767 : }
2768 : break;
2769 : }
2770 1309325258 : if (c != ' ' && c != '\t')
2771 1309325258 : costly_p = c == '*';
2772 : }
2773 1309325258 : while ((p += len), c);
2774 :
2775 1031193120 : scratch_p = (operand_reg[nop] != NULL_RTX
2776 515596560 : && ira_former_scratch_p (REGNO (operand_reg[nop])));
2777 : /* Record which operands fit this alternative. */
2778 515596560 : if (win)
2779 : {
2780 277668326 : if (early_clobber_p
2781 277525594 : || curr_static_id->operand[nop].type != OP_OUT)
2782 : {
2783 121453029 : if (winreg)
2784 100406261 : all_used_nregs
2785 100406261 : += ira_reg_class_min_nregs[this_alternative][mode];
2786 121453029 : all_this_alternative
2787 121453029 : = (reg_class_subunion
2788 121453029 : [all_this_alternative][this_alternative]);
2789 : }
2790 277668326 : this_alternative_win = true;
2791 277668326 : if (class_change_p)
2792 : {
2793 267276 : curr_alt_class_change_p = true;
2794 267276 : if (lra_dump_file != NULL)
2795 10 : fprintf (lra_dump_file,
2796 : " %d Narrowing class: reject+=3\n",
2797 : nop);
2798 267276 : reject += 3;
2799 : }
2800 277668326 : if (operand_reg[nop] != NULL_RTX)
2801 : {
2802 193669565 : if (hard_regno[nop] >= 0)
2803 : {
2804 173553200 : if (in_hard_reg_set_p (this_costly_alternative_set,
2805 : mode, hard_regno[nop]))
2806 : {
2807 776044 : if (lra_dump_file != NULL)
2808 21 : fprintf (lra_dump_file,
2809 : " %d Costly set: reject++\n",
2810 : nop);
2811 776044 : reject++;
2812 : }
2813 : }
2814 : else
2815 : {
2816 : /* Prefer won reg to spilled pseudo under other
2817 : equal conditions for possible inheritance. */
2818 20116365 : if (! scratch_p)
2819 : {
2820 20111770 : if (lra_dump_file != NULL)
2821 59 : fprintf
2822 59 : (lra_dump_file,
2823 : " %d Non pseudo reload: reject++\n",
2824 : nop);
2825 20111770 : reject++;
2826 : }
2827 20116365 : if (in_class_p (operand_reg[nop],
2828 : this_costly_alternative, NULL, true))
2829 : {
2830 134295 : if (lra_dump_file != NULL)
2831 0 : fprintf
2832 0 : (lra_dump_file,
2833 : " %d Non pseudo costly reload:"
2834 : " reject++\n",
2835 : nop);
2836 134295 : reject++;
2837 : }
2838 : }
2839 : /* We simulate the behavior of old reload here.
2840 : Although scratches need hard registers and it
2841 : might result in spilling other pseudos, no reload
2842 : insns are generated for the scratches. So it
2843 : might cost something but probably less than old
2844 : reload pass believes. */
2845 193669565 : if (scratch_p)
2846 : {
2847 117053 : if (lra_dump_file != NULL)
2848 6 : fprintf (lra_dump_file,
2849 : " %d Scratch win: reject+=2\n",
2850 : nop);
2851 117053 : reject += 2;
2852 : }
2853 : }
2854 : }
2855 237928234 : else if (did_match)
2856 : this_alternative_match_win = true;
2857 : else
2858 : {
2859 223209567 : if (prefer_memory_p && offmemok)
2860 : {
2861 0 : winreg = false;
2862 0 : this_alternative = NO_REGS;
2863 : }
2864 :
2865 223209567 : int const_to_mem = 0;
2866 223209567 : bool no_regs_p;
2867 :
2868 223209567 : reject += op_reject;
2869 : /* Mark output reload of the stack pointer. */
2870 223209567 : if (op == stack_pointer_rtx
2871 56993 : && curr_static_id->operand[nop].type != OP_IN)
2872 223209567 : curr_alt_out_sp_reload_p = true;
2873 :
2874 : /* If this alternative asks for a specific reg class, see if there
2875 : is at least one allocatable register in that class. */
2876 223209567 : no_regs_p
2877 389237278 : = (this_alternative == NO_REGS
2878 223209567 : || (hard_reg_set_subset_p
2879 332055444 : (reg_class_contents[this_alternative],
2880 : lra_no_alloc_regs)));
2881 :
2882 : /* For asms, verify that the class for this alternative is possible
2883 : for the mode that is specified. */
2884 166027711 : if (!no_regs_p && INSN_CODE (curr_insn) < 0)
2885 : {
2886 : int i;
2887 69340 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2888 69338 : if (targetm.hard_regno_mode_ok (i, mode)
2889 69338 : && in_hard_reg_set_p (reg_class_contents[this_alternative],
2890 : mode, i))
2891 : break;
2892 20256 : if (i == FIRST_PSEUDO_REGISTER)
2893 223209567 : winreg = false;
2894 : }
2895 :
2896 : /* If this operand accepts a register, and if the
2897 : register class has at least one allocatable register,
2898 : then this operand can be reloaded. */
2899 223209567 : if (winreg && !no_regs_p)
2900 : badop = false;
2901 :
2902 57181858 : if (badop)
2903 : {
2904 47849593 : if (lra_dump_file != NULL)
2905 606 : fprintf (lra_dump_file,
2906 : " Bad operand -- refuse\n");
2907 122151488 : goto fail;
2908 : }
2909 :
2910 175359974 : if (this_alternative != NO_REGS)
2911 : {
2912 166027710 : HARD_REG_SET available_regs
2913 166027710 : = (reg_class_contents[this_alternative]
2914 166027710 : & ~((ira_prohibited_class_mode_regs
2915 166027710 : [this_alternative][mode])
2916 166027710 : | lra_no_alloc_regs));
2917 332055420 : if (!hard_reg_set_empty_p (available_regs))
2918 : {
2919 166026277 : if (early_clobber_p
2920 165992972 : || curr_static_id->operand[nop].type != OP_OUT)
2921 : {
2922 87063891 : all_reload_nregs
2923 87063891 : += ira_reg_class_min_nregs[this_alternative][mode];
2924 87063891 : all_this_alternative
2925 87063891 : = (reg_class_subunion
2926 87063891 : [all_this_alternative][this_alternative]);
2927 : }
2928 : }
2929 : else
2930 : {
2931 : /* There are no hard regs holding a value of given
2932 : mode. */
2933 1433 : if (offmemok)
2934 : {
2935 170 : this_alternative = NO_REGS;
2936 170 : if (lra_dump_file != NULL)
2937 0 : fprintf (lra_dump_file,
2938 : " %d Using memory because of"
2939 : " a bad mode: reject+=2\n",
2940 : nop);
2941 170 : reject += 2;
2942 : }
2943 : else
2944 : {
2945 1263 : if (lra_dump_file != NULL)
2946 0 : fprintf (lra_dump_file,
2947 : " Wrong mode -- refuse\n");
2948 1263 : goto fail;
2949 : }
2950 : }
2951 : }
2952 :
2953 : /* If not assigned pseudo has a class which a subset of
2954 : required reg class, it is a less costly alternative
2955 : as the pseudo still can get a hard reg of necessary
2956 : class. */
2957 166026447 : if (! no_regs_p && REG_P (op) && hard_regno[nop] < 0
2958 21531481 : && (cl = get_reg_class (REGNO (op))) != NO_REGS
2959 178440697 : && ira_class_subset_p[this_alternative][cl])
2960 : {
2961 1059 : if (lra_dump_file != NULL)
2962 0 : fprintf
2963 0 : (lra_dump_file,
2964 : " %d Super set class reg: reject-=3\n", nop);
2965 1059 : reject -= 3;
2966 : }
2967 :
2968 175358711 : this_alternative_offmemok = offmemok;
2969 175358711 : if (this_costly_alternative != NO_REGS)
2970 : {
2971 18993956 : if (lra_dump_file != NULL)
2972 25 : fprintf (lra_dump_file,
2973 : " %d Costly loser: reject++\n", nop);
2974 18993956 : reject++;
2975 : }
2976 : /* If the operand is dying, has a matching constraint,
2977 : and satisfies constraints of the matched operand
2978 : which failed to satisfy the own constraints, most probably
2979 : the reload for this operand will be gone. */
2980 175358711 : if (this_alternative_matches >= 0
2981 41449767 : && !curr_alt_win[this_alternative_matches]
2982 947551 : && REG_P (op)
2983 692154 : && find_regno_note (curr_insn, REG_DEAD, REGNO (op))
2984 176069699 : && (hard_regno[nop] >= 0
2985 373583 : ? in_hard_reg_set_p (this_alternative_set,
2986 : mode, hard_regno[nop])
2987 36178 : : in_class_p (op, this_alternative, NULL)))
2988 : {
2989 224534 : if (lra_dump_file != NULL)
2990 1 : fprintf
2991 1 : (lra_dump_file,
2992 : " %d Dying matched operand reload: reject++\n",
2993 : nop);
2994 224534 : reject++;
2995 : }
2996 : else
2997 : {
2998 : /* Strict_low_part requires to reload the register
2999 : not the sub-register. In this case we should
3000 : check that a final reload hard reg can hold the
3001 : value mode. */
3002 175134177 : if (curr_static_id->operand[nop].strict_low
3003 101 : && REG_P (op)
3004 94 : && hard_regno[nop] < 0
3005 68 : && GET_CODE (*curr_id->operand_loc[nop]) == SUBREG
3006 68 : && ira_class_hard_regs_num[this_alternative] > 0
3007 175134245 : && (!targetm.hard_regno_mode_ok
3008 68 : (ira_class_hard_regs[this_alternative][0],
3009 68 : GET_MODE (*curr_id->operand_loc[nop]))))
3010 : {
3011 0 : if (lra_dump_file != NULL)
3012 0 : fprintf
3013 0 : (lra_dump_file,
3014 : " Strict low subreg reload -- refuse\n");
3015 0 : goto fail;
3016 : }
3017 175134177 : if (lra_dump_file != NULL)
3018 2177 : fprintf
3019 2177 : (lra_dump_file,
3020 : " %d Operand reload: losers++\n", nop);
3021 175134177 : losers++;
3022 : }
3023 175358711 : if (operand_reg[nop] != NULL_RTX
3024 : /* Output operands and matched input operands are
3025 : not inherited. The following conditions do not
3026 : exactly describe the previous statement but they
3027 : are pretty close. */
3028 62897059 : && curr_static_id->operand[nop].type != OP_OUT
3029 27679205 : && (this_alternative_matches < 0
3030 16064521 : || curr_static_id->operand[nop].type != OP_IN))
3031 : {
3032 11614684 : int last_reload = (lra_reg_info[ORIGINAL_REGNO
3033 11614684 : (operand_reg[nop])]
3034 11614684 : .last_reload);
3035 :
3036 : /* The value of reload_sum has sense only if we
3037 : process insns in their order. It happens only on
3038 : the first constraints sub-pass when we do most of
3039 : reload work. */
3040 11614684 : if (lra_constraint_iter == 1 && last_reload > bb_reload_num)
3041 2517276 : reload_sum += last_reload - bb_reload_num;
3042 : }
3043 : /* If this is a constant that is reloaded into the
3044 : desired class by copying it to memory first, count
3045 : that as another reload. This is consistent with
3046 : other code and is required to avoid choosing another
3047 : alternative when the constant is moved into memory.
3048 : Note that the test here is precisely the same as in
3049 : the code below that calls force_const_mem. */
3050 225541554 : if (CONST_POOL_OK_P (mode, op)
3051 225541619 : && ((targetm.preferred_reload_class
3052 50182908 : (op, this_alternative) == NO_REGS)
3053 48631298 : || no_input_reloads_p))
3054 : {
3055 1551610 : const_to_mem = 1;
3056 1551610 : if (! no_regs_p)
3057 : {
3058 709134 : if (lra_dump_file != NULL)
3059 0 : fprintf
3060 0 : (lra_dump_file,
3061 : " %d Constant reload through memory: "
3062 : "losers++\n", nop);
3063 709134 : losers++;
3064 : }
3065 : }
3066 :
3067 : /* Alternative loses if it requires a type of reload not
3068 : permitted for this insn. We can always reload
3069 : objects with a REG_UNUSED note. */
3070 175358711 : if ((curr_static_id->operand[nop].type != OP_IN
3071 85087153 : && no_output_reloads_p
3072 0 : && ! find_reg_note (curr_insn, REG_UNUSED, op)
3073 0 : && ! scratch_p)
3074 175358711 : || (curr_static_id->operand[nop].type != OP_OUT
3075 90271752 : && no_input_reloads_p && ! const_to_mem)
3076 350717422 : || (this_alternative_matches >= 0
3077 41449767 : && (no_input_reloads_p
3078 41449767 : || (no_output_reloads_p
3079 0 : && (curr_static_id->operand
3080 0 : [this_alternative_matches].type != OP_IN)
3081 0 : && ! find_reg_note (curr_insn, REG_UNUSED,
3082 : no_subreg_reg_operand
3083 0 : [this_alternative_matches])
3084 0 : && ! scratch_p))))
3085 : {
3086 0 : if (lra_dump_file != NULL)
3087 0 : fprintf
3088 0 : (lra_dump_file,
3089 : " No input/output reload -- refuse\n");
3090 0 : goto fail;
3091 : }
3092 :
3093 : /* Alternative loses if it required class pseudo cannot
3094 : hold value of required mode. Such insns can be
3095 : described by insn definitions with mode iterators. */
3096 175358711 : if (GET_MODE (*curr_id->operand_loc[nop]) != VOIDmode
3097 126567745 : && ! hard_reg_set_empty_p (this_alternative_set)
3098 : /* It is common practice for constraints to use a
3099 : class which does not have actually enough regs to
3100 : hold the value (e.g. x86 AREG for mode requiring
3101 : more one general reg). Therefore we have 2
3102 : conditions to check that the reload pseudo cannot
3103 : hold the mode value. */
3104 117977435 : && (!targetm.hard_regno_mode_ok
3105 117977435 : (ira_class_hard_regs[this_alternative][0],
3106 : GET_MODE (*curr_id->operand_loc[nop])))
3107 : /* The above condition is not enough as the first
3108 : reg in ira_class_hard_regs can be not aligned for
3109 : multi-words mode values. */
3110 175358711 : && (prohibited_class_reg_set_mode_p
3111 0 : (this_alternative, this_alternative_set,
3112 0 : GET_MODE (*curr_id->operand_loc[nop]))))
3113 : {
3114 0 : if (lra_dump_file != NULL)
3115 0 : fprintf (lra_dump_file,
3116 : " reload pseudo for op %d "
3117 : "cannot hold the mode value -- refuse\n",
3118 : nop);
3119 0 : goto fail;
3120 : }
3121 :
3122 : /* Check strong discouragement of reload of non-constant
3123 : into class THIS_ALTERNATIVE. */
3124 125175803 : if (! CONSTANT_P (op) && ! no_regs_p
3125 292044726 : && (targetm.preferred_reload_class
3126 116686015 : (op, this_alternative) == NO_REGS
3127 108220039 : || (curr_static_id->operand[nop].type == OP_OUT
3128 74549757 : && (targetm.preferred_output_reload_class
3129 74549757 : (op, this_alternative) == NO_REGS))))
3130 : {
3131 13001085 : if (offmemok && REG_P (op))
3132 : {
3133 792061 : if (lra_dump_file != NULL)
3134 0 : fprintf
3135 0 : (lra_dump_file,
3136 : " %d Spill pseudo into memory: reject+=3\n",
3137 : nop);
3138 792061 : reject += 3;
3139 : }
3140 : else
3141 : {
3142 12209024 : if (lra_dump_file != NULL)
3143 0 : fprintf
3144 0 : (lra_dump_file,
3145 : " %d Non-prefered reload: reject+=%d\n",
3146 : nop, LRA_MAX_REJECT);
3147 12209024 : reject += LRA_MAX_REJECT;
3148 : }
3149 : }
3150 :
3151 175358711 : if (! (MEM_P (op) && offmemok)
3152 175358639 : && ! (const_to_mem && constmemok))
3153 : {
3154 : /* We prefer to reload pseudos over reloading other
3155 : things, since such reloads may be able to be
3156 : eliminated later. So bump REJECT in other cases.
3157 : Don't do this in the case where we are forcing a
3158 : constant into memory and it will then win since
3159 : we don't want to have a different alternative
3160 : match then. */
3161 174399069 : if (! (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER))
3162 : {
3163 124548382 : if (lra_dump_file != NULL)
3164 1648 : fprintf
3165 1648 : (lra_dump_file,
3166 : " %d Non-pseudo reload: reject+=2\n",
3167 : nop);
3168 124548382 : reject += 2;
3169 : }
3170 :
3171 174399069 : if (! no_regs_p)
3172 165909325 : reload_nregs
3173 165909325 : += ira_reg_class_max_nregs[this_alternative][mode];
3174 :
3175 174399069 : if (SMALL_REGISTER_CLASS_P (this_alternative))
3176 : {
3177 875417 : if (lra_dump_file != NULL)
3178 45 : fprintf
3179 45 : (lra_dump_file,
3180 : " %d Small class reload: reject+=%d\n",
3181 : nop, LRA_LOSER_COST_FACTOR / 2);
3182 875417 : reject += LRA_LOSER_COST_FACTOR / 2;
3183 : }
3184 : }
3185 :
3186 : /* We are trying to spill pseudo into memory. It is
3187 : usually more costly than moving to a hard register
3188 : although it might takes the same number of
3189 : reloads.
3190 :
3191 : Non-pseudo spill may happen also. Suppose a target allows both
3192 : register and memory in the operand constraint alternatives,
3193 : then it's typical that an eliminable register has a substition
3194 : of "base + offset" which can either be reloaded by a simple
3195 : "new_reg <= base + offset" which will match the register
3196 : constraint, or a similar reg addition followed by further spill
3197 : to and reload from memory which will match the memory
3198 : constraint, but this memory spill will be much more costly
3199 : usually.
3200 :
3201 : Code below increases the reject for both pseudo and non-pseudo
3202 : spill. */
3203 175358711 : if (no_regs_p
3204 9332264 : && !(MEM_P (op) && offmemok)
3205 9332220 : && !(REG_P (op) && hard_regno[nop] < 0))
3206 : {
3207 8220050 : if (lra_dump_file != NULL)
3208 13 : fprintf
3209 20 : (lra_dump_file,
3210 : " %d Spill %spseudo into memory: reject+=3\n",
3211 : nop, REG_P (op) ? "" : "Non-");
3212 8220050 : reject += 3;
3213 8220050 : if (VECTOR_MODE_P (mode))
3214 : {
3215 : /* Spilling vectors into memory is usually more
3216 : costly as they contain big values. */
3217 361687 : if (lra_dump_file != NULL)
3218 0 : fprintf
3219 0 : (lra_dump_file,
3220 : " %d Spill vector pseudo: reject+=2\n",
3221 : nop);
3222 361687 : reject += 2;
3223 : }
3224 : }
3225 :
3226 : /* When we use an operand requiring memory in given
3227 : alternative, the insn should write *and* read the
3228 : value to/from memory, it is costly in comparison with
3229 : an insn alternative which does not use memory
3230 : (e.g. register or immediate operand). We exclude
3231 : memory operand for such case as we can satisfy the
3232 : memory constraints by reloading address. */
3233 9332264 : if (no_regs_p && offmemok && !MEM_P (op))
3234 : {
3235 9332068 : if (lra_dump_file != NULL)
3236 27 : fprintf
3237 27 : (lra_dump_file,
3238 : " Using memory insn operand %d: reject+=3\n",
3239 : nop);
3240 9332068 : reject += 3;
3241 : }
3242 :
3243 : /* If reload requires moving value through secondary
3244 : memory, it will need one more insn at least. */
3245 175358711 : if (this_alternative != NO_REGS
3246 166026277 : && REG_P (op) && (cl = get_reg_class (REGNO (op))) != NO_REGS
3247 211322022 : && ((curr_static_id->operand[nop].type != OP_OUT
3248 20374790 : && targetm.secondary_memory_needed (mode, cl,
3249 : this_alternative))
3250 32713534 : || (curr_static_id->operand[nop].type != OP_IN
3251 15588638 : && (targetm.secondary_memory_needed
3252 15588638 : (mode, this_alternative, cl)))))
3253 : {
3254 10790457 : if (lra_dump_file != NULL)
3255 16 : fprintf
3256 16 : (lra_dump_file,
3257 : " %d Secondary memory reload needed: "
3258 : "losers++\n", nop);
3259 10790457 : losers++;
3260 : }
3261 :
3262 175358711 : if (MEM_P (op) && offmemok)
3263 72 : addr_losers++;
3264 : else
3265 : {
3266 : /* Input reloads can be inherited more often than
3267 : output reloads can be removed, so penalize output
3268 : reloads. */
3269 175358639 : if (!REG_P (op) || curr_static_id->operand[nop].type != OP_IN)
3270 : {
3271 147679621 : if (lra_dump_file != NULL)
3272 1722 : fprintf
3273 1722 : (lra_dump_file,
3274 : " %d Non input pseudo reload: reject++\n",
3275 : nop);
3276 147679621 : reject++;
3277 : }
3278 :
3279 175358639 : if (curr_static_id->operand[nop].type == OP_INOUT)
3280 : {
3281 194 : if (lra_dump_file != NULL)
3282 0 : fprintf
3283 0 : (lra_dump_file,
3284 : " %d Input/Output reload: reject+=%d\n",
3285 : nop, LRA_LOSER_COST_FACTOR);
3286 194 : reject += LRA_LOSER_COST_FACTOR;
3287 : }
3288 : }
3289 : }
3290 :
3291 467745704 : if (early_clobber_p && ! scratch_p)
3292 : {
3293 164771 : if (lra_dump_file != NULL)
3294 4 : fprintf (lra_dump_file,
3295 : " %d Early clobber: reject++\n", nop);
3296 164771 : reject++;
3297 : }
3298 : /* ??? We check early clobbers after processing all operands
3299 : (see loop below) and there we update the costs more.
3300 : Should we update the cost (may be approximately) here
3301 : because of early clobber register reloads or it is a rare
3302 : or non-important thing to be worth to do it. */
3303 935491408 : overall = (losers * LRA_LOSER_COST_FACTOR + reject
3304 467745704 : - (addr_losers == losers ? static_reject : 0));
3305 467745704 : if ((best_losers == 0 || losers != 0) && best_overall < overall)
3306 : {
3307 74300632 : if (lra_dump_file != NULL)
3308 1036 : fprintf (lra_dump_file,
3309 : " overall=%d,losers=%d -- refuse\n",
3310 : overall, losers);
3311 74300632 : goto fail;
3312 : }
3313 :
3314 393445072 : if (update_and_check_small_class_inputs (nop, nalt,
3315 : this_alternative))
3316 : {
3317 0 : if (lra_dump_file != NULL)
3318 0 : fprintf (lra_dump_file,
3319 : " not enough small class regs -- refuse\n");
3320 0 : goto fail;
3321 : }
3322 393445072 : curr_alt[nop] = this_alternative;
3323 393445072 : curr_alt_set[nop] = this_alternative_set;
3324 393445072 : curr_alt_exclude_start_hard_regs[nop]
3325 393445072 : = this_alternative_exclude_start_hard_regs;
3326 393445072 : curr_alt_win[nop] = this_alternative_win;
3327 393445072 : curr_alt_match_win[nop] = this_alternative_match_win;
3328 393445072 : curr_alt_offmemok[nop] = this_alternative_offmemok;
3329 393445072 : curr_alt_matches[nop] = this_alternative_matches;
3330 :
3331 393445072 : if (this_alternative_matches >= 0
3332 393445072 : && !did_match && !this_alternative_win)
3333 13183958 : curr_alt_win[this_alternative_matches] = false;
3334 :
3335 393445072 : if (early_clobber_p && operand_reg[nop] != NULL_RTX)
3336 169065 : early_clobbered_nops[early_clobbered_regs_num++] = nop;
3337 : }
3338 :
3339 134632990 : if (curr_insn_set != NULL_RTX
3340 : /* Allow just two operands or three operands where the third
3341 : is a clobber. */
3342 130789296 : && (n_operands == 2
3343 28672593 : || (n_operands == 3
3344 26555049 : && GET_CODE (PATTERN (curr_insn)) == PARALLEL
3345 22539475 : && XVECLEN (PATTERN (curr_insn), 0) == 2
3346 22485979 : && GET_CODE (XVECEXP (PATTERN (curr_insn), 0, 1))
3347 : == CLOBBER))
3348 : /* Prevent processing non-move insns. */
3349 124519236 : && (GET_CODE (SET_SRC (curr_insn_set)) == SUBREG
3350 122754384 : || SET_SRC (curr_insn_set) == no_subreg_reg_operand[1])
3351 225741793 : && ((! curr_alt_win[0] && ! curr_alt_win[1]
3352 6017837 : && REG_P (no_subreg_reg_operand[0])
3353 2945599 : && REG_P (no_subreg_reg_operand[1])
3354 1216320 : && (reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
3355 1012400 : || reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0])))
3356 90584952 : || (! curr_alt_win[0] && curr_alt_win[1]
3357 27117260 : && REG_P (no_subreg_reg_operand[1])
3358 : /* Check that we reload memory not the memory
3359 : address. */
3360 15520135 : && ! (curr_alt_offmemok[0]
3361 383746 : && MEM_P (no_subreg_reg_operand[0]))
3362 15520135 : && reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0]))
3363 76451085 : || (curr_alt_win[0] && ! curr_alt_win[1]
3364 9482556 : && REG_P (no_subreg_reg_operand[0])
3365 : /* Check that we reload memory not the memory
3366 : address. */
3367 7008598 : && ! (curr_alt_offmemok[1]
3368 1013940 : && MEM_P (no_subreg_reg_operand[1]))
3369 7008596 : && reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
3370 6131869 : && (! CONST_POOL_OK_P (curr_operand_mode[1],
3371 : no_subreg_reg_operand[1])
3372 2256099 : || (targetm.preferred_reload_class
3373 2256099 : (no_subreg_reg_operand[1],
3374 : (enum reg_class) curr_alt[1]) != NO_REGS))
3375 : /* If it is a result of recent elimination in move
3376 : insn we can transform it into an add still by
3377 : using this alternative. */
3378 6088635 : && GET_CODE (no_subreg_reg_operand[1]) != PLUS
3379 : /* Likewise if the source has been replaced with an
3380 : equivalent value. This only happens once -- the reload
3381 : will use the equivalent value instead of the register it
3382 : replaces -- so there should be no danger of cycling. */
3383 5609364 : && !equiv_substition_p[1])))
3384 : {
3385 : /* We have a move insn and a new reload insn will be similar
3386 : to the current insn. We should avoid such situation as
3387 : it results in LRA cycling. */
3388 20238197 : if (lra_dump_file != NULL)
3389 239 : fprintf (lra_dump_file,
3390 : " Cycle danger: overall += LRA_MAX_REJECT\n");
3391 20238197 : overall += LRA_MAX_REJECT;
3392 : }
3393 134632990 : if (all_this_alternative != NO_REGS
3394 115284521 : && !SMALL_REGISTER_CLASS_P (all_this_alternative)
3395 114425833 : && all_used_nregs != 0 && all_reload_nregs != 0
3396 134632990 : && (all_used_nregs + all_reload_nregs + 1
3397 3988628 : >= ira_class_hard_regs_num[all_this_alternative]))
3398 : {
3399 366 : if (lra_dump_file != NULL)
3400 0 : fprintf
3401 0 : (lra_dump_file,
3402 : " Register starvation: overall += LRA_MAX_REJECT"
3403 : "(class=%s,avail=%d,used=%d,reload=%d)\n",
3404 : reg_class_names[all_this_alternative],
3405 : ira_class_hard_regs_num[all_this_alternative],
3406 : all_used_nregs, all_reload_nregs);
3407 366 : overall += LRA_MAX_REJECT;
3408 366 : if (!prefer_memory_p && INSN_CODE (curr_insn) < 0)
3409 : {
3410 : /* asm can permit memory and reg and can be not enough regs for
3411 : asm -- try now memory: */
3412 102 : prefer_memory_p = true;
3413 102 : if (lra_dump_file != NULL)
3414 0 : fprintf
3415 0 : (lra_dump_file,
3416 : " Trying now memory for operands\n");
3417 102 : goto repeat;
3418 : }
3419 : }
3420 134798473 : ok_p = true;
3421 : curr_alt_dont_inherit_ops_num = 0;
3422 134798473 : for (nop = 0; nop < early_clobbered_regs_num; nop++)
3423 : {
3424 165586 : int i, j, clobbered_hard_regno, first_conflict_j, last_conflict_j;
3425 165586 : HARD_REG_SET temp_set;
3426 :
3427 165586 : i = early_clobbered_nops[nop];
3428 165586 : if ((! curr_alt_win[i] && ! curr_alt_match_win[i])
3429 124778 : || hard_regno[i] < 0)
3430 164915 : continue;
3431 122950 : lra_assert (operand_reg[i] != NULL_RTX);
3432 : clobbered_hard_regno = hard_regno[i];
3433 122950 : CLEAR_HARD_REG_SET (temp_set);
3434 122950 : add_to_hard_reg_set (&temp_set, GET_MODE (*curr_id->operand_loc[i]),
3435 : clobbered_hard_regno);
3436 122950 : first_conflict_j = last_conflict_j = -1;
3437 620826 : for (j = 0; j < n_operands; j++)
3438 497877 : if (j == i
3439 : /* We don't want process insides of match_operator and
3440 : match_parallel because otherwise we would process
3441 : their operands once again generating a wrong
3442 : code. */
3443 374927 : || curr_static_id->operand[j].is_operator)
3444 125091 : continue;
3445 372786 : else if ((curr_alt_matches[j] == i && curr_alt_match_win[j])
3446 354378 : || (curr_alt_matches[i] == j && curr_alt_match_win[i]))
3447 18408 : continue;
3448 : /* If we don't reload j-th operand, check conflicts. */
3449 122892 : else if ((curr_alt_win[j] || curr_alt_match_win[j])
3450 415428 : && uses_hard_regs_p (*curr_id->operand_loc[j], temp_set))
3451 : {
3452 1145 : if (first_conflict_j < 0)
3453 671 : first_conflict_j = j;
3454 1145 : last_conflict_j = j;
3455 : /* Both the earlyclobber operand and conflicting operand
3456 : cannot both be user defined hard registers for asm.
3457 : Let curr_insn_transform diagnose it. */
3458 1145 : if (HARD_REGISTER_P (operand_reg[i])
3459 1 : && REG_USERVAR_P (operand_reg[i])
3460 1 : && operand_reg[j] != NULL_RTX
3461 1 : && HARD_REGISTER_P (operand_reg[j])
3462 1 : && REG_USERVAR_P (operand_reg[j])
3463 1146 : && INSN_CODE (curr_insn) < 0)
3464 1 : return false;
3465 : }
3466 122949 : if (last_conflict_j < 0)
3467 122279 : continue;
3468 :
3469 : /* If an earlyclobber operand conflicts with another non-matching
3470 : operand (ie, they have been assigned the same hard register),
3471 : then it is better to reload the other operand, as there may
3472 : exist yet another operand with a matching constraint associated
3473 : with the earlyclobber operand. However, if one of the operands
3474 : is an explicit use of a hard register, then we must reload the
3475 : other non-hard register operand. */
3476 670 : if (HARD_REGISTER_P (operand_reg[i])
3477 670 : || (first_conflict_j == last_conflict_j
3478 196 : && operand_reg[last_conflict_j] != NULL_RTX
3479 60 : && !curr_alt_match_win[last_conflict_j]
3480 60 : && !HARD_REGISTER_P (operand_reg[last_conflict_j])))
3481 : {
3482 60 : curr_alt_win[last_conflict_j] = false;
3483 60 : curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++]
3484 60 : = last_conflict_j;
3485 60 : losers++;
3486 60 : if (lra_dump_file != NULL)
3487 0 : fprintf
3488 0 : (lra_dump_file,
3489 : " %d Conflict early clobber reload: losers++\n",
3490 : i);
3491 : }
3492 : else
3493 : {
3494 : /* We need to reload early clobbered register and the
3495 : matched registers. */
3496 3044 : for (j = 0; j < n_operands; j++)
3497 2434 : if (curr_alt_matches[j] == i)
3498 : {
3499 2 : curr_alt_match_win[j] = false;
3500 2 : losers++;
3501 2 : if (lra_dump_file != NULL)
3502 0 : fprintf
3503 0 : (lra_dump_file,
3504 : " %d Matching conflict early clobber "
3505 : "reloads: losers++\n",
3506 : j);
3507 2 : overall += LRA_LOSER_COST_FACTOR;
3508 : }
3509 610 : if (! curr_alt_match_win[i])
3510 610 : curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++] = i;
3511 : else
3512 : {
3513 : /* Remember pseudos used for match reloads are never
3514 : inherited. */
3515 0 : lra_assert (curr_alt_matches[i] >= 0);
3516 0 : curr_alt_win[curr_alt_matches[i]] = false;
3517 : }
3518 610 : curr_alt_win[i] = curr_alt_match_win[i] = false;
3519 610 : losers++;
3520 610 : if (lra_dump_file != NULL)
3521 0 : fprintf
3522 0 : (lra_dump_file,
3523 : " %d Matched conflict early clobber reloads: "
3524 : "losers++\n",
3525 : i);
3526 : }
3527 : /* Early clobber was already reflected in REJECT. */
3528 670 : if (!matching_early_clobber[i])
3529 : {
3530 670 : lra_assert (reject > 0);
3531 670 : reject--;
3532 670 : matching_early_clobber[i] = 1;
3533 : }
3534 670 : overall += LRA_LOSER_COST_FACTOR - 1;
3535 : }
3536 134632887 : if (lra_dump_file != NULL)
3537 1761 : fprintf (lra_dump_file, " overall=%d,losers=%d,rld_nregs=%d\n",
3538 : overall, losers, reload_nregs);
3539 :
3540 : /* If this alternative can be made to work by reloading, and it
3541 : needs less reloading than the others checked so far, record
3542 : it as the chosen goal for reloading. */
3543 134632887 : if ((best_losers != 0 && losers == 0)
3544 59702697 : || (((best_losers == 0 && losers == 0)
3545 58704939 : || (best_losers != 0 && losers != 0))
3546 59702697 : && (best_overall > overall
3547 15510314 : || (best_overall == overall
3548 : /* If the cost of the reloads is the same,
3549 : prefer alternative which requires minimal
3550 : number of reload regs. */
3551 11558903 : && (reload_nregs < best_reload_nregs
3552 11453622 : || (reload_nregs == best_reload_nregs
3553 11410492 : && (best_reload_sum < reload_sum
3554 11389595 : || (best_reload_sum == reload_sum
3555 11365849 : && nalt < goal_alt_number))))))))
3556 : {
3557 387985066 : for (nop = 0; nop < n_operands; nop++)
3558 : {
3559 268490611 : goal_alt_win[nop] = curr_alt_win[nop];
3560 268490611 : goal_alt_match_win[nop] = curr_alt_match_win[nop];
3561 268490611 : goal_alt_matches[nop] = curr_alt_matches[nop];
3562 268490611 : goal_alt[nop] = curr_alt[nop];
3563 268490611 : goal_alt_exclude_start_hard_regs[nop]
3564 268490611 : = curr_alt_exclude_start_hard_regs[nop];
3565 268490611 : goal_alt_offmemok[nop] = curr_alt_offmemok[nop];
3566 : }
3567 119494455 : goal_alt_dont_inherit_ops_num = curr_alt_dont_inherit_ops_num;
3568 119494455 : goal_reuse_alt_p = curr_reuse_alt_p;
3569 119495112 : for (nop = 0; nop < curr_alt_dont_inherit_ops_num; nop++)
3570 657 : goal_alt_dont_inherit_ops[nop] = curr_alt_dont_inherit_ops[nop];
3571 119494455 : goal_alt_swapped = curr_swapped;
3572 119494455 : goal_alt_out_sp_reload_p = curr_alt_out_sp_reload_p;
3573 119494455 : best_overall = overall;
3574 119494455 : best_losers = losers;
3575 119494455 : best_reload_nregs = reload_nregs;
3576 119494455 : best_reload_sum = reload_sum;
3577 119494455 : goal_alt_number = nalt;
3578 : }
3579 134632887 : if (losers == 0 && !curr_alt_class_change_p)
3580 : /* Everything is satisfied. Do not process alternatives
3581 : anymore. */
3582 : break;
3583 58717339 : fail:
3584 180868827 : ;
3585 : }
3586 : return ok_p;
3587 : }
3588 :
3589 : /* Make reload base reg from address AD. */
3590 : static rtx
3591 0 : base_to_reg (struct address_info *ad)
3592 : {
3593 0 : enum reg_class cl;
3594 0 : int code = -1;
3595 0 : rtx new_inner = NULL_RTX;
3596 0 : rtx new_reg = NULL_RTX;
3597 0 : rtx_insn *insn;
3598 0 : rtx_insn *last_insn = get_last_insn();
3599 :
3600 0 : lra_assert (ad->disp == ad->disp_term);
3601 0 : cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
3602 : get_index_code (ad));
3603 0 : new_reg = lra_create_new_reg (GET_MODE (*ad->base), NULL_RTX, cl, NULL,
3604 : "base");
3605 0 : new_inner = simplify_gen_binary (PLUS, GET_MODE (new_reg), new_reg,
3606 0 : ad->disp_term == NULL
3607 : ? const0_rtx
3608 : : *ad->disp_term);
3609 0 : if (!valid_address_p (ad->mode, new_inner, ad->as))
3610 : return NULL_RTX;
3611 0 : insn = emit_insn (gen_rtx_SET (new_reg, *ad->base));
3612 0 : code = recog_memoized (insn);
3613 0 : if (code < 0)
3614 : {
3615 0 : delete_insns_since (last_insn);
3616 0 : return NULL_RTX;
3617 : }
3618 :
3619 : return new_inner;
3620 : }
3621 :
3622 : /* Make reload base reg + DISP from address AD. Return the new pseudo. */
3623 : static rtx
3624 39 : base_plus_disp_to_reg (struct address_info *ad, rtx disp)
3625 : {
3626 39 : enum reg_class cl;
3627 39 : rtx new_reg;
3628 :
3629 39 : lra_assert (ad->base == ad->base_term);
3630 39 : cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
3631 : get_index_code (ad));
3632 39 : new_reg = lra_create_new_reg (GET_MODE (*ad->base_term), NULL_RTX, cl, NULL,
3633 : "base + disp");
3634 39 : lra_emit_add (new_reg, *ad->base_term, disp);
3635 39 : return new_reg;
3636 : }
3637 :
3638 : /* Make reload of index part of address AD. Return the new
3639 : pseudo. */
3640 : static rtx
3641 0 : index_part_to_reg (struct address_info *ad, enum reg_class index_class)
3642 : {
3643 0 : rtx new_reg;
3644 :
3645 0 : new_reg = lra_create_new_reg (GET_MODE (*ad->index), NULL_RTX,
3646 : index_class, NULL, "index term");
3647 0 : expand_mult (GET_MODE (*ad->index), *ad->index_term,
3648 : GEN_INT (get_index_scale (ad)), new_reg, 1);
3649 0 : return new_reg;
3650 : }
3651 :
3652 : /* Return true if we can add a displacement to address AD, even if that
3653 : makes the address invalid. The fix-up code requires any new address
3654 : to be the sum of the BASE_TERM, INDEX and DISP_TERM fields. */
3655 : static bool
3656 19149 : can_add_disp_p (struct address_info *ad)
3657 : {
3658 19149 : return (!ad->autoinc_p
3659 19149 : && ad->segment == NULL
3660 19149 : && ad->base == ad->base_term
3661 38298 : && ad->disp == ad->disp_term);
3662 : }
3663 :
/* Make equiv substitution in address AD.  Return true if a substitution
   was made.  */
static bool
equiv_address_substitution (struct address_info *ad)
{
  rtx base_reg, new_base_reg, index_reg, new_index_reg, *base_term, *index_term;
  poly_int64 disp;
  HOST_WIDE_INT scale;
  bool change_p;

  /* Look through any subreg wrapping the base term and find the
     equivalence (after elimination) of the base register, if any.  */
  base_term = strip_subreg (ad->base_term);
  if (base_term == NULL)
    base_reg = new_base_reg = NULL_RTX;
  else
    {
      base_reg = *base_term;
      new_base_reg = get_equiv_with_elimination (base_reg, curr_insn);
    }
  /* Likewise for the index term.  */
  index_term = strip_subreg (ad->index_term);
  if (index_term == NULL)
    index_reg = new_index_reg = NULL_RTX;
  else
    {
      index_reg = *index_term;
      new_index_reg = get_equiv_with_elimination (index_reg, curr_insn);
    }
  /* Fast path: neither term has an equivalence -- nothing to do.  */
  if (base_reg == new_base_reg && index_reg == new_index_reg)
    return false;
  /* DISP accumulates any constant offset peeled off the substituted
     terms; it is applied to the address at the end.  */
  disp = 0;
  change_p = false;
  if (lra_dump_file != NULL)
    {
      fprintf (lra_dump_file, "Changing address in insn %d ",
	       INSN_UID (curr_insn));
      dump_value_slim (lra_dump_file, *ad->outer, 1);
    }
  if (base_reg != new_base_reg)
    {
      poly_int64 offset;
      if (REG_P (new_base_reg))
	{
	  /* Simple reg-for-reg substitution of the base.  */
	  *base_term = new_base_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_base_reg) == PLUS
	       && REG_P (XEXP (new_base_reg, 0))
	       && poly_int_rtx_p (XEXP (new_base_reg, 1), &offset)
	       && can_add_disp_p (ad))
	{
	  /* Equivalence is reg + constant: use the reg as the new base
	     and fold the constant into DISP.  */
	  disp += offset;
	  *base_term = XEXP (new_base_reg, 0);
	  change_p = true;
	}
      /* Keep a duplicated base term (e.g. in {pre,post}_modify) in
	 sync with the one we just rewrote.  */
      if (ad->base_term2 != NULL)
	*ad->base_term2 = *ad->base_term;
    }
  if (index_reg != new_index_reg)
    {
      poly_int64 offset;
      if (REG_P (new_index_reg))
	{
	  *index_term = new_index_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_index_reg) == PLUS
	       && REG_P (XEXP (new_index_reg, 0))
	       && poly_int_rtx_p (XEXP (new_index_reg, 1), &offset)
	       && can_add_disp_p (ad)
	       /* The peeled constant must be scaled by the index
		  scale; a zero scale makes that impossible.  */
	       && (scale = get_index_scale (ad)))
	{
	  disp += offset * scale;
	  *index_term = XEXP (new_index_reg, 0);
	  change_p = true;
	}
    }
  if (maybe_ne (disp, 0))
    {
      /* Apply the accumulated constant offset, either to the existing
	 displacement or to the whole inner address.  */
      if (ad->disp != NULL)
	*ad->disp = plus_constant (GET_MODE (*ad->inner), *ad->disp, disp);
      else
	{
	  *ad->inner = plus_constant (GET_MODE (*ad->inner), *ad->inner, disp);
	  /* The shape of the inner address changed; re-decompose.  */
	  update_address (ad);
	}
      change_p = true;
    }
  if (lra_dump_file != NULL)
    {
      if (! change_p)
	fprintf (lra_dump_file, " -- no change\n");
      else
	{
	  fprintf (lra_dump_file, " on equiv ");
	  dump_value_slim (lra_dump_file, *ad->outer, 1);
	  fprintf (lra_dump_file, "\n");
	}
    }
  return change_p;
}
3763 :
/* Skip all modifiers and whitespaces in constraint STR and return the
   result.  */
static const char *
skip_constraint_modifiers (const char *str)
{
  /* Advance past every modifier character ('+', '&', '=', '*', '$',
     '^', '%', '?', '!') and any blanks; stop at the first character
     that can start a real constraint (or at the terminator).  */
  while (*str == '+' || *str == '&' || *str == '=' || *str == '*'
	 || *str == ' ' || *str == '\t' || *str == '$' || *str == '^'
	 || *str == '%' || *str == '?' || *str == '!')
    str++;
  return str;
}
3778 :
3779 : /* Takes a string of 0 or more comma-separated constraints. When more
3780 : than one constraint is present, evaluate whether they all correspond
3781 : to a single, repeated constraint (e.g. "r,r") or whether we have
3782 : more than one distinct constraints (e.g. "r,m"). */
3783 : static bool
3784 162240184 : constraint_unique (const char *cstr)
3785 : {
3786 162240184 : enum constraint_num ca, cb;
3787 162240184 : ca = CONSTRAINT__UNKNOWN;
3788 320771123 : for (;;)
3789 : {
3790 320771123 : cstr = skip_constraint_modifiers (cstr);
3791 320771123 : if (*cstr == '\0' || *cstr == ',')
3792 : cb = CONSTRAINT_X;
3793 : else
3794 : {
3795 320771123 : cb = lookup_constraint (cstr);
3796 320771123 : if (cb == CONSTRAINT__UNKNOWN)
3797 : return false;
3798 308735816 : cstr += CONSTRAINT_LEN (cstr[0], cstr);
3799 : }
3800 : /* Handle the first iteration of the loop. */
3801 308735816 : if (ca == CONSTRAINT__UNKNOWN)
3802 : ca = cb;
3803 : /* Handle the general case of comparing ca with subsequent
3804 : constraints. */
3805 158399905 : else if (ca != cb)
3806 : return false;
3807 165992489 : if (*cstr == '\0')
3808 : return true;
3809 158530939 : if (*cstr == ',')
3810 87323741 : cstr += 1;
3811 : }
3812 : }
3813 :
/* Major function to make reloads for an address in operand NOP or
   check its correctness (If CHECK_ONLY_P is true).  The supported
   cases are:

   1) an address that existed before LRA started, at which point it
   must have been valid.  These addresses are subject to elimination
   and may have become invalid due to the elimination offset being out
   of range.

   2) an address created by forcing a constant to memory
   (force_const_to_mem).  The initial form of these addresses might
   not be valid, and it is this function's job to make them valid.

   3) a frame address formed from a register and a (possibly zero)
   constant offset.  As above, these addresses might not be valid and
   this function must make them so.

   Add reloads to the lists *BEFORE and *AFTER.  We might need to add
   reloads to *AFTER because of inc/dec, {pre, post} modify in the
   address.  Return true for any RTL change.

   The function is a helper function which does not produce all
   transformations (when CHECK_ONLY_P is false) which can be
   necessary.  It does just basic steps.  To do all necessary
   transformations use function process_address.  */
static bool
process_address_1 (int nop, bool check_only_p,
		   rtx_insn **before, rtx_insn **after)
{
  struct address_info ad;
  rtx new_reg;
  HOST_WIDE_INT scale;
  rtx op = *curr_id->operand_loc[nop];
  rtx mem = extract_mem_from_operand (op);
  const char *constraint;
  enum constraint_num cn;
  bool change_p = false;

  /* A BLKmode memory whose address is (scratch) carries no real
     address to process.  */
  if (MEM_P (mem)
      && GET_MODE (mem) == BLKmode
      && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return false;

  constraint
    = skip_constraint_modifiers (curr_static_id->operand[nop].constraint);
  if (IN_RANGE (constraint[0], '0', '9'))
    {
      /* A matching (numeric) constraint: use the constraint string of
	 the operand it duplicates.  */
      char *end;
      unsigned long dup = strtoul (constraint, &end, 10);
      constraint
	= skip_constraint_modifiers (curr_static_id->operand[dup].constraint);
    }
  cn = lookup_constraint (*constraint == '\0' ? "X" : constraint);
  /* If we have several alternatives or/and several constraints in an
     alternative and we can not say at this stage what constraint will be used,
     use unknown constraint.  The exception is an address constraint.  If
     operand has one address constraint, probably all others constraints are
     address ones.  */
  if (constraint[0] != '\0' && get_constraint_type (cn) != CT_ADDRESS
      && !constraint_unique (constraint))
    cn = CONSTRAINT__UNKNOWN;
  if (insn_extra_address_constraint (cn)
      /* When we find an asm operand with an address constraint that
	 doesn't satisfy address_operand to begin with, we clear
	 is_address, so that we don't try to make a non-address fit.
	 If the asm statement got this far, it's because other
	 constraints are available, and we'll use them, disregarding
	 the unsatisfiable address ones.  */
      && curr_static_id->operand[nop].is_address)
    decompose_lea_address (&ad, curr_id->operand_loc[nop]);
  /* Do not attempt to decompose arbitrary addresses generated by combine
     for asm operands with loose constraints, e.g 'X'.
     Need to extract memory from op for special memory constraint,
     i.e. bcst_mem_operand in i386 backend.  */
  else if (MEM_P (mem)
	   && !(INSN_CODE (curr_insn) < 0
		&& get_constraint_type (cn) == CT_FIXED_FORM
		&& constraint_satisfied_p (op, cn)))
    decompose_mem_address (&ad, mem);
  else if (GET_CODE (op) == SUBREG
	   && MEM_P (SUBREG_REG (op)))
    decompose_mem_address (&ad, SUBREG_REG (op));
  else
    return false;
  /* If INDEX_REG_CLASS is assigned to base_term already and isn't to
     index_term, swap them so to avoid assigning INDEX_REG_CLASS to both
     when INDEX_REG_CLASS is a single register class.  */
  enum reg_class index_cl = index_reg_class (curr_insn);
  if (ad.base_term != NULL
      && ad.index_term != NULL
      && ira_class_hard_regs_num[index_cl] == 1
      && REG_P (*ad.base_term)
      && REG_P (*ad.index_term)
      && in_class_p (*ad.base_term, index_cl, NULL)
      && ! in_class_p (*ad.index_term, index_cl, NULL))
    {
      std::swap (ad.base, ad.index);
      std::swap (ad.base_term, ad.index_term);
    }
  /* Substitute equivalences only when transforming, not when merely
     checking validity.  */
  if (! check_only_p)
    change_p = equiv_address_substitution (&ad);
  if (ad.base_term != NULL
      && (process_addr_reg
	  (ad.base_term, check_only_p, before,
	   /* For an autoinc base that does not die in this insn, the
	      updated value must be stored back after the insn.  */
	   (ad.autoinc_p
	    && !(REG_P (*ad.base_term)
		 && find_regno_note (curr_insn, REG_DEAD,
				     REGNO (*ad.base_term)) != NULL_RTX)
	    ? after : NULL),
	   base_reg_class (ad.mode, ad.as, ad.base_outer_code,
			   get_index_code (&ad), curr_insn))))
    {
      change_p = true;
      if (ad.base_term2 != NULL)
	*ad.base_term2 = *ad.base_term;
    }
  if (ad.index_term != NULL
      && process_addr_reg (ad.index_term, check_only_p,
			   before, NULL, index_cl))
    change_p = true;

  /* Target hooks sometimes don't treat extra-constraint addresses as
     legitimate address_operands, so handle them specially.  */
  if (insn_extra_address_constraint (cn)
      && satisfies_address_constraint_p (&ad, cn))
    return change_p;

  if (check_only_p)
    return change_p;

  /* There are four cases where the shape of *AD.INNER may now be invalid:

     1) the original address was valid, but either elimination or
     equiv_address_substitution was applied and that made
     the address invalid.

     2) the address is an invalid symbolic address created by
     force_const_to_mem.

     3) the address is a frame address with an invalid offset.

     4) the address is a frame address with an invalid base.

     All these cases involve a non-autoinc address, so there is no
     point revalidating other types.  */
  if (ad.autoinc_p || valid_address_p (op, &ad, cn))
    return change_p;

  /* Any index existed before LRA started, so we can assume that the
     presence and shape of the index is valid.  */
  push_to_sequence (*before);
  lra_assert (ad.disp == ad.disp_term);
  if (ad.base == NULL)
    {
      if (ad.index == NULL)
	{
	  rtx_insn *insn;
	  rtx_insn *last = get_last_insn ();
	  int code = -1;
	  enum reg_class cl = base_reg_class (ad.mode, ad.as,
					      SCRATCH, SCRATCH,
					      curr_insn);
	  rtx addr = *ad.inner;

	  new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, NULL, "addr");
	  if (HAVE_lo_sum)
	    {
	      /* addr => lo_sum (new_base, addr), case (2) above.  */
	      insn = emit_insn (gen_rtx_SET
				(new_reg,
				 gen_rtx_HIGH (Pmode, copy_rtx (addr))));
	      code = recog_memoized (insn);
	      if (code >= 0)
		{
		  *ad.inner = gen_rtx_LO_SUM (Pmode, new_reg, addr);
		  if (!valid_address_p (op, &ad, cn))
		    {
		      /* Try to put lo_sum into register.  */
		      insn = emit_insn (gen_rtx_SET
					(new_reg,
					 gen_rtx_LO_SUM (Pmode, new_reg, addr)));
		      code = recog_memoized (insn);
		      if (code >= 0)
			{
			  *ad.inner = new_reg;
			  if (!valid_address_p (op, &ad, cn))
			    {
			      /* Neither form worked: restore the
				 original address and fall through to
				 the plain-move strategy.  */
			      *ad.inner = addr;
			      code = -1;
			    }
			}

		    }
		}
	      if (code < 0)
		delete_insns_since (last);
	    }

	  if (code < 0)
	    {
	      /* addr => new_base, case (2) above.  */
	      lra_emit_move (new_reg, addr);

	      /* All insns emitted for the move must be recognizable;
		 otherwise give up, as the reload pass would.  */
	      for (insn = last == NULL_RTX ? get_insns () : NEXT_INSN (last);
		   insn != NULL_RTX;
		   insn = NEXT_INSN (insn))
		if (recog_memoized (insn) < 0)
		  break;
	      if (insn != NULL_RTX)
		{
		  /* Do nothing if we cannot generate right insns.
		     This is analogous to reload pass behavior.  */
		  delete_insns_since (last);
		  end_sequence ();
		  return false;
		}
	      *ad.inner = new_reg;
	    }
	}
      else
	{
	  /* index * scale + disp => new base + index * scale,
	     case (1) above.  */
	  enum reg_class cl = base_reg_class (ad.mode, ad.as, PLUS,
					      GET_CODE (*ad.index),
					      curr_insn);

	  lra_assert (index_cl != NO_REGS);
	  new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, NULL, "disp");
	  lra_emit_move (new_reg, *ad.disp);
	  *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
					   new_reg, *ad.index);
	}
    }
  else if (ad.index == NULL)
    {
      int regno;
      enum reg_class cl;
      rtx set;
      rtx_insn *insns, *last_insn;

      cl = base_reg_class (ad.mode, ad.as, ad.base_outer_code,
			   get_index_code (&ad), curr_insn);

      if (REG_P (*ad.base_term)
	  && ira_class_subset_p[get_reg_class (REGNO (*ad.base_term))][cl])
	/* It seems base reg is already in the base reg class and changing it
	   does not make a progress.  So reload the whole inner address.  */
	goto reload_inner_addr;

      /* Try to reload base into register only if the base is invalid
	 for the address but with valid offset, case (4) above.  */
      start_sequence ();
      new_reg = base_to_reg (&ad);

      /* base + disp => new base, cases (1) and (3) above.  */
      /* Another option would be to reload the displacement into an
	 index register.  However, postreload has code to optimize
	 address reloads that have the same base and different
	 displacements, so reloading into an index register would
	 not necessarily be a win.  */
      if (new_reg == NULL_RTX)
	{
	  /* See if the target can split the displacement into a
	     legitimate new displacement from a local anchor.  */
	  gcc_assert (ad.disp == ad.disp_term);
	  poly_int64 orig_offset;
	  rtx offset1, offset2;
	  if (poly_int_rtx_p (*ad.disp, &orig_offset)
	      && targetm.legitimize_address_displacement (&offset1, &offset2,
							  orig_offset,
							  ad.mode))
	    {
	      new_reg = base_plus_disp_to_reg (&ad, offset1);
	      new_reg = gen_rtx_PLUS (GET_MODE (new_reg), new_reg, offset2);
	    }
	  else
	    new_reg = base_plus_disp_to_reg (&ad, *ad.disp);
	}
      insns = get_insns ();
      last_insn = get_last_insn ();
      /* If we generated at least two insns, try last insn source as
	 an address.  If we succeed, we generate one less insn.  */
      if (REG_P (new_reg)
	  && last_insn != insns
	  && (set = single_set (last_insn)) != NULL_RTX
	  && GET_CODE (SET_SRC (set)) == PLUS
	  && REG_P (XEXP (SET_SRC (set), 0))
	  && CONSTANT_P (XEXP (SET_SRC (set), 1)))
	{
	  *ad.inner = SET_SRC (set);
	  if (valid_address_p (op, &ad, cn))
	    {
	      *ad.base_term = XEXP (SET_SRC (set), 0);
	      *ad.disp_term = XEXP (SET_SRC (set), 1);
	      regno = REGNO (*ad.base_term);
	      if (regno >= FIRST_PSEUDO_REGISTER
		  && cl != lra_get_allocno_class (regno))
		lra_change_class (regno, cl, " Change to", true);
	      new_reg = SET_SRC (set);
	      /* The final add is no longer needed.  */
	      delete_insns_since (PREV_INSN (last_insn));
	    }
	}
      end_sequence ();
      emit_insn (insns);
      *ad.inner = new_reg;
    }
  else if (ad.disp_term != NULL)
    {
      /* base + scale * index + disp => new base + scale * index,
	 case (1) above.  */
      gcc_assert (ad.disp == ad.disp_term);
      new_reg = base_plus_disp_to_reg (&ad, *ad.disp);
      *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
				       new_reg, *ad.index);
    }
  else if ((scale = get_index_scale (&ad)) == 1)
    {
      /* The last transformation to one reg will be made in
	 curr_insn_transform function.  */
      end_sequence ();
      return false;
    }
  else if (scale != 0)
    {
      /* base + scale * index => base + new_reg,
	 case (1) above.
	 Index part of address may become invalid.  For example, we
	 changed pseudo on the equivalent memory and a subreg of the
	 pseudo onto the memory of different mode for which the scale is
	 prohibited.  */
      new_reg = index_part_to_reg (&ad, index_cl);
      *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
				       *ad.base_term, new_reg);
    }
  else
    {
      enum reg_class cl;
      rtx addr;
    reload_inner_addr:
      /* Reload the entire inner address into a single new pseudo.  */
      cl = base_reg_class (ad.mode, ad.as, SCRATCH, SCRATCH, curr_insn);
      addr = *ad.inner;
      new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, NULL, "addr");
      /* addr => new_base.  */
      lra_emit_move (new_reg, addr);
      *ad.inner = new_reg;
    }
  *before = end_sequence ();
  return true;
}
4164 :
4165 : /* If CHECK_ONLY_P is false, do address reloads until it is necessary.
4166 : Use process_address_1 as a helper function. Return true for any
4167 : RTL changes.
4168 :
4169 : If CHECK_ONLY_P is true, just check address correctness. Return
4170 : false if the address is correct. */
4171 : static bool
4172 175298536 : process_address (int nop, bool check_only_p,
4173 : rtx_insn **before, rtx_insn **after)
4174 : {
4175 175298536 : bool res = false;
4176 : /* Use enough iterations to process all address parts: */
4177 175788789 : for (int i = 0; i < 10; i++)
4178 : {
4179 175788789 : if (!process_address_1 (nop, check_only_p, before, after))
4180 : {
4181 : return res;
4182 : }
4183 : else
4184 : {
4185 490253 : if (check_only_p)
4186 : return true;
4187 490253 : res = true;
4188 : }
4189 : }
4190 0 : fatal_insn ("unable to reload address in ", curr_insn);
4191 : }
4192 :
4193 : /* Override the generic address_reload_context in order to
4194 : control the creation of reload pseudos. */
4195 : class lra_autoinc_reload_context : public address_reload_context
4196 : {
4197 : machine_mode mode;
4198 : enum reg_class rclass;
4199 :
4200 : public:
4201 0 : lra_autoinc_reload_context (machine_mode mode, enum reg_class new_rclass)
4202 0 : : mode (mode), rclass (new_rclass) {}
4203 :
4204 0 : rtx get_reload_reg () const override final
4205 : {
4206 0 : return lra_create_new_reg (mode, NULL_RTX, rclass, NULL, "INC/DEC result");
4207 : }
4208 : };
4209 :
4210 : /* Emit insns to reload VALUE into a new register. VALUE is an
4211 : auto-increment or auto-decrement RTX whose operand is a register or
4212 : memory location; so reloading involves incrementing that location.
4213 :
4214 : INC_AMOUNT is the number to increment or decrement by (always
4215 : positive and ignored for POST_MODIFY/PRE_MODIFY).
4216 :
4217 : Return a pseudo containing the result. */
4218 : static rtx
4219 0 : emit_inc (enum reg_class new_rclass, rtx value, poly_int64 inc_amount)
4220 : {
4221 0 : lra_autoinc_reload_context context (GET_MODE (value), new_rclass);
4222 0 : return context.emit_autoinc (value, inc_amount);
4223 : }
4224 :
4225 : /* Return true if the current move insn does not need processing as we
4226 : already know that it satisfies its constraints. */
4227 : static bool
4228 100572029 : simple_move_p (void)
4229 : {
4230 100572029 : rtx dest, src;
4231 100572029 : enum reg_class dclass, sclass;
4232 :
4233 100572029 : lra_assert (curr_insn_set != NULL_RTX);
4234 100572029 : dest = SET_DEST (curr_insn_set);
4235 100572029 : src = SET_SRC (curr_insn_set);
4236 :
4237 : /* If the instruction has multiple sets we need to process it even if it
4238 : is single_set. This can happen if one or more of the SETs are dead.
4239 : See PR73650. */
4240 100572029 : if (multiple_sets (curr_insn))
4241 : return false;
4242 :
4243 100386231 : return ((dclass = get_op_class (dest)) != NO_REGS
4244 21128713 : && (sclass = get_op_class (src)) != NO_REGS
4245 : /* The backend guarantees that register moves of cost 2
4246 : never need reloads. */
4247 89544524 : && targetm.register_move_cost (GET_MODE (src), sclass, dclass) == 2);
4248 : }
4249 :
4250 : /* Swap operands NOP and NOP + 1. */
4251 : static inline void
4252 21456508 : swap_operands (int nop)
4253 : {
4254 21456508 : std::swap (curr_operand_mode[nop], curr_operand_mode[nop + 1]);
4255 21456508 : std::swap (original_subreg_reg_mode[nop], original_subreg_reg_mode[nop + 1]);
4256 21456508 : std::swap (*curr_id->operand_loc[nop], *curr_id->operand_loc[nop + 1]);
4257 21456508 : std::swap (equiv_substition_p[nop], equiv_substition_p[nop + 1]);
4258 : /* Swap the duplicates too. */
4259 21456508 : lra_update_dup (curr_id, nop);
4260 21456508 : lra_update_dup (curr_id, nop + 1);
4261 21456508 : }
4262 :
4263 : /* Return TRUE if X is a (subreg of) reg and there are no hard regs of X class
4264 : which can contain value of MODE. */
4265 34 : static bool invalid_mode_reg_p (enum machine_mode mode, rtx x)
4266 : {
4267 34 : if (SUBREG_P (x))
4268 2 : x = SUBREG_REG (x);
4269 34 : if (! REG_P (x))
4270 : return false;
4271 34 : enum reg_class rclass = get_reg_class (REGNO (x));
4272 34 : return (!hard_reg_set_empty_p (reg_class_contents[rclass])
4273 34 : && hard_reg_set_subset_p
4274 34 : (reg_class_contents[rclass],
4275 34 : ira_prohibited_class_mode_regs[rclass][mode]));
4276 : }
4277 :
4278 : /* Return TRUE if regno is referenced in more than one non-debug insn. */
4279 : static bool
4280 2898274 : multiple_insn_refs_p (int regno)
4281 : {
4282 2898274 : unsigned int uid;
4283 2898274 : bitmap_iterator bi;
4284 2898274 : int nrefs = 0;
4285 6977515 : EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
4286 : {
4287 6971887 : if (!NONDEBUG_INSN_P (lra_insn_recog_data[uid]->insn))
4288 1180967 : continue;
4289 5790920 : if (nrefs == 1)
4290 : return true;
4291 2898274 : nrefs++;
4292 : }
4293 : return false;
4294 : }
4295 :
4296 : /* Main entry point of the constraint code: search the body of the
4297 : current insn to choose the best alternative. It is mimicking insn
4298 : alternative cost calculation model of former reload pass. That is
4299 : because machine descriptions were written to use this model. This
4300 : model can be changed in future. Make commutative operand exchange
4301 : if it is chosen.
4302 :
4303 : if CHECK_ONLY_P is false, do RTL changes to satisfy the
4304 : constraints. Return true if any change happened during function
4305 : call.
4306 :
4307 : If CHECK_ONLY_P is true then don't do any transformation. Just
4308 : check that the insn satisfies all constraints. If the insn does
4309 : not satisfy any constraint, return true. */
4310 : static bool
4311 105896776 : curr_insn_transform (bool check_only_p)
4312 : {
4313 105896776 : int i, j, k;
4314 105896776 : int n_operands;
4315 105896776 : int n_alternatives;
4316 105896776 : int n_outputs;
4317 105896776 : int commutative;
4318 105896776 : signed char goal_alt_matched[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
4319 105896776 : signed char match_inputs[MAX_RECOG_OPERANDS + 1];
4320 105896776 : signed char outputs[MAX_RECOG_OPERANDS + 1];
4321 105896776 : rtx_insn *before, *after;
4322 105896776 : bool alt_p = false;
4323 : /* Flag that the insn has been changed through a transformation. */
4324 105896776 : bool change_p;
4325 105896776 : bool sec_mem_p;
4326 105896776 : bool use_sec_mem_p;
4327 105896776 : int max_regno_before;
4328 105896776 : int reused_alternative_num;
4329 :
4330 105896776 : curr_insn_set = single_set (curr_insn);
4331 105896776 : if (curr_insn_set != NULL_RTX && simple_move_p ())
4332 : {
4333 : /* We assume that the corresponding insn alternative has no
4334 : earlier clobbers. If it is not the case, don't define move
4335 : cost equal to 2 for the corresponding register classes. */
4336 16226254 : lra_set_used_insn_alternative (curr_insn, LRA_NON_CLOBBERED_ALT);
4337 16226254 : return false;
4338 : }
4339 :
4340 89670522 : no_input_reloads_p = no_output_reloads_p = false;
4341 89670522 : goal_alt_number = -1;
4342 89670522 : change_p = sec_mem_p = false;
4343 :
4344 : /* CALL_INSNs are not allowed to have any output reloads. */
4345 89670522 : if (CALL_P (curr_insn))
4346 5951229 : no_output_reloads_p = true;
4347 :
4348 89670522 : n_operands = curr_static_id->n_operands;
4349 89670522 : n_alternatives = curr_static_id->n_alternatives;
4350 :
4351 : /* Just return "no reloads" if insn has no operands with
4352 : constraints. */
4353 89670522 : if (n_operands == 0 || n_alternatives == 0)
4354 : return false;
4355 :
4356 79209003 : max_regno_before = max_reg_num ();
4357 :
4358 334975045 : for (i = 0; i < n_operands; i++)
4359 : {
4360 176557039 : goal_alt_matched[i][0] = -1;
4361 176557039 : goal_alt_matches[i] = -1;
4362 : }
4363 :
4364 79209003 : commutative = curr_static_id->commutative;
4365 :
4366 : /* Now see what we need for pseudos that didn't get hard regs or got
4367 : the wrong kind of hard reg. For this, we must consider all the
4368 : operands together against the register constraints. */
4369 :
4370 79209003 : best_losers = best_overall = INT_MAX;
4371 79209003 : best_reload_sum = 0;
4372 :
4373 79209003 : curr_swapped = false;
4374 79209003 : goal_alt_swapped = false;
4375 :
4376 79209003 : if (! check_only_p)
4377 : /* Make equivalence substitution and memory subreg elimination
4378 : before address processing because an address legitimacy can
4379 : depend on memory mode. */
4380 255692385 : for (i = 0; i < n_operands; i++)
4381 : {
4382 176503833 : rtx op, subst, old;
4383 176503833 : bool op_change_p = false;
4384 :
4385 176503833 : if (curr_static_id->operand[i].is_operator)
4386 1403641 : continue;
4387 :
4388 175100192 : old = op = *curr_id->operand_loc[i];
4389 175100192 : if (GET_CODE (old) == SUBREG)
4390 3594774 : old = SUBREG_REG (old);
4391 175100192 : subst = get_equiv_with_elimination (old, curr_insn);
4392 175100192 : original_subreg_reg_mode[i] = VOIDmode;
4393 175100192 : equiv_substition_p[i] = false;
4394 175100192 : if (subst != old)
4395 : {
4396 1520816 : equiv_substition_p[i] = true;
4397 1520816 : rtx new_subst = copy_rtx (subst);
4398 1520816 : if (lra_pointer_equiv_set_in (subst))
4399 772656 : lra_pointer_equiv_set_add (new_subst);
4400 1520816 : subst = new_subst;
4401 1520816 : lra_assert (REG_P (old));
4402 1520816 : if (GET_CODE (op) != SUBREG)
4403 1466856 : *curr_id->operand_loc[i] = subst;
4404 : else
4405 : {
4406 53960 : SUBREG_REG (op) = subst;
4407 53960 : if (GET_MODE (subst) == VOIDmode)
4408 90 : original_subreg_reg_mode[i] = GET_MODE (old);
4409 : }
4410 1520816 : if (lra_dump_file != NULL)
4411 : {
4412 3 : fprintf (lra_dump_file,
4413 : "Changing pseudo %d in operand %i of insn %u on equiv ",
4414 3 : REGNO (old), i, INSN_UID (curr_insn));
4415 3 : dump_value_slim (lra_dump_file, subst, 1);
4416 3 : fprintf (lra_dump_file, "\n");
4417 : }
4418 1520816 : op_change_p = change_p = true;
4419 : }
4420 175100192 : if (simplify_operand_subreg (i, GET_MODE (old)) || op_change_p)
4421 : {
4422 1521343 : change_p = true;
4423 1521343 : lra_update_dup (curr_id, i);
4424 : }
4425 : }
4426 :
4427 : /* Reload address registers and displacements. We do it before
4428 : finding an alternative because of memory constraints. */
4429 79209003 : before = after = NULL;
4430 255766042 : for (i = 0; i < n_operands; i++)
4431 176557039 : if (! curr_static_id->operand[i].is_operator
4432 176557039 : && process_address (i, check_only_p, &before, &after))
4433 : {
4434 490250 : if (check_only_p)
4435 : return true;
4436 490250 : change_p = true;
4437 490250 : lra_update_dup (curr_id, i);
4438 : }
4439 :
4440 79209003 : if (change_p)
4441 : /* If we've changed the instruction then any alternative that
4442 : we chose previously may no longer be valid. */
4443 1964559 : lra_set_used_insn_alternative (curr_insn, LRA_UNKNOWN_ALT);
4444 :
4445 79188552 : if (! check_only_p && curr_insn_set != NULL_RTX
4446 154612089 : && check_and_process_move (&change_p, &sec_mem_p))
4447 0 : return change_p;
4448 :
4449 79209003 : try_swapped:
4450 :
4451 89633892 : reused_alternative_num = check_only_p ? LRA_UNKNOWN_ALT : curr_id->used_insn_alternative;
4452 89633892 : if (lra_dump_file != NULL && reused_alternative_num >= 0)
4453 0 : fprintf (lra_dump_file, "Reusing alternative %d for insn #%u\n",
4454 0 : reused_alternative_num, INSN_UID (curr_insn));
4455 :
4456 89633892 : if (process_alt_operands (reused_alternative_num))
4457 80980407 : alt_p = true;
4458 :
4459 89633892 : if (check_only_p)
4460 34984 : return ! alt_p || best_losers != 0;
4461 :
4462 : /* If insn is commutative (it's safe to exchange a certain pair of
4463 : operands) then we need to try each alternative twice, the second
4464 : time matching those two operands as if we had exchanged them. To
4465 : do this, really exchange them in operands.
4466 :
4467 : If we have just tried the alternatives the second time, return
4468 : operands to normal and drop through. */
4469 :
4470 89613441 : if (reused_alternative_num < 0 && commutative >= 0)
4471 : {
4472 20849778 : curr_swapped = !curr_swapped;
4473 20849778 : if (curr_swapped)
4474 : {
4475 10424889 : swap_operands (commutative);
4476 10424889 : goto try_swapped;
4477 : }
4478 : else
4479 10424889 : swap_operands (commutative);
4480 : }
4481 :
4482 79188552 : if (! alt_p && ! sec_mem_p)
4483 : {
4484 : /* No alternative works with reloads?? */
4485 6 : if (INSN_CODE (curr_insn) >= 0)
4486 0 : fatal_insn ("unable to generate reloads for:", curr_insn);
4487 6 : error_for_asm (curr_insn,
4488 : "inconsistent operand constraints in an %<asm%>");
4489 6 : lra_asm_error_p = true;
4490 6 : if (! JUMP_P (curr_insn))
4491 : {
4492 : /* Avoid further trouble with this insn. Don't generate use
4493 : pattern here as we could use the insn SP offset. */
4494 6 : lra_set_insn_deleted (curr_insn);
4495 : }
4496 : else
4497 : {
4498 0 : lra_invalidate_insn_data (curr_insn);
4499 0 : ira_nullify_asm_goto (curr_insn);
4500 0 : lra_update_insn_regno_info (curr_insn);
4501 : }
4502 6 : return true;
4503 : }
4504 :
4505 : /* If the best alternative is with operands 1 and 2 swapped, swap
4506 : them. Update the operand numbers of any reloads already
4507 : pushed. */
4508 :
4509 79188546 : if (goal_alt_swapped)
4510 : {
4511 601990 : if (lra_dump_file != NULL)
4512 18 : fprintf (lra_dump_file, " Commutative operand exchange in insn %u\n",
4513 18 : INSN_UID (curr_insn));
4514 :
4515 : /* Swap the duplicates too. */
4516 601990 : swap_operands (commutative);
4517 601990 : change_p = true;
4518 : }
4519 :
4520 : /* Some targets' TARGET_SECONDARY_MEMORY_NEEDED (e.g. x86) are defined
4521 : too conservatively. So we use the secondary memory only if there
4522 : is no any alternative without reloads. */
4523 79188546 : use_sec_mem_p = false;
4524 79188546 : if (! alt_p)
4525 : use_sec_mem_p = true;
4526 79188546 : else if (sec_mem_p)
4527 : {
4528 15089 : for (i = 0; i < n_operands; i++)
4529 14917 : if (! goal_alt_win[i] && ! goal_alt_match_win[i])
4530 : break;
4531 13376 : use_sec_mem_p = i < n_operands;
4532 : }
4533 :
4534 13376 : if (use_sec_mem_p)
4535 : {
4536 13204 : int in = -1, out = -1;
4537 13204 : rtx new_reg, src, dest, rld;
4538 13204 : machine_mode sec_mode, rld_mode;
4539 :
4540 13204 : lra_assert (curr_insn_set != NULL_RTX && sec_mem_p);
4541 13204 : dest = SET_DEST (curr_insn_set);
4542 13204 : src = SET_SRC (curr_insn_set);
4543 39612 : for (i = 0; i < n_operands; i++)
4544 26408 : if (*curr_id->operand_loc[i] == dest)
4545 : out = i;
4546 13204 : else if (*curr_id->operand_loc[i] == src)
4547 13204 : in = i;
4548 13204 : for (i = 0; i < curr_static_id->n_dups; i++)
4549 0 : if (out < 0 && *curr_id->dup_loc[i] == dest)
4550 0 : out = curr_static_id->dup_num[i];
4551 0 : else if (in < 0 && *curr_id->dup_loc[i] == src)
4552 0 : in = curr_static_id->dup_num[i];
4553 13204 : lra_assert (out >= 0 && in >= 0
4554 : && curr_static_id->operand[out].type == OP_OUT
4555 : && curr_static_id->operand[in].type == OP_IN);
4556 13204 : rld = partial_subreg_p (GET_MODE (src), GET_MODE (dest)) ? src : dest;
4557 13204 : rld_mode = GET_MODE (rld);
4558 13204 : sec_mode = targetm.secondary_memory_needed_mode (rld_mode);
4559 13204 : if (rld_mode != sec_mode
4560 13204 : && (invalid_mode_reg_p (sec_mode, dest)
4561 17 : || invalid_mode_reg_p (sec_mode, src)))
4562 : sec_mode = rld_mode;
4563 13204 : new_reg = lra_create_new_reg (sec_mode, NULL_RTX, NO_REGS, NULL,
4564 : "secondary");
4565 : /* If the mode is changed, it should be wider. */
4566 13204 : lra_assert (!partial_subreg_p (sec_mode, rld_mode));
4567 13204 : if (sec_mode != rld_mode)
4568 : {
4569 : /* If the target says specifically to use another mode for
4570 : secondary memory moves we cannot reuse the original
4571 : insn. */
4572 17 : after = emit_spill_move (false, new_reg, dest);
4573 17 : lra_process_new_insns (curr_insn, NULL, after,
4574 : "Inserting the sec. move");
4575 : /* We may have non null BEFORE here (e.g. after address
4576 : processing. */
4577 17 : push_to_sequence (before);
4578 17 : before = emit_spill_move (true, new_reg, src);
4579 17 : emit_insn (before);
4580 17 : before = end_sequence ();
4581 17 : lra_process_new_insns (curr_insn, before, NULL, "Changing on");
4582 17 : lra_set_insn_deleted (curr_insn);
4583 : }
4584 13187 : else if (dest == rld)
4585 : {
4586 13187 : *curr_id->operand_loc[out] = new_reg;
4587 13187 : lra_update_dup (curr_id, out);
4588 13187 : after = emit_spill_move (false, new_reg, dest);
4589 13187 : lra_process_new_insns (curr_insn, NULL, after,
4590 : "Inserting the sec. move");
4591 : }
4592 : else
4593 : {
4594 0 : *curr_id->operand_loc[in] = new_reg;
4595 0 : lra_update_dup (curr_id, in);
4596 : /* See comments above. */
4597 0 : push_to_sequence (before);
4598 0 : before = emit_spill_move (true, new_reg, src);
4599 0 : emit_insn (before);
4600 0 : before = end_sequence ();
4601 0 : lra_process_new_insns (curr_insn, before, NULL,
4602 : "Inserting the sec. move");
4603 : }
4604 13204 : lra_update_insn_regno_info (curr_insn);
4605 13204 : return true;
4606 : }
4607 :
4608 79175342 : lra_assert (goal_alt_number >= 0);
4609 158256976 : lra_set_used_insn_alternative (curr_insn, goal_reuse_alt_p
4610 : ? goal_alt_number : LRA_UNKNOWN_ALT);
4611 :
4612 79175342 : if (lra_dump_file != NULL)
4613 : {
4614 1187 : const char *p;
4615 :
4616 1187 : fprintf (lra_dump_file, " Choosing alt %d in insn %u:",
4617 1187 : goal_alt_number, INSN_UID (curr_insn));
4618 1187 : print_curr_insn_alt (goal_alt_number);
4619 1187 : if (INSN_CODE (curr_insn) >= 0
4620 1187 : && (p = get_insn_name (INSN_CODE (curr_insn))) != NULL)
4621 1180 : fprintf (lra_dump_file, " {%s}", p);
4622 1187 : if (maybe_ne (curr_id->sp_offset, 0))
4623 : {
4624 0 : fprintf (lra_dump_file, " (sp_off=");
4625 0 : print_dec (curr_id->sp_offset, lra_dump_file);
4626 0 : fprintf (lra_dump_file, ")");
4627 : }
4628 1187 : fprintf (lra_dump_file, "\n");
4629 : }
4630 :
4631 : /* Right now, for any pair of operands I and J that are required to
4632 : match, with J < I, goal_alt_matches[I] is J. Add I to
4633 : goal_alt_matched[J]. */
4634 :
4635 255652753 : for (i = 0; i < n_operands; i++)
4636 176477411 : if ((j = goal_alt_matches[i]) >= 0)
4637 : {
4638 10459453 : for (k = 0; goal_alt_matched[j][k] >= 0; k++)
4639 : ;
4640 : /* We allow matching one output operand and several input
4641 : operands. */
4642 10459452 : lra_assert (k == 0
4643 : || (curr_static_id->operand[j].type == OP_OUT
4644 : && curr_static_id->operand[i].type == OP_IN
4645 : && (curr_static_id->operand
4646 : [goal_alt_matched[j][0]].type == OP_IN)));
4647 10459452 : goal_alt_matched[j][k] = i;
4648 10459452 : goal_alt_matched[j][k + 1] = -1;
4649 : }
4650 :
4651 255652753 : for (i = 0; i < n_operands; i++)
4652 176477411 : goal_alt_win[i] |= goal_alt_match_win[i];
4653 :
4654 : /* Any constants that aren't allowed and can't be reloaded into
4655 : registers are here changed into memory references. */
4656 255652753 : for (i = 0; i < n_operands; i++)
4657 176477411 : if (goal_alt_win[i])
4658 : {
4659 170430211 : int regno;
4660 170430211 : enum reg_class new_class;
4661 170430211 : rtx reg = *curr_id->operand_loc[i];
4662 :
4663 170430211 : if (GET_CODE (reg) == SUBREG)
4664 3292648 : reg = SUBREG_REG (reg);
4665 :
4666 170430211 : if (REG_P (reg) && (regno = REGNO (reg)) >= FIRST_PSEUDO_REGISTER)
4667 : {
4668 77402513 : bool ok_p = in_class_p (reg, goal_alt[i], &new_class, true);
4669 :
4670 77402513 : if (new_class != NO_REGS && get_reg_class (regno) != new_class)
4671 : {
4672 3498282 : lra_assert (ok_p);
4673 3498282 : lra_change_class (regno, new_class, " Change to", true);
4674 : }
4675 : }
4676 : }
4677 : else
4678 : {
4679 6047200 : const char *constraint;
4680 6047200 : char c;
4681 6047200 : rtx op = *curr_id->operand_loc[i];
4682 6047200 : rtx subreg = NULL_RTX;
4683 6047200 : machine_mode mode = curr_operand_mode[i];
4684 :
4685 6047200 : if (GET_CODE (op) == SUBREG)
4686 : {
4687 238174 : subreg = op;
4688 238174 : op = SUBREG_REG (op);
4689 238174 : mode = GET_MODE (op);
4690 : }
4691 :
4692 6264131 : if (CONST_POOL_OK_P (mode, op)
4693 6264131 : && ((targetm.preferred_reload_class
4694 216931 : (op, (enum reg_class) goal_alt[i]) == NO_REGS)
4695 71272 : || no_input_reloads_p))
4696 : {
4697 145659 : rtx tem = force_const_mem (mode, op);
4698 :
4699 145659 : change_p = true;
4700 145659 : if (subreg != NULL_RTX)
4701 0 : tem = gen_rtx_SUBREG (mode, tem, SUBREG_BYTE (subreg));
4702 :
4703 145659 : *curr_id->operand_loc[i] = tem;
4704 145659 : lra_update_dup (curr_id, i);
4705 145659 : process_address (i, false, &before, &after);
4706 :
4707 : /* If the alternative accepts constant pool refs directly
4708 : there will be no reload needed at all. */
4709 145659 : if (subreg != NULL_RTX)
4710 0 : continue;
4711 : /* Skip alternatives before the one requested. */
4712 145659 : constraint = (curr_static_id->operand_alternative
4713 145659 : [goal_alt_number * n_operands + i].constraint);
4714 145659 : for (;
4715 246984 : (c = *constraint) && c != ',' && c != '#';
4716 101325 : constraint += CONSTRAINT_LEN (c, constraint))
4717 : {
4718 200029 : enum constraint_num cn = lookup_constraint (constraint);
4719 200029 : if ((insn_extra_memory_constraint (cn)
4720 101456 : || insn_extra_special_memory_constraint (cn)
4721 : || insn_extra_relaxed_memory_constraint (cn))
4722 200160 : && satisfies_memory_constraint_p (tem, cn))
4723 : break;
4724 : }
4725 145659 : if (c == '\0' || c == ',' || c == '#')
4726 46955 : continue;
4727 :
4728 98704 : goal_alt_win[i] = true;
4729 : }
4730 : }
4731 :
4732 : n_outputs = 0;
4733 255652753 : for (i = 0; i < n_operands; i++)
4734 176477411 : if (curr_static_id->operand[i].type == OP_OUT)
4735 68717127 : outputs[n_outputs++] = i;
4736 79175342 : outputs[n_outputs] = -1;
4737 255652753 : for (i = 0; i < n_operands; i++)
4738 : {
4739 176477411 : int regno;
4740 176477411 : bool optional_p = false;
4741 176477411 : rtx old, new_reg;
4742 176477411 : rtx op = *curr_id->operand_loc[i];
4743 :
4744 176477411 : if (goal_alt_win[i])
4745 : {
4746 170528915 : if (goal_alt[i] == NO_REGS
4747 46451092 : && REG_P (op)
4748 5366056 : && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER
4749 : /* We assigned a hard register to the pseudo in the past but now
4750 : decided to spill it for the insn. If the pseudo is used only
4751 : in this insn, it is better to spill it here as we free hard
4752 : registers for other pseudos referenced in the insn. The most
4753 : common case of this is a scratch register which will be
4754 : transformed to scratch back at the end of LRA. */
4755 173427189 : && !multiple_insn_refs_p (regno))
4756 : {
4757 11256 : if (lra_get_allocno_class (regno) != NO_REGS)
4758 5296 : lra_change_class (regno, NO_REGS, " Change to", true);
4759 5628 : reg_renumber[regno] = -1;
4760 : }
4761 : /* We can do an optional reload. If the pseudo got a hard
4762 : reg, we might improve the code through inheritance. If
4763 : it does not get a hard register we coalesce memory/memory
4764 : moves later. Ignore move insns to avoid cycling. */
4765 170528915 : if (! lra_simple_p
4766 169984237 : && lra_undo_inheritance_iter < LRA_MAX_INHERITANCE_PASSES
4767 157562321 : && goal_alt[i] != NO_REGS && REG_P (op)
4768 78320047 : && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER
4769 65576364 : && regno < new_regno_start
4770 60845451 : && ! ira_former_scratch_p (regno)
4771 60790572 : && reg_renumber[regno] < 0
4772 : /* Check that the optional reload pseudo will be able to
4773 : hold given mode value. */
4774 3857732 : && ! (prohibited_class_reg_set_mode_p
4775 3857732 : (goal_alt[i], reg_class_contents[goal_alt[i]],
4776 3857732 : PSEUDO_REGNO_MODE (regno)))
4777 174386637 : && (curr_insn_set == NULL_RTX
4778 3850604 : || !((REG_P (SET_SRC (curr_insn_set))
4779 : || MEM_P (SET_SRC (curr_insn_set))
4780 : || GET_CODE (SET_SRC (curr_insn_set)) == SUBREG)
4781 3207238 : && (REG_P (SET_DEST (curr_insn_set))
4782 : || MEM_P (SET_DEST (curr_insn_set))
4783 : || GET_CODE (SET_DEST (curr_insn_set)) == SUBREG))))
4784 : optional_p = true;
4785 169878389 : else if (goal_alt_matched[i][0] != -1
4786 8776756 : && curr_static_id->operand[i].type == OP_OUT
4787 8775620 : && (curr_static_id->operand_alternative
4788 8775620 : [goal_alt_number * n_operands + i].earlyclobber)
4789 18999 : && REG_P (op))
4790 : {
4791 24019 : for (j = 0; goal_alt_matched[i][j] != -1; j++)
4792 : {
4793 18946 : rtx op2 = *curr_id->operand_loc[goal_alt_matched[i][j]];
4794 :
4795 18946 : if (REG_P (op2) && REGNO (op) != REGNO (op2))
4796 : break;
4797 : }
4798 18946 : if (goal_alt_matched[i][j] != -1)
4799 : {
4800 : /* Generate reloads for different output and matched
4801 : input registers. This is the easiest way to avoid
4802 : creation of non-existing register conflicts in
4803 : lra-lives.cc. */
4804 13873 : match_reload (i, goal_alt_matched[i], outputs, goal_alt[i],
4805 : &goal_alt_exclude_start_hard_regs[i], &before,
4806 : &after, true);
4807 : }
4808 171464711 : continue;
4809 18946 : }
4810 : else
4811 : {
4812 169859443 : enum reg_class rclass, common_class;
4813 :
4814 89064511 : if (REG_P (op) && goal_alt[i] != NO_REGS
4815 83698455 : && (regno = REGNO (op)) >= new_regno_start
4816 4740252 : && (rclass = get_reg_class (regno)) == ALL_REGS
4817 0 : && ((common_class = ira_reg_class_subset[rclass][goal_alt[i]])
4818 : != NO_REGS)
4819 0 : && common_class != ALL_REGS
4820 169859443 : && enough_allocatable_hard_regs_p (common_class,
4821 0 : GET_MODE (op)))
4822 : /* Refine reload pseudo class from chosen alternative
4823 : constraint. */
4824 0 : lra_change_class (regno, common_class, " Change to", true);
4825 169859443 : continue;
4826 169859443 : }
4827 : }
4828 :
4829 : /* Operands that match previous ones have already been handled. */
4830 6599022 : if (goal_alt_matches[i] >= 0)
4831 1586322 : continue;
4832 :
4833 : /* We should not have an operand with a non-offsettable address
4834 : appearing where an offsettable address will do. It also may
4835 : be a case when the address should be special in other words
4836 : not a general one (e.g. it needs no index reg). */
4837 5012700 : if (goal_alt_matched[i][0] == -1 && goal_alt_offmemok[i] && MEM_P (op))
4838 : {
4839 72 : enum reg_class rclass;
4840 72 : rtx *loc = &XEXP (op, 0);
4841 72 : enum rtx_code code = GET_CODE (*loc);
4842 :
4843 72 : push_to_sequence (before);
4844 72 : rclass = base_reg_class (GET_MODE (op), MEM_ADDR_SPACE (op),
4845 : MEM, SCRATCH, curr_insn);
4846 72 : if (GET_RTX_CLASS (code) == RTX_AUTOINC)
4847 0 : new_reg = emit_inc (rclass, *loc,
4848 : /* This value does not matter for MODIFY. */
4849 0 : GET_MODE_SIZE (GET_MODE (op)));
4850 86 : else if (get_reload_reg (OP_IN, Pmode, *loc, rclass,
4851 : NULL, false, false,
4852 : "offsetable address", &new_reg))
4853 : {
4854 72 : rtx addr = *loc;
4855 72 : enum rtx_code code = GET_CODE (addr);
4856 72 : bool align_p = false;
4857 :
4858 72 : if (code == AND && CONST_INT_P (XEXP (addr, 1)))
4859 : {
4860 : /* (and ... (const_int -X)) is used to align to X bytes. */
4861 0 : align_p = true;
4862 0 : addr = XEXP (*loc, 0);
4863 : }
4864 : else
4865 72 : addr = canonicalize_reload_addr (addr);
4866 :
4867 72 : lra_emit_move (new_reg, addr);
4868 72 : if (align_p)
4869 0 : emit_move_insn (new_reg, gen_rtx_AND (GET_MODE (new_reg), new_reg, XEXP (*loc, 1)));
4870 : }
4871 72 : before = end_sequence ();
4872 72 : *loc = new_reg;
4873 72 : lra_update_dup (curr_id, i);
4874 72 : }
4875 5012628 : else if (goal_alt_matched[i][0] == -1)
4876 : {
4877 3329933 : machine_mode mode;
4878 3329933 : rtx reg, *loc;
4879 3329933 : int hard_regno;
4880 3329933 : enum op_type type = curr_static_id->operand[i].type;
4881 :
4882 3329933 : loc = curr_id->operand_loc[i];
4883 3329933 : mode = curr_operand_mode[i];
4884 3329933 : if (GET_CODE (*loc) == SUBREG)
4885 : {
4886 74261 : reg = SUBREG_REG (*loc);
4887 74261 : poly_int64 byte = SUBREG_BYTE (*loc);
4888 74261 : if (REG_P (reg)
4889 : /* Strict_low_part requires reloading the register and not
4890 : just the subreg. Likewise for a strict subreg no wider
4891 : than a word for WORD_REGISTER_OPERATIONS targets. */
4892 74261 : && (curr_static_id->operand[i].strict_low
4893 74194 : || (!paradoxical_subreg_p (mode, GET_MODE (reg))
4894 71596 : && (hard_regno
4895 71596 : = get_try_hard_regno (REGNO (reg))) >= 0
4896 70077 : && (simplify_subreg_regno
4897 144338 : (hard_regno,
4898 70077 : GET_MODE (reg), byte, mode) < 0)
4899 0 : && (goal_alt[i] == NO_REGS
4900 0 : || (simplify_subreg_regno
4901 74261 : (ira_class_hard_regs[goal_alt[i]][0],
4902 0 : GET_MODE (reg), byte, mode) >= 0)))
4903 74194 : || (partial_subreg_p (mode, GET_MODE (reg))
4904 74194 : && known_le (GET_MODE_SIZE (GET_MODE (reg)),
4905 : UNITS_PER_WORD)
4906 : && WORD_REGISTER_OPERATIONS))
4907 : /* Avoid the situation when there are no available hard regs
4908 : for the pseudo mode but there are ones for the subreg
4909 : mode: */
4910 74328 : && !(goal_alt[i] != NO_REGS
4911 67 : && REGNO (reg) >= FIRST_PSEUDO_REGISTER
4912 67 : && (prohibited_class_reg_set_mode_p
4913 67 : (goal_alt[i], reg_class_contents[goal_alt[i]],
4914 67 : GET_MODE (reg)))
4915 : && !(prohibited_class_reg_set_mode_p
4916 0 : (goal_alt[i], reg_class_contents[goal_alt[i]],
4917 : mode))))
4918 : {
4919 : /* An OP_INOUT is required when reloading a subreg of a
4920 : mode wider than a word to ensure that data beyond the
4921 : word being reloaded is preserved. Also automatically
4922 : ensure that strict_low_part reloads are made into
4923 : OP_INOUT which should already be true from the backend
4924 : constraints. */
4925 67 : if (type == OP_OUT
4926 67 : && (curr_static_id->operand[i].strict_low
4927 0 : || read_modify_subreg_p (*loc)))
4928 : type = OP_INOUT;
4929 67 : loc = &SUBREG_REG (*loc);
4930 67 : mode = GET_MODE (*loc);
4931 : }
4932 : }
4933 3329933 : old = *loc;
4934 3329933 : if (get_reload_reg (type, mode, old, goal_alt[i],
4935 : &goal_alt_exclude_start_hard_regs[i],
4936 3329933 : loc != curr_id->operand_loc[i],
4937 3329933 : curr_static_id->operand_alternative
4938 3329933 : [goal_alt_number * n_operands + i].earlyclobber,
4939 : "", &new_reg)
4940 3329933 : && type != OP_OUT)
4941 : {
4942 2361726 : push_to_sequence (before);
4943 2361726 : lra_emit_move (new_reg, old);
4944 2361726 : before = end_sequence ();
4945 : }
4946 3329933 : *loc = new_reg;
4947 3329933 : if (type != OP_IN
4948 967114 : && find_reg_note (curr_insn, REG_UNUSED, old) == NULL_RTX
4949 : /* OLD can be an equivalent constant here. */
4950 942504 : && !CONSTANT_P (old)
4951 : /* No need to write back anything for a scratch. */
4952 942504 : && GET_CODE (old) != SCRATCH
4953 4272437 : && (!REG_P(old) || !ira_former_scratch_p (REGNO (old))))
4954 : {
4955 942504 : start_sequence ();
4956 942504 : lra_emit_move (type == OP_INOUT ? copy_rtx (old) : old, new_reg);
4957 942504 : emit_insn (after);
4958 942504 : after = end_sequence ();
4959 942504 : *loc = new_reg;
4960 : }
4961 3329933 : for (j = 0; j < goal_alt_dont_inherit_ops_num; j++)
4962 619 : if (goal_alt_dont_inherit_ops[j] == i)
4963 : {
4964 619 : lra_set_regno_unique_value (REGNO (new_reg));
4965 619 : break;
4966 : }
4967 3329933 : lra_update_dup (curr_id, i);
4968 : }
4969 1682695 : else if (curr_static_id->operand[i].type == OP_IN
4970 1682695 : && (curr_static_id->operand[goal_alt_matched[i][0]].type
4971 : == OP_OUT
4972 0 : || (curr_static_id->operand[goal_alt_matched[i][0]].type
4973 : == OP_INOUT
4974 0 : && (operands_match_p
4975 0 : (*curr_id->operand_loc[i],
4976 0 : *curr_id->operand_loc[goal_alt_matched[i][0]],
4977 : -1)))))
4978 : {
4979 : /* generate reloads for input and matched outputs. */
4980 15190 : match_inputs[0] = i;
4981 15190 : match_inputs[1] = -1;
4982 15190 : match_reload (goal_alt_matched[i][0], match_inputs, outputs,
4983 : goal_alt[i], &goal_alt_exclude_start_hard_regs[i],
4984 : &before, &after,
4985 15190 : curr_static_id->operand_alternative
4986 15190 : [goal_alt_number * n_operands + goal_alt_matched[i][0]]
4987 15190 : .earlyclobber);
4988 : }
4989 1667505 : else if ((curr_static_id->operand[i].type == OP_OUT
4990 0 : || (curr_static_id->operand[i].type == OP_INOUT
4991 0 : && (operands_match_p
4992 0 : (*curr_id->operand_loc[i],
4993 0 : *curr_id->operand_loc[goal_alt_matched[i][0]],
4994 : -1))))
4995 1667505 : && (curr_static_id->operand[goal_alt_matched[i][0]].type
4996 : == OP_IN))
4997 : /* Generate reloads for output and matched inputs. */
4998 1667505 : match_reload (i, goal_alt_matched[i], outputs, goal_alt[i],
4999 : &goal_alt_exclude_start_hard_regs[i], &before, &after,
5000 1667505 : curr_static_id->operand_alternative
5001 1667505 : [goal_alt_number * n_operands + i].earlyclobber);
5002 0 : else if (curr_static_id->operand[i].type == OP_IN
5003 0 : && (curr_static_id->operand[goal_alt_matched[i][0]].type
5004 : == OP_IN))
5005 : {
5006 : /* Generate reloads for matched inputs. */
5007 0 : match_inputs[0] = i;
5008 0 : for (j = 0; (k = goal_alt_matched[i][j]) >= 0; j++)
5009 0 : match_inputs[j + 1] = k;
5010 0 : match_inputs[j + 1] = -1;
5011 0 : match_reload (-1, match_inputs, outputs, goal_alt[i],
5012 : &goal_alt_exclude_start_hard_regs[i],
5013 : &before, &after, false);
5014 : }
5015 : else
5016 : /* We must generate code in any case when function
5017 : process_alt_operands decides that it is possible. */
5018 0 : gcc_unreachable ();
5019 :
5020 5012700 : if (optional_p)
5021 : {
5022 650526 : rtx reg = op;
5023 :
5024 650526 : lra_assert (REG_P (reg));
5025 650526 : regno = REGNO (reg);
5026 650526 : op = *curr_id->operand_loc[i]; /* Substitution. */
5027 650526 : if (GET_CODE (op) == SUBREG)
5028 0 : op = SUBREG_REG (op);
5029 650526 : gcc_assert (REG_P (op) && (int) REGNO (op) >= new_regno_start);
5030 650526 : bitmap_set_bit (&lra_optional_reload_pseudos, REGNO (op));
5031 650526 : lra_reg_info[REGNO (op)].restore_rtx = reg;
5032 650526 : if (lra_dump_file != NULL)
5033 3 : fprintf (lra_dump_file,
5034 : " Making reload reg %d for reg %d optional\n",
5035 : REGNO (op), regno);
5036 : }
5037 : }
5038 74886915 : if (before != NULL_RTX || after != NULL_RTX
5039 153256998 : || max_regno_before != max_reg_num ())
5040 5118707 : change_p = true;
5041 79175342 : if (change_p)
5042 : {
5043 6062414 : lra_update_operator_dups (curr_id);
5044 : /* Something changes -- process the insn. */
5045 6062414 : lra_update_insn_regno_info (curr_insn);
5046 6062414 : if (asm_noperands (PATTERN (curr_insn)) >= 0
5047 6062414 : && ++curr_id->asm_reloads_num >= FIRST_PSEUDO_REGISTER)
5048 : /* Most probably there are no enough registers to satisfy asm insn: */
5049 : {
5050 11 : lra_asm_insn_error (curr_insn);
5051 11 : return change_p;
5052 : }
5053 : }
5054 79175331 : if (goal_alt_out_sp_reload_p)
5055 : {
5056 : /* We have an output stack pointer reload -- update sp offset: */
5057 0 : rtx set;
5058 0 : bool done_p = false;
5059 0 : poly_int64 sp_offset = curr_id->sp_offset;
5060 0 : for (rtx_insn *insn = after; insn != NULL_RTX; insn = NEXT_INSN (insn))
5061 0 : if ((set = single_set (insn)) != NULL_RTX
5062 0 : && SET_DEST (set) == stack_pointer_rtx)
5063 : {
5064 0 : lra_assert (!done_p);
5065 0 : done_p = true;
5066 0 : curr_id->sp_offset = 0;
5067 0 : lra_insn_recog_data_t id = lra_get_insn_recog_data (insn);
5068 0 : id->sp_offset = sp_offset;
5069 0 : if (lra_dump_file != NULL)
5070 0 : fprintf (lra_dump_file,
5071 : " Moving sp offset from insn %u to %u\n",
5072 0 : INSN_UID (curr_insn), INSN_UID (insn));
5073 : }
5074 0 : lra_assert (done_p);
5075 : }
5076 79175331 : int const_regno = -1;
5077 79175331 : rtx set;
5078 79175331 : rtx_insn *prev, *const_insn = NULL;
5079 4288422 : if (before != NULL_RTX && (prev = PREV_INSN (curr_insn)) != NULL_RTX
5080 83463753 : && (set = single_set (prev)) != NULL_RTX && CONSTANT_P (SET_SRC (set)))
5081 : {
5082 323106 : rtx reg = SET_DEST (set);
5083 323106 : if (GET_CODE (reg) == SUBREG)
5084 8797 : reg = SUBREG_REG (reg);
5085 : /* Consider only reload insns as we don't want to change the order
5086 : created by previous optimizations. */
5087 234749 : if (REG_P (reg) && (int) REGNO (reg) >= lra_new_regno_start
5088 323899 : && bitmap_bit_p (&lra_reg_info[REGNO (reg)].insn_bitmap,
5089 793 : INSN_UID (curr_insn)))
5090 : {
5091 317 : const_regno = REGNO (reg);
5092 317 : const_insn = prev;
5093 : }
5094 : }
5095 79175331 : lra_process_new_insns (curr_insn, before, after,
5096 : "Inserting insn reload", true);
5097 79175331 : if (const_regno >= 0) {
5098 634 : bool move_p = true;
5099 634 : for (rtx_insn *insn = before; insn != curr_insn; insn = NEXT_INSN (insn))
5100 317 : if (bitmap_bit_p (&lra_reg_info[const_regno].insn_bitmap,
5101 317 : INSN_UID (insn)))
5102 : {
5103 : move_p = false;
5104 : break;
5105 : }
5106 317 : if (move_p)
5107 : {
5108 317 : reorder_insns_nobb (const_insn, const_insn, PREV_INSN (curr_insn));
5109 317 : if (lra_dump_file != NULL)
5110 : {
5111 0 : dump_insn_slim (lra_dump_file, const_insn);
5112 0 : fprintf (lra_dump_file,
5113 : " to decrease reg pressure, it is moved before:\n");
5114 0 : dump_insn_slim (lra_dump_file, curr_insn);
5115 : }
5116 : }
5117 : }
5118 : return change_p;
5119 : }
5120 :
5121 : /* Return true if INSN satisfies all constraints. In other words, no
5122 : reload insns are needed. */
5123 : bool
5124 3360 : lra_constrain_insn (rtx_insn *insn)
5125 : {
5126 3360 : int saved_new_regno_start = new_regno_start;
5127 3360 : int saved_new_insn_uid_start = new_insn_uid_start;
5128 3360 : bool change_p;
5129 :
5130 3360 : curr_insn = insn;
5131 3360 : curr_id = lra_get_insn_recog_data (curr_insn);
5132 3360 : curr_static_id = curr_id->insn_static_data;
5133 3360 : new_insn_uid_start = get_max_uid ();
5134 3360 : new_regno_start = max_reg_num ();
5135 3360 : change_p = curr_insn_transform (true);
5136 3360 : new_regno_start = saved_new_regno_start;
5137 3360 : new_insn_uid_start = saved_new_insn_uid_start;
5138 3360 : return ! change_p;
5139 : }
5140 :
5141 : /* Return true if X is in LIST. */
5142 : static bool
5143 1323884 : in_list_p (rtx x, rtx list)
5144 : {
5145 2260607 : for (; list != NULL_RTX; list = XEXP (list, 1))
5146 1244967 : if (XEXP (list, 0) == x)
5147 : return true;
5148 : return false;
5149 : }
5150 :
5151 : /* Return true if X contains an allocatable hard register (if
5152 : HARD_REG_P) or a (spilled if SPILLED_P) pseudo. */
5153 : static bool
5154 7402431 : contains_reg_p (rtx x, bool hard_reg_p, bool spilled_p)
5155 : {
5156 7402431 : int i, j;
5157 7402431 : const char *fmt;
5158 7402431 : enum rtx_code code;
5159 :
5160 7402431 : code = GET_CODE (x);
5161 7402431 : if (REG_P (x))
5162 : {
5163 1511166 : int regno = REGNO (x);
5164 1511166 : HARD_REG_SET alloc_regs;
5165 :
5166 1511166 : if (hard_reg_p)
5167 : {
5168 474251 : if (regno >= FIRST_PSEUDO_REGISTER)
5169 142190 : regno = lra_get_regno_hard_regno (regno);
5170 474251 : if (regno < 0)
5171 : return false;
5172 474251 : alloc_regs = ~lra_no_alloc_regs;
5173 474251 : return overlaps_hard_reg_set_p (alloc_regs, GET_MODE (x), regno);
5174 : }
5175 : else
5176 : {
5177 1036915 : if (regno < FIRST_PSEUDO_REGISTER)
5178 : return false;
5179 342028 : if (! spilled_p)
5180 : return true;
5181 181033 : return lra_get_regno_hard_regno (regno) < 0;
5182 : }
5183 : }
5184 5891265 : fmt = GET_RTX_FORMAT (code);
5185 14563938 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5186 : {
5187 9267486 : if (fmt[i] == 'e')
5188 : {
5189 4116064 : if (contains_reg_p (XEXP (x, i), hard_reg_p, spilled_p))
5190 : return true;
5191 : }
5192 5151422 : else if (fmt[i] == 'E')
5193 : {
5194 1235873 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5195 1130595 : if (contains_reg_p (XVECEXP (x, i, j), hard_reg_p, spilled_p))
5196 : return true;
5197 : }
5198 : }
5199 : return false;
5200 : }
5201 :
5202 : /* Process all regs in location *LOC and change them on equivalent
5203 : substitution. Return true if any change was done. */
static bool
loc_equivalence_change_p (rtx *loc)
{
  rtx subst, reg, x = *loc;
  bool result = false;
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  if (code == SUBREG)
    {
      reg = SUBREG_REG (x);
      /* GET_MODE (subst) == VOIDmode means the equivalence is a
	 constant, which can be folded into the subreg directly.  */
      if ((subst = get_equiv_with_elimination (reg, curr_insn)) != reg
	  && GET_MODE (subst) == VOIDmode)
	{
	  /* We cannot reload debug location.  Simplify subreg here
	     while we know the inner mode.  */
	  *loc = simplify_gen_subreg (GET_MODE (x), subst,
				      GET_MODE (reg), SUBREG_BYTE (x));
	  return true;
	}
    }
  if (code == REG && (subst = get_equiv_with_elimination (x, curr_insn)) != x)
    {
      *loc = subst;
      return true;
    }

  /* Scan all the operand sub-expressions.  Note the recursive call is
     deliberately the left operand of || so that it is always made
     even after a change has already been recorded in RESULT.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	result = loc_equivalence_change_p (&XEXP (x, i)) || result;
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  result
	    = loc_equivalence_change_p (&XVECEXP (x, i, j)) || result;
    }
  return result;
}
5245 :
5246 : /* Similar to loc_equivalence_change_p, but for use as
5247 : simplify_replace_fn_rtx callback. DATA is insn for which the
5248 : elimination is done. If it null we don't do the elimination. */
5249 : static rtx
5250 42451303 : loc_equivalence_callback (rtx loc, const_rtx, void *data)
5251 : {
5252 42451303 : if (!REG_P (loc))
5253 : return NULL_RTX;
5254 :
5255 10947675 : rtx subst = (data == NULL
5256 10947675 : ? get_equiv (loc) : get_equiv_with_elimination (loc, (rtx_insn *) data));
5257 10947675 : if (subst != loc)
5258 : return subst;
5259 :
5260 : return NULL_RTX;
5261 : }
5262 :
/* Maximum number of generated reload insns per insn.  It is for
   preventing this pass cycling in a bug case.  */
#define MAX_RELOAD_INSNS_NUMBER LRA_MAX_INSN_RELOADS

/* The current iteration number of this LRA pass.  */
int lra_constraint_iter;

/* True if we should during assignment sub-pass check assignment
   correctness for all pseudos and spill some of them to correct
   conflicts.  It can be necessary when we substitute equiv which
   needs checking register allocation correctness because the
   equivalent value contains allocatable hard registers, or when we
   restore multi-register pseudo, or when we change the insn code and
   its operand became INOUT operand when it was IN one before.  */
bool check_and_force_assignment_correctness_p;
5278 :
5279 : /* Return true if REGNO is referenced in more than one block. */
5280 : static bool
5281 153401 : multi_block_pseudo_p (int regno)
5282 : {
5283 153401 : basic_block bb = NULL;
5284 153401 : unsigned int uid;
5285 153401 : bitmap_iterator bi;
5286 :
5287 153401 : if (regno < FIRST_PSEUDO_REGISTER)
5288 : return false;
5289 :
5290 469358 : EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
5291 320737 : if (bb == NULL)
5292 153401 : bb = BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn);
5293 167336 : else if (BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn) != bb)
5294 : return true;
5295 : return false;
5296 : }
5297 :
5298 : /* Return true if LIST contains a deleted insn. */
5299 : static bool
5300 719800 : contains_deleted_insn_p (rtx_insn_list *list)
5301 : {
5302 1375692 : for (; list != NULL_RTX; list = list->next ())
5303 655892 : if (NOTE_P (list->insn ())
5304 655892 : && NOTE_KIND (list->insn ()) == NOTE_INSN_DELETED)
5305 : return true;
5306 : return false;
5307 : }
5308 :
5309 : /* Return true if X contains a pseudo dying in INSN. */
5310 : static bool
5311 2246025 : dead_pseudo_p (rtx x, rtx_insn *insn)
5312 : {
5313 2246025 : int i, j;
5314 2246025 : const char *fmt;
5315 2246025 : enum rtx_code code;
5316 :
5317 2246025 : if (REG_P (x))
5318 503338 : return (insn != NULL_RTX
5319 503338 : && find_regno_note (insn, REG_DEAD, REGNO (x)) != NULL_RTX);
5320 1742687 : code = GET_CODE (x);
5321 1742687 : fmt = GET_RTX_FORMAT (code);
5322 4461122 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5323 : {
5324 2723785 : if (fmt[i] == 'e')
5325 : {
5326 1341146 : if (dead_pseudo_p (XEXP (x, i), insn))
5327 : return true;
5328 : }
5329 1382639 : else if (fmt[i] == 'E')
5330 : {
5331 274058 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5332 250373 : if (dead_pseudo_p (XVECEXP (x, i, j), insn))
5333 : return true;
5334 : }
5335 : }
5336 : return false;
5337 : }
5338 :
5339 : /* Return true if INSN contains a dying pseudo in INSN right hand
5340 : side. */
5341 : static bool
5342 654506 : insn_rhs_dead_pseudo_p (rtx_insn *insn)
5343 : {
5344 654506 : rtx set = single_set (insn);
5345 :
5346 654506 : gcc_assert (set != NULL);
5347 654506 : return dead_pseudo_p (SET_SRC (set), insn);
5348 : }
5349 :
5350 : /* Return true if any init insn of REGNO contains a dying pseudo in
5351 : insn right hand side. */
5352 : static bool
5353 718414 : init_insn_rhs_dead_pseudo_p (int regno)
5354 : {
5355 718414 : rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
5356 :
5357 718414 : if (insns == NULL)
5358 : return false;
5359 1303555 : for (; insns != NULL_RTX; insns = insns->next ())
5360 654506 : if (insn_rhs_dead_pseudo_p (insns->insn ()))
5361 : return true;
5362 : return false;
5363 : }
5364 :
5365 : /* Return TRUE if REGNO has a reverse equivalence. The equivalence is
5366 : reverse only if we have one init insn with given REGNO as a
5367 : source. */
5368 : static bool
5369 719800 : reverse_equiv_p (int regno)
5370 : {
5371 719800 : rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
5372 719800 : rtx set;
5373 :
5374 719800 : if (insns == NULL)
5375 : return false;
5376 655892 : if (! INSN_P (insns->insn ())
5377 1311784 : || insns->next () != NULL)
5378 : return false;
5379 655892 : if ((set = single_set (insns->insn ())) == NULL_RTX)
5380 : return false;
5381 655892 : return REG_P (SET_SRC (set)) && (int) REGNO (SET_SRC (set)) == regno;
5382 : }
5383 :
5384 : /* Return TRUE if REGNO was reloaded in an equivalence init insn. We
5385 : call this function only for non-reverse equivalence. */
5386 : static bool
5387 712957 : contains_reloaded_insn_p (int regno)
5388 : {
5389 712957 : rtx set;
5390 712957 : rtx_insn_list *list = ira_reg_equiv[regno].init_insns;
5391 :
5392 1362006 : for (; list != NULL; list = list->next ())
5393 649049 : if ((set = single_set (list->insn ())) == NULL_RTX
5394 649049 : || ! REG_P (SET_DEST (set))
5395 1298098 : || (int) REGNO (SET_DEST (set)) != regno)
5396 : return true;
5397 : return false;
5398 : }
5399 :
5400 : /* Try combine secondary memory reload insn FROM for insn TO into TO insn.
5401 : FROM should be a load insn (usually a secondary memory reload insn). Return
5402 : TRUE in case of success. */
static bool
combine_reload_insn (rtx_insn *from, rtx_insn *to)
{
  bool ok_p;
  rtx_insn *saved_insn;
  rtx set, from_reg, to_reg, op;
  enum reg_class to_class, from_class;
  int n, nop;
  /* Indexes of TO's operands replaced by TO_REG, terminated by -1.  */
  signed char changed_nops[MAX_RECOG_OPERANDS + 1];

  /* Check conditions for second memory reload and original insn: */
  if ((targetm.secondary_memory_needed
       == hook_bool_mode_reg_class_t_reg_class_t_false)
      || NEXT_INSN (from) != to
      || !NONDEBUG_INSN_P (to)
      || CALL_P (to))
    return false;

  lra_insn_recog_data_t id = lra_get_insn_recog_data (to);
  struct lra_static_insn_data *static_id = id->insn_static_data;

  /* TO must already have a chosen alternative and FROM must be a
     simple single-set move.  */
  if (id->used_insn_alternative == LRA_UNKNOWN_ALT
      || (set = single_set (from)) == NULL_RTX)
    return false;
  from_reg = SET_DEST (set);
  to_reg = SET_SRC (set);
  /* Ignore optional reloads: */
  if (! REG_P (from_reg) || ! REG_P (to_reg)
      || bitmap_bit_p (&lra_optional_reload_pseudos, REGNO (from_reg)))
    return false;
  to_class = lra_get_allocno_class (REGNO (to_reg));
  from_class = lra_get_allocno_class (REGNO (from_reg));
  /* Check that reload insn is a load: */
  if (to_class != NO_REGS || from_class == NO_REGS)
    return false;
  /* Tentatively substitute TO_REG for every input operand of TO that
     currently uses FROM_REG, remembering which operands changed.  */
  for (n = nop = 0; nop < static_id->n_operands; nop++)
    {
      if (static_id->operand[nop].type != OP_IN)
	continue;
      op = *id->operand_loc[nop];
      if (!REG_P (op) || REGNO (op) != REGNO (from_reg))
	continue;
      *id->operand_loc[nop] = to_reg;
      changed_nops[n++] = nop;
    }
  changed_nops[n] = -1;
  lra_update_dups (id, changed_nops);
  lra_update_insn_regno_info (to);
  ok_p = recog_memoized (to) >= 0;
  if (ok_p)
    {
      /* Check that combined insn does not need any reloads: */
      saved_insn = curr_insn;
      curr_insn = to;
      curr_id = lra_get_insn_recog_data (curr_insn);
      curr_static_id = curr_id->insn_static_data;
      /* Try the insn as-is; if that needs reloads and the insn has a
	 commutative operand pair, try once more with the pair
	 swapped, swapping back on failure.  */
      for (bool swapped_p = false;;)
	{
	  ok_p = !curr_insn_transform (true);
	  if (ok_p || curr_static_id->commutative < 0)
	    break;
	  swap_operands (curr_static_id->commutative);
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       " Swapping %scombined insn operands:\n",
		       swapped_p ? "back " : "");
	      dump_insn_slim (lra_dump_file, to);
	    }
	  if (swapped_p)
	    break;
	  swapped_p = true;
	}
      /* Restore the pass-global current-insn state.  */
      curr_insn = saved_insn;
      curr_id = lra_get_insn_recog_data (curr_insn);
      curr_static_id = curr_id->insn_static_data;
    }
  if (ok_p)
    {
      /* Success: force re-selection of an alternative and reprocess
	 the combined insn.  */
      id->used_insn_alternative = -1;
      lra_push_insn_and_update_insn_regno_info (to);
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file, " Use combined insn:\n");
	  dump_insn_slim (lra_dump_file, to);
	}
      return true;
    }
  if (lra_dump_file != NULL)
    {
      fprintf (lra_dump_file, " Failed combined insn:\n");
      dump_insn_slim (lra_dump_file, to);
    }
  /* Failure: undo the tentative operand substitutions.  */
  for (int i = 0; i < n; i++)
    {
      nop = changed_nops[i];
      *id->operand_loc[nop] = from_reg;
    }
  lra_update_dups (id, changed_nops);
  lra_update_insn_regno_info (to);
  if (lra_dump_file != NULL)
    {
      fprintf (lra_dump_file, " Restoring insn after failed combining:\n");
      dump_insn_slim (lra_dump_file, to);
    }
  return false;
}
5510 :
5511 : /* Entry function of LRA constraint pass. Return true if the
5512 : constraint pass did change the code. */
bool
lra_constraints (bool first_p)
{
  bool changed_p;
  int i, hard_regno, new_insns_num;
  unsigned int min_len, new_min_len, uid;
  rtx set, x, reg, nosubreg_dest;
  rtx_insn *original_insn;
  basic_block last_bb;
  bitmap_iterator bi;

  lra_constraint_iter++;
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "\n********** Local #%d: **********\n\n",
	     lra_constraint_iter);
  changed_p = false;
  /* A pseudo PIC register means its assignment must be re-verified.  */
  if (pic_offset_table_rtx
      && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
    check_and_force_assignment_correctness_p = true;
  else if (first_p)
    /* On the first iteration we should check IRA assignment
       correctness.  In rare cases, the assignments can be wrong as
       early clobbers operands are ignored in IRA or usages of
       paradoxical sub-registers are not taken into account by
       IRA.  */
    check_and_force_assignment_correctness_p = true;
  new_insn_uid_start = get_max_uid ();
  new_regno_start = first_p ? lra_constraint_new_regno_start : max_reg_num ();
  /* Mark used hard regs for target stack size calculations.  */
  for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
    if (lra_reg_info[i].nrefs != 0
	&& (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
      {
	int j, nregs;

	nregs = hard_regno_nregs (hard_regno, lra_reg_info[i].biggest_mode);
	for (j = 0; j < nregs; j++)
	  df_set_regs_ever_live (hard_regno + j, true);
      }
  /* Do elimination before the equivalence processing as we can spill
     some pseudos during elimination.  */
  lra_eliminate (false, first_p);
  auto_bitmap equiv_insn_bitmap (&reg_obstack);

  /* Register elimination can create new pseudos via the addptr pattern,
     so make sure the equivalency tables are resized appropriately.  */
  ira_expand_reg_equiv ();
  /* Validate (and possibly cancel) the equivalence of each spilled
     pseudo, collecting the insns that mention still-valid
     equivalences into EQUIV_INSN_BITMAP.  */
  for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
    if (lra_reg_info[i].nrefs != 0)
      {
	ira_reg_equiv[i].profitable_p = true;
	reg = regno_reg_rtx[i];
	if (lra_get_regno_hard_regno (i) < 0 && (x = get_equiv (reg)) != reg)
	  {
	    bool pseudo_p = contains_reg_p (x, false, false);

	    /* After RTL transformation, we cannot guarantee that
	       pseudo in the substitution was not reloaded which might
	       make equivalence invalid.  For example, in reverse
	       equiv of p0

	       p0 <- ...
	       ...
	       equiv_mem <- p0

	       the memory address register was reloaded before the 2nd
	       insn.  */
	    if ((! first_p && pseudo_p)
		/* We don't use DF for compilation speed sake.  So it
		   is problematic to update live info when we use an
		   equivalence containing pseudos in more than one
		   BB.  */
		|| (pseudo_p && multi_block_pseudo_p (i))
		/* If an init insn was deleted for some reason, cancel
		   the equiv.  We could update the equiv insns after
		   transformations including an equiv insn deletion
		   but it is not worthy as such cases are extremely
		   rare.  */
		|| contains_deleted_insn_p (ira_reg_equiv[i].init_insns)
		/* If it is not a reverse equivalence, we check that a
		   pseudo in rhs of the init insn is not dying in the
		   insn.  Otherwise, the live info at the beginning of
		   the corresponding BB might be wrong after we
		   removed the insn.  When the equiv can be a
		   constant, the right hand side of the init insn can
		   be a pseudo.  */
		|| (! reverse_equiv_p (i)
		    && (init_insn_rhs_dead_pseudo_p (i)
			/* If we reloaded the pseudo in an equivalence
			   init insn, we cannot remove the equiv init
			   insns and the init insns might write into
			   const memory in this case.  */
			|| contains_reloaded_insn_p (i)))
		/* Prevent access beyond equivalent memory for
		   paradoxical subregs.  */
		|| (MEM_P (x)
		    && maybe_gt (GET_MODE_SIZE (lra_reg_info[i].biggest_mode),
				 GET_MODE_SIZE (GET_MODE (x))))
		|| (pic_offset_table_rtx
		    && ((CONST_POOL_OK_P (PSEUDO_REGNO_MODE (i), x)
			 && (targetm.preferred_reload_class
			     (x, lra_get_allocno_class (i)) == NO_REGS))
			|| contains_symbol_ref_p (x))))
	      ira_reg_equiv[i].defined_p
		= ira_reg_equiv[i].caller_save_p = false;
	    if (contains_reg_p (x, false, true))
	      ira_reg_equiv[i].profitable_p = false;
	    if (get_equiv (reg) != reg)
	      bitmap_ior_into (equiv_insn_bitmap, &lra_reg_info[i].insn_bitmap);
	  }
      }
  for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
    update_equiv (i);
  /* We should add all insns containing pseudos which should be
     substituted by their equivalences.  */
  EXECUTE_IF_SET_IN_BITMAP (equiv_insn_bitmap, 0, uid, bi)
    lra_push_insn_by_uid (uid);
  /* Drain the insn stack, transforming each insn to satisfy its
     constraints.  MIN_LEN/NEW_INSNS_NUM bound the number of reloads
     generated for any one original insn, to catch cycling bugs.  */
  min_len = lra_insn_stack_length ();
  new_insns_num = 0;
  last_bb = NULL;
  changed_p = false;
  original_insn = NULL;
  while ((new_min_len = lra_insn_stack_length ()) != 0)
    {
      curr_insn = lra_pop_insn ();
      --new_min_len;
      curr_bb = BLOCK_FOR_INSN (curr_insn);
      if (curr_bb != last_bb)
	{
	  last_bb = curr_bb;
	  bb_reload_num = lra_curr_reload_num;
	}
      if (min_len > new_min_len)
	{
	  /* The stack shrank: we moved on to a new original insn.  */
	  min_len = new_min_len;
	  new_insns_num = 0;
	  original_insn = curr_insn;
	}
      else if (combine_reload_insn (curr_insn, original_insn))
	{
	  /* The reload insn was folded into ORIGINAL_INSN.  */
	  continue;
	}
      if (new_insns_num > MAX_RELOAD_INSNS_NUMBER)
	internal_error
	  ("maximum number of generated reload insns per insn achieved (%d)",
	   MAX_RELOAD_INSNS_NUMBER);
      new_insns_num++;
      if (DEBUG_INSN_P (curr_insn))
	{
	  /* We need to check equivalence in debug insn and change
	     pseudo to the equivalent value if necessary.  */
	  curr_id = lra_get_insn_recog_data (curr_insn);
	  if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn)))
	    {
	      rtx old = *curr_id->operand_loc[0];
	      *curr_id->operand_loc[0]
		= simplify_replace_fn_rtx (old, NULL_RTX,
					   loc_equivalence_callback, curr_insn);
	      if (old != *curr_id->operand_loc[0])
		{
		  /* If we substitute pseudo by shared equivalence, we can fail
		     to update LRA reg info and this can result in many
		     unexpected consequences.  So keep rtl unshared: */
		  *curr_id->operand_loc[0]
		    = copy_rtx (*curr_id->operand_loc[0]);
		  lra_update_insn_regno_info (curr_insn);
		  changed_p = true;
		}
	    }
	}
      else if (INSN_P (curr_insn))
	{
	  if ((set = single_set (curr_insn)) != NULL_RTX)
	    {
	      nosubreg_dest = SET_DEST (set);
	      /* The equivalence pseudo could be set up as SUBREG in a
		 case when it is a call restore insn in a mode
		 different from the pseudo mode.  */
	      if (GET_CODE (nosubreg_dest) == SUBREG)
		nosubreg_dest = SUBREG_REG (nosubreg_dest);
	      if ((REG_P (nosubreg_dest)
		   && (x = get_equiv (nosubreg_dest)) != nosubreg_dest
		   /* Remove insns which set up a pseudo whose value
		      cannot be changed.  Such insns might be not in
		      init_insns because we don't update equiv data
		      during insn transformations.

		      As an example, let suppose that a pseudo got
		      hard register and on the 1st pass was not
		      changed to equivalent constant.  We generate an
		      additional insn setting up the pseudo because of
		      secondary memory movement.  Then the pseudo is
		      spilled and we use the equiv constant.  In this
		      case we should remove the additional insn and
		      this insn is not init_insns list.  */
		   && (! MEM_P (x) || MEM_READONLY_P (x)
		       /* Check that this is actually an insn setting
			  up the equivalence.  */
		       || in_list_p (curr_insn,
				     ira_reg_equiv
				     [REGNO (nosubreg_dest)].init_insns)))
		  || (((x = get_equiv (SET_SRC (set))) != SET_SRC (set))
		      && in_list_p (curr_insn,
				    ira_reg_equiv
				    [REGNO (SET_SRC (set))].init_insns)
		      /* This is a reverse equivalence to memory (see ira.cc)
			 in store insn.  We can reload all the destination and
			 have an output reload which is a store to memory.  If
			 we just remove the insn, we will have the output
			 reload storing an undefined value to the memory.
			 Check that we did not reload the memory to prevent a
			 wrong code generation.  We could implement using the
			 equivalence still in such case but doing this is not
			 worth the efforts as such case is very rare.  */
		      && MEM_P (nosubreg_dest)))
		{
		  /* This is equiv init insn of pseudo which did not get a
		     hard register -- remove the insn.  */
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file,
			       " Removing equiv init insn %i (freq=%d)\n",
			       INSN_UID (curr_insn),
			       REG_FREQ_FROM_BB (BLOCK_FOR_INSN (curr_insn)));
		      dump_insn_slim (lra_dump_file, curr_insn);
		    }
		  if (contains_reg_p (x, true, false))
		    check_and_force_assignment_correctness_p = true;
		  lra_set_insn_deleted (curr_insn);
		  continue;
		}
	    }
	  curr_id = lra_get_insn_recog_data (curr_insn);
	  curr_static_id = curr_id->insn_static_data;
	  init_curr_insn_input_reloads ();
	  init_curr_operand_mode ();
	  if (curr_insn_transform (false))
	    changed_p = true;
	  /* Check non-transformed insns too for equiv change as USE
	     or CLOBBER don't need reloads but can contain pseudos
	     being changed on their equivalences.  */
	  else if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn))
		   && loc_equivalence_change_p (&PATTERN (curr_insn)))
	    {
	      lra_update_insn_regno_info (curr_insn);
	      lra_push_insn_by_uid (INSN_UID (curr_insn));
	      changed_p = true;
	    }
	}
    }

  /* If we used a new hard regno, changed_p should be true because the
     hard reg is assigned to a new pseudo.  */
  if (flag_checking && !changed_p)
    {
      for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
	if (lra_reg_info[i].nrefs != 0
	    && (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
	  {
	    int j, nregs = hard_regno_nregs (hard_regno,
					     PSEUDO_REGNO_MODE (i));

	    for (j = 0; j < nregs; j++)
	      lra_assert (df_regs_ever_live_p (hard_regno + j));
	  }
    }
  if (changed_p)
    lra_dump_insns_if_possible ("changed func after local");
  return changed_p;
}
5783 :
5784 : static void initiate_invariants (void);
5785 : static void finish_invariants (void);
5786 :
/* Initiate the LRA constraint pass.  It is done once per
   function.  Currently the only per-function state to set up is the
   inheritance invariant table.  */
void
lra_constraints_init (void)
{
  initiate_invariants ();
}
5794 :
/* Finalize the LRA constraint pass.  It is done once per
   function.  Releases the state allocated by lra_constraints_init.  */
void
lra_constraints_finish (void)
{
  finish_invariants ();
}
5802 :
5803 :
5804 :
/* Structure describing invariants for inheritance.  */
struct lra_invariant
{
  /* The order number of the invariant.  */
  int num;
  /* The invariant RTX.  */
  rtx invariant_rtx;
  /* The origin insn of the invariant.  */
  rtx_insn *insn;
};
5815 :
typedef lra_invariant invariant_t;
typedef invariant_t *invariant_ptr_t;
typedef const invariant_t *const_invariant_ptr_t;

/* Vector of pointers to all inheritance invariants registered so
   far.  */
static vec<invariant_ptr_t> invariants;

/* Allocation pool backing the invariant entries.  */
static object_allocator<lra_invariant> *invariants_pool;

/* Hash table used to deduplicate the invariants by their RTX.  */
static htab_t invariant_table;
5828 :
5829 : /* Hash function for INVARIANT. */
5830 : static hashval_t
5831 173354 : invariant_hash (const void *invariant)
5832 : {
5833 173354 : rtx inv = ((const_invariant_ptr_t) invariant)->invariant_rtx;
5834 173354 : return lra_rtx_hash (inv);
5835 : }
5836 :
5837 : /* Equal function for invariants INVARIANT1 and INVARIANT2. */
5838 : static int
5839 56842 : invariant_eq_p (const void *invariant1, const void *invariant2)
5840 : {
5841 56842 : rtx inv1 = ((const_invariant_ptr_t) invariant1)->invariant_rtx;
5842 56842 : rtx inv2 = ((const_invariant_ptr_t) invariant2)->invariant_rtx;
5843 :
5844 56842 : return rtx_equal_p (inv1, inv2);
5845 : }
5846 :
5847 : /* Insert INVARIANT_RTX into the table if it is not there yet. Return
5848 : invariant which is in the table. */
5849 : static invariant_ptr_t
5850 173162 : insert_invariant (rtx invariant_rtx)
5851 : {
5852 173162 : void **entry_ptr;
5853 173162 : invariant_t invariant;
5854 173162 : invariant_ptr_t invariant_ptr;
5855 :
5856 173162 : invariant.invariant_rtx = invariant_rtx;
5857 173162 : entry_ptr = htab_find_slot (invariant_table, &invariant, INSERT);
5858 173162 : if (*entry_ptr == NULL)
5859 : {
5860 150347 : invariant_ptr = invariants_pool->allocate ();
5861 150347 : invariant_ptr->invariant_rtx = invariant_rtx;
5862 150347 : invariant_ptr->insn = NULL;
5863 150347 : invariants.safe_push (invariant_ptr);
5864 150347 : *entry_ptr = (void *) invariant_ptr;
5865 : }
5866 173162 : return (invariant_ptr_t) *entry_ptr;
5867 : }
5868 :
/* Initiate the invariant table: create the invariant vector, the
   object pool backing the entries, and the deduplicating hash
   table.  */
static void
initiate_invariants (void)
{
  invariants.create (100);
  invariants_pool
    = new object_allocator<lra_invariant> ("Inheritance invariants");
  invariant_table = htab_create (100, invariant_hash, invariant_eq_p, NULL);
}
5878 :
/* Finish the invariant table: free the hash table, the entry pool and
   the vector created by initiate_invariants.  */
static void
finish_invariants (void)
{
  htab_delete (invariant_table);
  delete invariants_pool;
  invariants.release ();
}
5887 :
/* Make the invariant table empty: drop all entries from the hash
   table, the pool and the vector without destroying the containers
   themselves.  */
static void
clear_invariants (void)
{
  htab_empty (invariant_table);
  invariants_pool->release ();
  invariants.truncate (0);
}
5896 :
5897 :
5898 :
/* This page contains code to do inheritance/split
   transformations.  */

/* Number of reloads passed so far in current EBB.  */
static int reloads_num;

/* Number of calls passed so far in current EBB.  */
static int calls_num;

/* Index ID is the CALLS_NUM associated the last call we saw with
   ABI identifier ID.  */
static int last_call_for_abi[NUM_ABI_IDS];

/* Which registers have been fully or partially clobbered by a call
   since they were last used.  */
static HARD_REG_SET full_and_partial_call_clobbers;

/* Current reload pseudo check for validity of elements in
   USAGE_INSNS.  */
static int curr_usage_insns_check;

/* Info about last usage of registers in EBB to do inheritance/split
   transformation.  Inheritance transformation is done from a spilled
   pseudo and split transformations from a hard register or a pseudo
   assigned to a hard register.  */
struct usage_insns
{
  /* If the value is equal to CURR_USAGE_INSNS_CHECK, then the member
     value INSNS is valid.  The insns is chain of optional debug insns
     and a finishing non-debug insn using the corresponding reg.  The
     value is also used to mark the registers which are set up in the
     current insn.  The negated insn uid is used for this.  */
  int check;
  /* Value of global reloads_num at the last insn in INSNS.  */
  int reloads_num;
  /* Value of global calls_num at the last insn in INSNS.  */
  int calls_num;
  /* It can be true only for splitting.  And it means that the restore
     insn should be put after insn given by the following member.  */
  bool after_p;
  /* Next insns in the current EBB which use the original reg and the
     original reg value is not changed between the current insn and
     the next insns.  In other words, e.g. for inheritance, if we need
     to use the original reg value again in the next insns we can try
     to use the value in a hard register from a reload insn of the
     current insn.  */
  rtx insns;
};

/* Map: regno -> corresponding pseudo usage insns.  */
static struct usage_insns *usage_insns;
5950 :
/* Record INSN as the next usage insn for REGNO, validating the entry
   for the current pass via CURR_USAGE_INSNS_CHECK and remembering the
   current RELOADS_NUM and CALLS_NUM.  AFTER_P is stored for the split
   case: it means a restore insn should be put after INSN (see struct
   usage_insns).  For a pseudo assigned to a hard register, the hard
   register is removed from the set of registers clobbered by a call
   since their last use.  */
static void
setup_next_usage_insn (int regno, rtx insn, int reloads_num, bool after_p)
{
  usage_insns[regno].check = curr_usage_insns_check;
  usage_insns[regno].insns = insn;
  usage_insns[regno].reloads_num = reloads_num;
  usage_insns[regno].calls_num = calls_num;
  usage_insns[regno].after_p = after_p;
  if (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0)
    remove_from_hard_reg_set (&full_and_partial_call_clobbers,
			      PSEUDO_REGNO_MODE (regno),
			      reg_renumber[regno]);
}
5964 :
/* The function is used to form list REGNO usages which consists of
   optional debug insns finished by a non-debug insn using REGNO.
   RELOADS_NUM is current number of reload insns processed so far.  */
static void
add_next_usage_insn (int regno, rtx_insn *insn, int reloads_num)
{
  rtx next_usage_insns;

  if (usage_insns[regno].check == curr_usage_insns_check
      && (next_usage_insns = usage_insns[regno].insns) != NULL_RTX
      && DEBUG_INSN_P (insn))
    {
      /* There is already a valid usage chain and INSN is a debug insn:
	 prepend it to the chain.  Check that we did not add the debug
	 insn yet.  */
      if (next_usage_insns != insn
	  && (GET_CODE (next_usage_insns) != INSN_LIST
	      || XEXP (next_usage_insns, 0) != insn))
	usage_insns[regno].insns = gen_rtx_INSN_LIST (VOIDmode, insn,
						      next_usage_insns);
    }
  else if (NONDEBUG_INSN_P (insn))
    /* A non-debug use starts a fresh chain for REGNO.  */
    setup_next_usage_insn (regno, insn, reloads_num, false);
  else
    /* INSN is a debug insn but there is no valid chain to extend:
       invalidate the entry rather than start a chain with a debug
       insn.  */
    usage_insns[regno].check = 0;
}
5989 :
5990 : /* Return first non-debug insn in list USAGE_INSNS. */
5991 : static rtx_insn *
5992 1163865 : skip_usage_debug_insns (rtx usage_insns)
5993 : {
5994 1163865 : rtx insn;
5995 :
5996 : /* Skip debug insns. */
5997 1163865 : for (insn = usage_insns;
5998 1445938 : insn != NULL_RTX && GET_CODE (insn) == INSN_LIST;
5999 282073 : insn = XEXP (insn, 1))
6000 : ;
6001 1163865 : return safe_as_a <rtx_insn *> (insn);
6002 : }
6003 :
6004 : /* Return true if we need secondary memory moves for insn in
6005 : USAGE_INSNS after inserting inherited pseudo of class INHER_CL
6006 : into the insn. */
6007 : static bool
6008 1163872 : check_secondary_memory_needed_p (enum reg_class inher_cl ATTRIBUTE_UNUSED,
6009 : rtx usage_insns ATTRIBUTE_UNUSED)
6010 : {
6011 1163872 : rtx_insn *insn;
6012 1163872 : rtx set, dest;
6013 1163872 : enum reg_class cl;
6014 :
6015 1163872 : if (inher_cl == ALL_REGS
6016 1163872 : || (insn = skip_usage_debug_insns (usage_insns)) == NULL_RTX)
6017 : return false;
6018 1163865 : lra_assert (INSN_P (insn));
6019 1163865 : if ((set = single_set (insn)) == NULL_RTX || ! REG_P (SET_DEST (set)))
6020 : return false;
6021 1125963 : dest = SET_DEST (set);
6022 1125963 : if (! REG_P (dest))
6023 : return false;
6024 1125963 : lra_assert (inher_cl != NO_REGS);
6025 1125963 : cl = get_reg_class (REGNO (dest));
6026 1125963 : return (cl != NO_REGS && cl != ALL_REGS
6027 1125963 : && targetm.secondary_memory_needed (GET_MODE (dest), inher_cl, cl));
6028 : }
6029 :
/* Registers involved in inheritance/split in the current EBB
   (inheritance/split pseudos and original registers).  */
static bitmap_head check_only_regs;

/* Reload pseudos that cannot be involved in invariant inheritance in
   the current EBB.  */
static bitmap_head invalid_invariant_regs;
6037 :
6038 : /* Do inheritance transformations for insn INSN, which defines (if
6039 : DEF_P) or uses ORIGINAL_REGNO. NEXT_USAGE_INSNS specifies which
6040 : instruction in the EBB next uses ORIGINAL_REGNO; it has the same
6041 : form as the "insns" field of usage_insns. Return true if we
6042 : succeed in such transformation.
6043 :
6044 : The transformations look like:
6045 :
6046 : p <- ... i <- ...
6047 : ... p <- i (new insn)
6048 : ... =>
6049 : <- ... p ... <- ... i ...
6050 : or
6051 : ... i <- p (new insn)
6052 : <- ... p ... <- ... i ...
6053 : ... =>
6054 : <- ... p ... <- ... i ...
6055 : where p is a spilled original pseudo and i is a new inheritance pseudo.
6056 :
6057 :
6058 : The inheritance pseudo has the smallest class of two classes CL and
6059 : class of ORIGINAL REGNO. */
static bool
inherit_reload_reg (bool def_p, int original_regno,
		    enum reg_class cl, rtx_insn *insn, rtx next_usage_insns)
{
  /* Inheritance adds move insns, so it is a size pessimization.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  enum reg_class rclass = lra_get_allocno_class (original_regno);
  rtx original_reg = regno_reg_rtx[original_regno];
  rtx new_reg, usage_insn;
  rtx_insn *new_insns;

  lra_assert (! usage_insns[original_regno].after_p);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "    <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n");
  /* A register of the inheritance class must be usable where the
     original pseudo's class is expected.  */
  if (! ira_reg_classes_intersect_p[cl][rclass])
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for %d "
		   "because of disjoint classes %s and %s\n",
		   original_regno, reg_class_names[cl],
		   reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  if ((ira_class_subset_p[cl][rclass] && cl != rclass)
      /* We don't use a subset of two classes because it can be
	 NO_REGS.  This transformation is still profitable in most
	 cases even if the classes are not intersected as register
	 move is probably cheaper than a memory load.  */
      || ira_class_hard_regs_num[cl] < ira_class_hard_regs_num[rclass])
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "    Use smallest class of %s and %s\n",
		 reg_class_names[cl], reg_class_names[rclass]);

      rclass = cl;
    }
  if (check_secondary_memory_needed_p (rclass, next_usage_insns))
    {
      /* Reject inheritance resulting in secondary memory moves.
	 Otherwise, there is a danger in LRA cycling.  Also such
	 transformation will be unprofitable.  */
      if (lra_dump_file != NULL)
	{
	  rtx_insn *insn = skip_usage_debug_insns (next_usage_insns);
	  rtx set = single_set (insn);

	  lra_assert (set != NULL_RTX);

	  rtx dest = SET_DEST (set);

	  lra_assert (REG_P (dest));
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for insn %d(%s)<-%d(%s) "
		   "as secondary mem is needed\n",
		   REGNO (dest), reg_class_names[get_reg_class (REGNO (dest))],
		   original_regno, reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  /* Reject classes that would need a varying number of hard registers
     for this mode.  */
  if (ira_reg_class_min_nregs[rclass][GET_MODE (original_reg)]
      != ira_reg_class_max_nregs[rclass][GET_MODE (original_reg)])
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for %d "
		   "because of requiring non-uniform class %s\n",
		   original_regno, reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  new_reg = lra_create_new_reg (GET_MODE (original_reg), original_reg,
				rclass, NULL, "inheritance");
  /* Emit the copy between the original and inheritance pseudos; the
     copy must be exactly one insn, otherwise we give up below.  */
  start_sequence ();
  if (def_p)
    lra_emit_move (original_reg, new_reg);
  else
    lra_emit_move (new_reg, original_reg);
  new_insns = end_sequence ();
  if (NEXT_INSN (new_insns) != NULL_RTX)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance %d->%d "
		   "as it results in 2 or more insns:\n",
		   original_regno, REGNO (new_reg));
	  dump_rtl_slim (lra_dump_file, new_insns, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  /* Replace the original pseudo by the inheritance pseudo in INSN.  */
  lra_substitute_pseudo_within_insn (insn, original_regno, new_reg, false);
  lra_update_insn_regno_info (insn);
  if (! def_p)
    /* We now have a new usage insn for original regno.  */
    setup_next_usage_insn (original_regno, new_insns, reloads_num, false);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "    Original reg change %d->%d (bb%d):\n",
	     original_regno, REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
  /* Remember how to undo the transformation if it turns out to be
     useless (see the undo pass).  */
  lra_reg_info[REGNO (new_reg)].restore_rtx = regno_reg_rtx[original_regno];
  bitmap_set_bit (&check_only_regs, REGNO (new_reg));
  bitmap_set_bit (&check_only_regs, original_regno);
  bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
  if (def_p)
    lra_process_new_insns (insn, NULL, new_insns,
			   "Add original<-inheritance");
  else
    lra_process_new_insns (insn, new_insns, NULL,
			   "Add inheritance<-original");
  /* Rewrite the recorded next usages (debug insns first, finished by
     the non-debug use) to refer to the inheritance pseudo.  */
  while (next_usage_insns != NULL_RTX)
    {
      if (GET_CODE (next_usage_insns) != INSN_LIST)
	{
	  usage_insn = next_usage_insns;
	  lra_assert (NONDEBUG_INSN_P (usage_insn));
	  next_usage_insns = NULL;
	}
      else
	{
	  usage_insn = XEXP (next_usage_insns, 0);
	  lra_assert (DEBUG_INSN_P (usage_insn));
	  next_usage_insns = XEXP (next_usage_insns, 1);
	}
      lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
			     DEBUG_INSN_P (usage_insn));
      lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
      if (lra_dump_file != NULL)
	{
	  basic_block bb = BLOCK_FOR_INSN (usage_insn);
	  fprintf (lra_dump_file,
		   "    Inheritance reuse change %d->%d (bb%d):\n",
		   original_regno, REGNO (new_reg),
		   bb ? bb->index : -1);
	  dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
	}
    }
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "	  >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
  return true;
}
6214 :
/* Return true if we need a caller save/restore for pseudo REGNO which
   was assigned to a hard register.  */
static inline bool
need_for_call_save_p (int regno)
{
  lra_assert (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0);
  /* Only relevant if some call happened after the last recorded use
     of REGNO.  */
  if (usage_insns[regno].calls_num < calls_num)
    {
      /* Collect the set of ABIs of all calls seen since REGNO's last
	 use.  */
      unsigned int abis = 0;
      for (unsigned int i = 0; i < NUM_ABI_IDS; ++i)
	if (last_call_for_abi[i] > usage_insns[regno].calls_num)
	  abis |= 1 << i;
      gcc_assert (abis);
      if (call_clobbered_in_region_p (abis, full_and_partial_call_clobbers,
				      PSEUDO_REGNO_MODE (regno),
				      reg_renumber[regno]))
	return true;
    }
  return false;
}
6235 :
/* Global registers occurring in the current EBB.  */
static bitmap_head ebb_global_regs;

/* Return true if we need a split for hard register REGNO or pseudo
   REGNO which was assigned to a hard register.
   POTENTIAL_RELOAD_HARD_REGS contains hard registers which might be
   used for reloads since the EBB end.  It is an approximation of the
   used hard registers in the split range.  The exact value would
   require expensive calculations.  If we were aggressive with
   splitting because of the approximation, the split pseudo will save
   the same hard register assignment and will be removed in the undo
   pass.  We still need the approximation because too aggressive
   splitting would result in too inaccurate cost calculation in the
   assignment pass because of too many generated moves which will be
   probably removed in the undo pass.  */
static inline bool
need_for_split_p (HARD_REG_SET potential_reload_hard_regs, int regno)
{
  int hard_regno = regno < FIRST_PSEUDO_REGISTER ? regno : reg_renumber[regno];

  lra_assert (hard_regno >= 0);
  return ((TEST_HARD_REG_BIT (potential_reload_hard_regs, hard_regno)
	   /* Don't split eliminable hard registers, otherwise we can
	      split hard registers like hard frame pointer, which
	      lives on BB start/end according to DF-infrastructure,
	      when there is a pseudo assigned to the register and
	      living in the same BB.  */
	   && (regno >= FIRST_PSEUDO_REGISTER
	       || ! TEST_HARD_REG_BIT (eliminable_regset, hard_regno))
	   && ! TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno)
	   /* Don't split call clobbered hard regs living through
	      calls, otherwise we might have a check problem in the
	      assign sub-pass as in the most cases (exception is a
	      situation when check_and_force_assignment_correctness_p value is
	      true) the assign pass assumes that all pseudos living
	      through calls are assigned to call saved hard regs.  */
	   && (regno >= FIRST_PSEUDO_REGISTER
	       || !TEST_HARD_REG_BIT (full_and_partial_call_clobbers, regno))
	   /* We need at least 2 reloads to make pseudo splitting
	      profitable.  We should provide hard regno splitting in
	      any case to solve 1st insn scheduling problem when
	      moving hard register definition up might result in
	      impossibility to find hard register for reload pseudo of
	      small register class.  */
	   && (usage_insns[regno].reloads_num
	       + (regno < FIRST_PSEUDO_REGISTER ? 0 : 3) < reloads_num)
	   && (regno < FIRST_PSEUDO_REGISTER
	       /* For short living pseudos, spilling + inheritance can
		  be considered a substitution for splitting.
		  Therefore we do not split local pseudos.  It
		  decreases also aggressiveness of splitting.  The
		  minimal number of references is chosen taking into
		  account that for 2 references splitting has no sense
		  as we can just spill the pseudo.  */
	       || (regno >= FIRST_PSEUDO_REGISTER
		   && lra_reg_info[regno].nrefs > 3
		   && bitmap_bit_p (&ebb_global_regs, regno))))
	  || (regno >= FIRST_PSEUDO_REGISTER && need_for_call_save_p (regno)));
}
6295 :
6296 : /* Return class for the split pseudo created from original pseudo with
6297 : ALLOCNO_CLASS and MODE which got a hard register HARD_REGNO. We
6298 : choose subclass of ALLOCNO_CLASS which contains HARD_REGNO and
6299 : results in no secondary memory movements. */
6300 : static enum reg_class
6301 1407 : choose_split_class (enum reg_class allocno_class,
6302 : int hard_regno ATTRIBUTE_UNUSED,
6303 : machine_mode mode ATTRIBUTE_UNUSED)
6304 : {
6305 1407 : int i;
6306 1407 : enum reg_class cl, best_cl = NO_REGS;
6307 1407 : enum reg_class hard_reg_class ATTRIBUTE_UNUSED
6308 : = REGNO_REG_CLASS (hard_regno);
6309 :
6310 1407 : if (! targetm.secondary_memory_needed (mode, allocno_class, allocno_class)
6311 1407 : && TEST_HARD_REG_BIT (reg_class_contents[allocno_class], hard_regno))
6312 : return allocno_class;
6313 0 : for (i = 0;
6314 0 : (cl = reg_class_subclasses[allocno_class][i]) != LIM_REG_CLASSES;
6315 : i++)
6316 0 : if (! targetm.secondary_memory_needed (mode, cl, hard_reg_class)
6317 0 : && ! targetm.secondary_memory_needed (mode, hard_reg_class, cl)
6318 0 : && TEST_HARD_REG_BIT (reg_class_contents[cl], hard_regno)
6319 0 : && (best_cl == NO_REGS
6320 0 : || ira_class_hard_regs_num[best_cl] < ira_class_hard_regs_num[cl]))
6321 : best_cl = cl;
6322 : return best_cl;
6323 : }
6324 :
6325 : /* Copy any equivalence information from ORIGINAL_REGNO to NEW_REGNO. It only
6326 : makes sense to call this function if NEW_REGNO is always equal to
6327 : ORIGINAL_REGNO. Set up defined_p flag when caller_save_p flag is set up and
6328 : CALL_SAVE_P is true. */
6329 :
6330 : static void
6331 657950 : lra_copy_reg_equiv (unsigned int new_regno, unsigned int original_regno,
6332 : bool call_save_p)
6333 : {
6334 657950 : if (!ira_reg_equiv[original_regno].defined_p
6335 595049 : && !(call_save_p && ira_reg_equiv[original_regno].caller_save_p))
6336 : return;
6337 :
6338 63072 : ira_expand_reg_equiv ();
6339 63072 : ira_reg_equiv[new_regno].defined_p = true;
6340 63072 : if (ira_reg_equiv[original_regno].memory)
6341 29933 : ira_reg_equiv[new_regno].memory
6342 29933 : = copy_rtx (ira_reg_equiv[original_regno].memory);
6343 63072 : if (ira_reg_equiv[original_regno].constant)
6344 26790 : ira_reg_equiv[new_regno].constant
6345 26790 : = copy_rtx (ira_reg_equiv[original_regno].constant);
6346 63072 : if (ira_reg_equiv[original_regno].invariant)
6347 6349 : ira_reg_equiv[new_regno].invariant
6348 6349 : = copy_rtx (ira_reg_equiv[original_regno].invariant);
6349 : }
6350 :
6351 : /* Do split transformations for insn INSN, which defines or uses
6352 : ORIGINAL_REGNO. NEXT_USAGE_INSNS specifies which instruction in
6353 : the EBB next uses ORIGINAL_REGNO; it has the same form as the
6354 : "insns" field of usage_insns. If TO is not NULL, we don't use
6355 : usage_insns, we put restore insns after TO insn. It is a case when
6356 : we call it from lra_split_hard_reg_for, outside the inheritance
6357 : pass.
6358 :
6359 : The transformations look like:
6360 :
6361 : p <- ... p <- ...
6362 : ... s <- p (new insn -- save)
6363 : ... =>
6364 : ... p <- s (new insn -- restore)
6365 : <- ... p ... <- ... p ...
6366 : or
6367 : <- ... p ... <- ... p ...
6368 : ... s <- p (new insn -- save)
6369 : ... =>
6370 : ... p <- s (new insn -- restore)
6371 : <- ... p ... <- ... p ...
6372 :
6373 : where p is an original pseudo got a hard register or a hard
6374 : register and s is a new split pseudo. The save is put before INSN
6375 : if BEFORE_P is true. Return true if we succeed in such
6376 : transformation. */
static bool
split_reg (bool before_p, int original_regno, rtx_insn *insn,
	   rtx next_usage_insns, rtx_insn *to)
{
  enum reg_class rclass;
  rtx original_reg;
  int hard_regno, nregs;
  rtx new_reg, usage_insn;
  rtx_insn *restore, *save;
  bool after_p;
  bool call_save_p;
  machine_mode mode;

  if (original_regno < FIRST_PSEUDO_REGISTER)
    {
      /* Splitting a hard register itself.  */
      rclass = ira_allocno_class_translate[REGNO_REG_CLASS (original_regno)];
      hard_regno = original_regno;
      call_save_p = false;
      nregs = 1;
      mode = lra_reg_info[hard_regno].biggest_mode;
      machine_mode reg_rtx_mode = GET_MODE (regno_reg_rtx[hard_regno]);
      /* A reg can have a biggest_mode of VOIDmode if it was only ever seen as
	 part of a multi-word register.  In that case, just use the reg_rtx
	 mode.  Do the same also if the biggest mode was larger than a register
	 or we can not compare the modes.  Otherwise, limit the size to that of
	 the biggest access in the function or to the natural mode at least.  */
      if (mode == VOIDmode
	  || !ordered_p (GET_MODE_PRECISION (mode),
			 GET_MODE_PRECISION (reg_rtx_mode))
	  || paradoxical_subreg_p (mode, reg_rtx_mode)
	  || maybe_gt (GET_MODE_PRECISION (reg_rtx_mode), GET_MODE_PRECISION (mode)))
	{
	  original_reg = regno_reg_rtx[hard_regno];
	  mode = reg_rtx_mode;
	}
      else
	original_reg = gen_rtx_REG (mode, hard_regno);
    }
  else
    {
      /* Splitting a pseudo that was assigned a hard register.  */
      mode = PSEUDO_REGNO_MODE (original_regno);
      hard_regno = reg_renumber[original_regno];
      nregs = hard_regno_nregs (hard_regno, mode);
      rclass = lra_get_allocno_class (original_regno);
      original_reg = regno_reg_rtx[original_regno];
      call_save_p = need_for_call_save_p (original_regno);
    }
  lra_assert (hard_regno >= 0);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "	  ((((((((((((((((((((((((((((((((((((((((((((((((\n");

  if (call_save_p)
    {
      /* The split pseudo is a save slot around calls; give it no
	 class so it can live in memory, using the target's preferred
	 caller-save mode.  */
      mode = HARD_REGNO_CALLER_SAVE_MODE (hard_regno,
					  hard_regno_nregs (hard_regno, mode),
					  mode);
      new_reg = lra_create_new_reg (mode, NULL_RTX, NO_REGS, NULL, "save");
    }
  else
    {
      rclass = choose_split_class (rclass, hard_regno, mode);
      if (rclass == NO_REGS)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "    Rejecting split of %d(%s): "
		       "no good reg class for %d(%s)\n",
		       original_regno,
		       reg_class_names[lra_get_allocno_class (original_regno)],
		       hard_regno,
		       reg_class_names[REGNO_REG_CLASS (hard_regno)]);
	      fprintf
		(lra_dump_file,
		 "    ))))))))))))))))))))))))))))))))))))))))))))))))\n");
	    }
	  return false;
	}
      /* Split_if_necessary can split hard registers used as part of a
	 multi-register mode but splits each register individually.  The
	 mode used for each independent register may not be supported
	 so reject the split.  Splitting the wider mode should theoretically
	 be possible but is not implemented.  */
      if (!targetm.hard_regno_mode_ok (hard_regno, mode))
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "    Rejecting split of %d(%s): unsuitable mode %s\n",
		       original_regno,
		       reg_class_names[lra_get_allocno_class (original_regno)],
		       GET_MODE_NAME (mode));
	      fprintf
		(lra_dump_file,
		 "    ))))))))))))))))))))))))))))))))))))))))))))))))\n");
	    }
	  return false;
	}
      new_reg = lra_create_new_reg (mode, original_reg, rclass, NULL, "split");
      /* The split pseudo keeps the original's hard register.  */
      reg_renumber[REGNO (new_reg)] = hard_regno;
    }
  int new_regno = REGNO (new_reg);
  /* Both the save and the restore must each be a single insn (except
     for the call-save case, where multiple insns are acceptable).  */
  save = emit_spill_move (true, new_reg, original_reg);
  if (NEXT_INSN (save) != NULL_RTX && !call_save_p)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf
	    (lra_dump_file,
	     "	  Rejecting split %d->%d resulting in > 2 save insns:\n",
	     original_regno, new_regno);
	  dump_rtl_slim (lra_dump_file, save, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "	))))))))))))))))))))))))))))))))))))))))))))))))\n");
	}
      return false;
    }
  restore = emit_spill_move (false, new_reg, original_reg);
  if (NEXT_INSN (restore) != NULL_RTX && !call_save_p)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "	Rejecting split %d->%d "
		   "resulting in > 2 restore insns:\n",
		   original_regno, new_regno);
	  dump_rtl_slim (lra_dump_file, restore, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "	))))))))))))))))))))))))))))))))))))))))))))))))\n");
	}
      return false;
    }
  /* Transfer equivalence information to the spill register, so that
     if we fail to allocate the spill register, we have the option of
     rematerializing the original value instead of spilling to the stack.  */
  if (!HARD_REGISTER_NUM_P (original_regno)
      && mode == PSEUDO_REGNO_MODE (original_regno))
    lra_copy_reg_equiv (new_regno, original_regno, call_save_p);
  /* Remember how to undo the split in the undo pass.  */
  lra_reg_info[new_regno].restore_rtx = regno_reg_rtx[original_regno];
  bitmap_set_bit (&lra_split_regs, new_regno);
  if (to != NULL)
    {
      /* Called from lra_split_hard_reg_for: the restore goes right
	 after TO, not at a recorded usage insn.  */
      lra_assert (next_usage_insns == NULL);
      usage_insn = to;
      after_p = true;
    }
  else
    {
      /* We need check_only_regs only inside the inheritance pass.  */
      bitmap_set_bit (&check_only_regs, new_regno);
      bitmap_set_bit (&check_only_regs, original_regno);
      after_p = usage_insns[original_regno].after_p;
      /* Rewrite the leading debug insns in the usage chain to use the
	 split pseudo; the chain terminator (non-debug insn) becomes
	 the place for the restore insn.  */
      for (;;)
	{
	  if (GET_CODE (next_usage_insns) != INSN_LIST)
	    {
	      usage_insn = next_usage_insns;
	      break;
	    }
	  usage_insn = XEXP (next_usage_insns, 0);
	  lra_assert (DEBUG_INSN_P (usage_insn));
	  next_usage_insns = XEXP (next_usage_insns, 1);
	  lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
				 true);
	  lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file, "    Split reuse change %d->%d:\n",
		       original_regno, new_regno);
	      dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
	    }
	}
    }
  lra_assert (NOTE_P (usage_insn) || NONDEBUG_INSN_P (usage_insn));
  lra_assert (usage_insn != insn || (after_p && before_p));
  lra_process_new_insns (as_a <rtx_insn *> (usage_insn),
			 after_p ? NULL : restore,
			 after_p ? restore : NULL,
			 call_save_p ? "Add reg<-save" : "Add reg<-split");
  if (call_save_p
      && first_call_insn != NULL
      && BLOCK_FOR_INSN (first_call_insn) != BLOCK_FOR_INSN (insn))
    /* PR116028: If original_regno is a pseudo that has been assigned a
       callee-saved hard register, then emit the spill insn before the call
       insn 'first_call_insn' instead of adjacent to 'insn'.  If 'insn'
       and 'first_call_insn' belong to the same EBB but to two separate
       BBs, and if 'insn' is present in the entry BB, then generating the
       spill insn in the entry BB can prevent shrink wrap from happening.
       This is because the spill insn references the stack pointer and
       hence the prolog gets generated in the entry BB itself.  It is
       also more efficient to generate the spill before
       'first_call_insn' as the spill now occurs only in the path
       containing the call.  */
    lra_process_new_insns (first_call_insn, save, NULL, "Add save<-reg");
  else
    lra_process_new_insns (insn, before_p ? save : NULL,
			   before_p ? NULL : save,
			   call_save_p ? "Add save<-reg" : "Add split<-reg");
  if (nregs > 1 || original_regno < FIRST_PSEUDO_REGISTER)
    /* If we are trying to split multi-register.  We should check
       conflicts on the next assignment sub-pass.  IRA can allocate on
       sub-register levels, LRA do this on pseudos level right now and
       this discrepancy may create allocation conflicts after
       splitting.

       If we are trying to split hard register we should also check conflicts
       as such splitting can create artificial conflict of the hard register
       with another pseudo because of simplified conflict calculation in
       LRA.  */
    check_and_force_assignment_correctness_p = true;
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "	  ))))))))))))))))))))))))))))))))))))))))))))))))\n");
  return true;
}
6593 :
6594 : /* Split a hard reg for reload pseudo REGNO having RCLASS and living
6595 : in the range [FROM, TO]. Return true if did a split. Otherwise,
6596 : return false. */
bool
spill_hard_reg_in_range (int regno, enum reg_class rclass, rtx_insn *from, rtx_insn *to)
{
  int i, hard_regno;
  int rclass_size;
  rtx_insn *insn;
  unsigned int uid;
  bitmap_iterator bi;
  HARD_REG_SET ignore;

  lra_assert (from != NULL && to != NULL);
  /* Collect hard registers we must not split: non-allocatable regs
     plus every hard reg referenced, explicitly or implicitly, by an
     insn that also references REGNO.  */
  ignore = lra_no_alloc_regs;
  EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
    {
      lra_insn_recog_data_t id = lra_insn_recog_data[uid];
      struct lra_static_insn_data *static_id = id->insn_static_data;
      struct lra_insn_reg *reg;

      for (reg = id->regs; reg != NULL; reg = reg->next)
	if (reg->regno < FIRST_PSEUDO_REGISTER)
	  SET_HARD_REG_BIT (ignore, reg->regno);
      for (reg = static_id->hard_regs; reg != NULL; reg = reg->next)
	SET_HARD_REG_BIT (ignore, reg->regno);
    }
  /* Try each hard register of RCLASS in allocation order and take the
     first one that conflicts with REGNO (so spilling it can free a reg
     for REGNO) and is not otherwise referenced in [FROM, TO].  */
  rclass_size = ira_class_hard_regs_num[rclass];
  for (i = 0; i < rclass_size; i++)
    {
      hard_regno = ira_class_hard_regs[rclass][i];
      if (! TEST_HARD_REG_BIT (lra_reg_info[regno].conflict_hard_regs, hard_regno)
	  || TEST_HARD_REG_BIT (ignore, hard_regno))
	continue;
      /* Scan [FROM, TO]; any explicit or implicit use of HARD_REGNO in
	 the range disqualifies it (the loop breaks early in that case,
	 leaving INSN != NEXT_INSN (TO)).  */
      for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
	{
	  struct lra_static_insn_data *static_id;
	  struct lra_insn_reg *reg;

	  if (!INSN_P (insn))
	    continue;
	  if (bitmap_bit_p (&lra_reg_info[hard_regno].insn_bitmap,
			    INSN_UID (insn)))
	    break;
	  static_id = lra_get_insn_recog_data (insn)->insn_static_data;
	  for (reg = static_id->hard_regs; reg != NULL; reg = reg->next)
	    if (reg->regno == hard_regno)
	      break;
	  if (reg != NULL)
	    break;
	}
      if (insn != NEXT_INSN (to))
	continue;
      if (split_reg (true, hard_regno, from, NULL, to))
	return true;
    }
  return false;
}
6652 :
6653 : /* Recognize that we need a split transformation for insn INSN, which
6654 : defines or uses REGNO in its insn biggest MODE (we use it only if
6655 : REGNO is a hard register). POTENTIAL_RELOAD_HARD_REGS contains
6656 : hard registers which might be used for reloads since the EBB end.
6657 : Put the save before INSN if BEFORE_P is true. MAX_UID is maximla
6658 : uid before starting INSN processing. Return true if we succeed in
6659 : such transformation. */
static bool
split_if_necessary (int regno, machine_mode mode,
		    HARD_REG_SET potential_reload_hard_regs,
		    bool before_p, rtx_insn *insn, int max_uid)
{
  bool res = false;
  int i, nregs = 1;
  rtx next_usage_insns;

  /* A hard register is considered one sub-register at a time: check
     each of the NREGS registers it occupies in MODE separately.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    nregs = hard_regno_nregs (regno, mode);
  for (i = 0; i < nregs; i++)
    if (usage_insns[regno + i].check == curr_usage_insns_check
	&& (next_usage_insns = usage_insns[regno + i].insns) != NULL_RTX
	/* To avoid processing the register twice or more.  */
	&& ((GET_CODE (next_usage_insns) != INSN_LIST
	     && INSN_UID (next_usage_insns) < max_uid)
	    || (GET_CODE (next_usage_insns) == INSN_LIST
		&& (INSN_UID (XEXP (next_usage_insns, 0)) < max_uid)))
	&& need_for_split_p (potential_reload_hard_regs, regno + i)
	&& split_reg (before_p, regno + i, insn, next_usage_insns, NULL))
    res = true;
  return res;
}
6684 :
6685 : /* Return TRUE if rtx X is considered as an invariant for
6686 : inheritance. */
6687 : static bool
6688 11502535 : invariant_p (const_rtx x)
6689 : {
6690 11502535 : machine_mode mode;
6691 11502535 : const char *fmt;
6692 11502535 : enum rtx_code code;
6693 11502535 : int i, j;
6694 :
6695 11502535 : if (side_effects_p (x))
6696 : return false;
6697 :
6698 11476540 : code = GET_CODE (x);
6699 11476540 : mode = GET_MODE (x);
6700 11476540 : if (code == SUBREG)
6701 : {
6702 462475 : x = SUBREG_REG (x);
6703 462475 : code = GET_CODE (x);
6704 462475 : mode = wider_subreg_mode (mode, GET_MODE (x));
6705 : }
6706 :
6707 11476540 : if (MEM_P (x))
6708 : return false;
6709 :
6710 9728277 : if (REG_P (x))
6711 : {
6712 3469281 : int i, nregs, regno = REGNO (x);
6713 :
6714 3469281 : if (regno >= FIRST_PSEUDO_REGISTER || regno == STACK_POINTER_REGNUM
6715 902273 : || TEST_HARD_REG_BIT (eliminable_regset, regno)
6716 3486401 : || GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
6717 : return false;
6718 2 : nregs = hard_regno_nregs (regno, mode);
6719 2 : for (i = 0; i < nregs; i++)
6720 2 : if (! fixed_regs[regno + i]
6721 : /* A hard register may be clobbered in the current insn
6722 : but we can ignore this case because if the hard
6723 : register is used it should be set somewhere after the
6724 : clobber. */
6725 2 : || bitmap_bit_p (&invalid_invariant_regs, regno + i))
6726 2 : return false;
6727 : }
6728 6258996 : fmt = GET_RTX_FORMAT (code);
6729 10996633 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6730 : {
6731 8309086 : if (fmt[i] == 'e')
6732 : {
6733 5460560 : if (! invariant_p (XEXP (x, i)))
6734 : return false;
6735 : }
6736 2848526 : else if (fmt[i] == 'E')
6737 : {
6738 632492 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6739 524954 : if (! invariant_p (XVECEXP (x, i, j)))
6740 : return false;
6741 : }
6742 : }
6743 : return true;
6744 : }
6745 :
6746 : /* We have 'dest_reg <- invariant'. Let us try to make an invariant
6747 : inheritance transformation (using dest_reg instead invariant in a
6748 : subsequent insn). */
static bool
process_invariant_for_inheritance (rtx dst_reg, rtx invariant_rtx)
{
  invariant_ptr_t invariant_ptr;
  rtx_insn *insn, *new_insns;
  rtx insn_set, insn_reg, new_reg;
  int insn_regno;
  bool succ_p = false;
  int dst_regno = REGNO (dst_reg);
  machine_mode dst_mode = GET_MODE (dst_reg);
  enum reg_class cl = lra_get_allocno_class (dst_regno), insn_reg_cl;

  /* Look the invariant up in the invariant table; a non-NULL insn
     means a previously processed insn computed the same invariant.  */
  invariant_ptr = insert_invariant (invariant_rtx);
  if ((insn = invariant_ptr->insn) != NULL_RTX)
    {
      /* We have a subsequent insn using the invariant.  */
      insn_set = single_set (insn);
      lra_assert (insn_set != NULL);
      insn_reg = SET_DEST (insn_set);
      lra_assert (REG_P (insn_reg));
      insn_regno = REGNO (insn_reg);
      insn_reg_cl = lra_get_allocno_class (insn_regno);

      if (dst_mode == GET_MODE (insn_reg)
	  /* We should consider only result move reg insns which are
	     cheap.  */
	  && targetm.register_move_cost (dst_mode, cl, insn_reg_cl) == 2
	  && targetm.register_move_cost (dst_mode, cl, cl) == 2)
	{
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file,
		     "	  [[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[\n");
	  /* Create the inheritance pseudo; its restore rtx is the
	     pattern of the subsequent insn so the transformation can
	     be undone if the pseudo gets no hard register.  */
	  new_reg = lra_create_new_reg (dst_mode, dst_reg, cl, NULL,
					"invariant inheritance");
	  bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
	  bitmap_set_bit (&check_only_regs, REGNO (new_reg));
	  lra_reg_info[REGNO (new_reg)].restore_rtx = PATTERN (insn);
	  /* Emit 'new_reg <- dst_reg' after the current insn ...  */
	  start_sequence ();
	  lra_emit_move (new_reg, dst_reg);
	  new_insns = end_sequence ();
	  lra_process_new_insns (curr_insn, NULL, new_insns,
				 "Add invariant inheritance<-original");
	  /* ... and replace the subsequent insn by
	     'its_dest <- new_reg', deleting the original.  */
	  start_sequence ();
	  lra_emit_move (SET_DEST (insn_set), new_reg);
	  new_insns = end_sequence ();
	  lra_process_new_insns (insn, NULL, new_insns,
				 "Changing reload<-inheritance");
	  lra_set_insn_deleted (insn);
	  succ_p = true;
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "	  Invariant inheritance reuse change %d (bb%d):\n",
		       REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
	      dump_insn_slim (lra_dump_file, insn);
	      fprintf (lra_dump_file,
		       "	  ]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]\n");
	    }
	}
    }
  /* Record the current insn as the latest one computing the
     invariant (done whether or not the reuse above succeeded).  */
  invariant_ptr->insn = curr_insn;
  return succ_p;
}
6813 : /* Check only registers living at the current program point in the
6814 : current EBB. */
6815 : static bitmap_head live_regs;
6816 :
6817 : /* Update live info in EBB given by its HEAD and TAIL insns after
6818 : inheritance/split transformation. The function removes dead moves
6819 : too. */
6820 : static void
6821 745946 : update_ebb_live_info (rtx_insn *head, rtx_insn *tail)
6822 : {
6823 745946 : unsigned int j;
6824 745946 : int i, regno;
6825 745946 : bool live_p;
6826 745946 : rtx_insn *prev_insn;
6827 745946 : rtx set;
6828 745946 : bool remove_p;
6829 745946 : basic_block last_bb, prev_bb, curr_bb;
6830 745946 : bitmap_iterator bi;
6831 745946 : struct lra_insn_reg *reg;
6832 745946 : edge e;
6833 745946 : edge_iterator ei;
6834 :
6835 745946 : last_bb = BLOCK_FOR_INSN (tail);
6836 745946 : prev_bb = NULL;
6837 745946 : for (curr_insn = tail;
6838 37123762 : curr_insn != PREV_INSN (head);
6839 36377816 : curr_insn = prev_insn)
6840 : {
6841 36377816 : prev_insn = PREV_INSN (curr_insn);
6842 : /* We need to process empty blocks too. They contain
6843 : NOTE_INSN_BASIC_BLOCK referring for the basic block. */
6844 36377816 : if (NOTE_P (curr_insn) && NOTE_KIND (curr_insn) != NOTE_INSN_BASIC_BLOCK)
6845 1410099 : continue;
6846 34967717 : curr_bb = BLOCK_FOR_INSN (curr_insn);
6847 34967717 : if (curr_bb != prev_bb)
6848 : {
6849 1483875 : if (prev_bb != NULL)
6850 : {
6851 : /* Update df_get_live_in (prev_bb): */
6852 54274605 : EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
6853 53536676 : if (bitmap_bit_p (&live_regs, j))
6854 1611834 : bitmap_set_bit (df_get_live_in (prev_bb), j);
6855 : else
6856 51924842 : bitmap_clear_bit (df_get_live_in (prev_bb), j);
6857 : }
6858 1483875 : if (curr_bb != last_bb)
6859 : {
6860 : /* Update df_get_live_out (curr_bb): */
6861 54274605 : EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
6862 : {
6863 53536676 : live_p = bitmap_bit_p (&live_regs, j);
6864 53536676 : if (! live_p)
6865 155679981 : FOR_EACH_EDGE (e, ei, curr_bb->succs)
6866 103809313 : if (bitmap_bit_p (df_get_live_in (e->dest), j))
6867 : {
6868 : live_p = true;
6869 : break;
6870 : }
6871 51924842 : if (live_p)
6872 1666008 : bitmap_set_bit (df_get_live_out (curr_bb), j);
6873 : else
6874 51870668 : bitmap_clear_bit (df_get_live_out (curr_bb), j);
6875 : }
6876 : }
6877 1483875 : prev_bb = curr_bb;
6878 1483875 : bitmap_and (&live_regs, &check_only_regs, df_get_live_out (curr_bb));
6879 : }
6880 34967717 : if (! NONDEBUG_INSN_P (curr_insn))
6881 12878153 : continue;
6882 22089564 : curr_id = lra_get_insn_recog_data (curr_insn);
6883 22089564 : curr_static_id = curr_id->insn_static_data;
6884 22089564 : remove_p = false;
6885 22089564 : if ((set = single_set (curr_insn)) != NULL_RTX
6886 21410625 : && REG_P (SET_DEST (set))
6887 17099891 : && (regno = REGNO (SET_DEST (set))) >= FIRST_PSEUDO_REGISTER
6888 12713155 : && SET_DEST (set) != pic_offset_table_rtx
6889 12706533 : && bitmap_bit_p (&check_only_regs, regno)
6890 25317894 : && ! bitmap_bit_p (&live_regs, regno))
6891 : remove_p = true;
6892 : /* See which defined values die here. */
6893 61010177 : for (reg = curr_id->regs; reg != NULL; reg = reg->next)
6894 38920613 : if (reg->type == OP_OUT && ! reg->subreg_p)
6895 15221357 : bitmap_clear_bit (&live_regs, reg->regno);
6896 26278884 : for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
6897 4189320 : if (reg->type == OP_OUT && ! reg->subreg_p)
6898 3171999 : bitmap_clear_bit (&live_regs, reg->regno);
6899 22089564 : if (curr_id->arg_hard_regs != NULL)
6900 : /* Make clobbered argument hard registers die. */
6901 3328500 : for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
6902 2388976 : if (regno >= FIRST_PSEUDO_REGISTER)
6903 189780 : bitmap_clear_bit (&live_regs, regno - FIRST_PSEUDO_REGISTER);
6904 : /* Mark each used value as live. */
6905 61010177 : for (reg = curr_id->regs; reg != NULL; reg = reg->next)
6906 38920613 : if (reg->type != OP_OUT
6907 38920613 : && bitmap_bit_p (&check_only_regs, reg->regno))
6908 4532548 : bitmap_set_bit (&live_regs, reg->regno);
6909 26278884 : for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
6910 4189320 : if (reg->type != OP_OUT
6911 4189320 : && bitmap_bit_p (&check_only_regs, reg->regno))
6912 0 : bitmap_set_bit (&live_regs, reg->regno);
6913 22089564 : if (curr_id->arg_hard_regs != NULL)
6914 : /* Make used argument hard registers live. */
6915 3328500 : for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
6916 2388976 : if (regno < FIRST_PSEUDO_REGISTER
6917 2388976 : && bitmap_bit_p (&check_only_regs, regno))
6918 0 : bitmap_set_bit (&live_regs, regno);
6919 : /* It is quite important to remove dead move insns because it
6920 : means removing dead store. We don't need to process them for
6921 : constraints. */
6922 22089564 : if (remove_p)
6923 : {
6924 295164 : if (lra_dump_file != NULL)
6925 : {
6926 2 : fprintf (lra_dump_file, " Removing dead insn:\n ");
6927 2 : dump_insn_slim (lra_dump_file, curr_insn);
6928 : }
6929 295164 : lra_set_insn_deleted (curr_insn);
6930 : }
6931 : }
6932 745946 : }
6933 :
6934 : /* The structure describes info to do an inheritance for the current
6935 : insn. We need to collect such info first before doing the
6936 : transformations because the transformations change the insn
6937 : internal representation. */
struct to_inherit
{
  /* Original regno.  */
  int regno;
  /* Subsequent insns which can inherit original reg value.  */
  rtx insns;
};

/* Array containing all info for doing inheritance from the current
   insn.  */
static struct to_inherit to_inherit[LRA_MAX_INSN_RELOADS];

/* The number of valid elements in the previous array.  */
static int to_inherit_num;
6952 :
6953 : /* Add inheritance info REGNO and INSNS. Their meaning is described in
6954 : structure to_inherit. */
6955 : static void
6956 309913 : add_to_inherit (int regno, rtx insns)
6957 : {
6958 309913 : int i;
6959 :
6960 309994 : for (i = 0; i < to_inherit_num; i++)
6961 81 : if (to_inherit[i].regno == regno)
6962 : return;
6963 309913 : lra_assert (to_inherit_num < LRA_MAX_INSN_RELOADS);
6964 309913 : to_inherit[to_inherit_num].regno = regno;
6965 309913 : to_inherit[to_inherit_num++].insns = insns;
6966 : }
6967 :
6968 : /* Return the last non-debug insn in basic block BB, or the block begin
6969 : note if none. */
6970 : static rtx_insn *
6971 29921665 : get_last_insertion_point (basic_block bb)
6972 : {
6973 29921665 : rtx_insn *insn;
6974 :
6975 32313072 : FOR_BB_INSNS_REVERSE (bb, insn)
6976 32313072 : if (NONDEBUG_INSN_P (insn) || NOTE_INSN_BASIC_BLOCK_P (insn))
6977 29921665 : return insn;
6978 0 : gcc_unreachable ();
6979 : }
6980 :
6981 : /* Set up RES by registers living on edges FROM except the edge (FROM,
6982 : TO) or by registers set up in a jump insn in BB FROM. */
6983 : static void
6984 11495532 : get_live_on_other_edges (basic_block from, basic_block to, bitmap res)
6985 : {
6986 11495532 : rtx_insn *last;
6987 11495532 : struct lra_insn_reg *reg;
6988 11495532 : edge e;
6989 11495532 : edge_iterator ei;
6990 :
6991 11495532 : lra_assert (to != NULL);
6992 11495532 : bitmap_clear (res);
6993 34237403 : FOR_EACH_EDGE (e, ei, from->succs)
6994 22741871 : if (e->dest != to)
6995 11246339 : bitmap_ior_into (res, df_get_live_in (e->dest));
6996 11495532 : last = get_last_insertion_point (from);
6997 11495532 : if (! JUMP_P (last))
6998 1866488 : return;
6999 9629044 : curr_id = lra_get_insn_recog_data (last);
7000 19257910 : for (reg = curr_id->regs; reg != NULL; reg = reg->next)
7001 9628866 : if (reg->type != OP_IN)
7002 76 : bitmap_set_bit (res, reg->regno);
7003 : }
7004 :
7005 : /* Used as a temporary results of some bitmap calculations. */
7006 : static bitmap_head temp_bitmap;
7007 :
7008 : /* We split for reloads of small class of hard regs. The following
7009 : defines how many hard regs the class should have to be qualified as
7010 : small. The code is mostly oriented to x86/x86-64 architecture
7011 : where some insns need to use only specific register or pair of
7012 : registers and these register can live in RTL explicitly, e.g. for
7013 : parameter passing. */
7014 : static const int max_small_class_regs_num = 2;
7015 :
7016 : /* Do inheritance/split transformations in EBB starting with HEAD and
7017 : finishing on TAIL. We process EBB insns in the reverse order.
7018 : Return true if we did any inheritance/split transformation in the
7019 : EBB.
7020 :
7021 : We should avoid excessive splitting which results in worse code
7022 : because of inaccurate cost calculations for spilling new split
7023 : pseudos in such case. To achieve this we do splitting only if
7024 : register pressure is high in given basic block and there are reload
7025 : pseudos requiring hard registers. We could do more register
7026 : pressure calculations at any given program point to avoid necessary
   splitting even more but it is too expensive and the current approach
7028 : works well enough. */
7029 : static bool
7030 12678484 : inherit_in_ebb (rtx_insn *head, rtx_insn *tail)
7031 : {
7032 12678484 : int i, src_regno, dst_regno, nregs;
7033 12678484 : bool change_p, succ_p, update_reloads_num_p;
7034 12678484 : rtx_insn *prev_insn, *last_insn;
7035 12678484 : rtx next_usage_insns, curr_set;
7036 12678484 : enum reg_class cl;
7037 12678484 : struct lra_insn_reg *reg;
7038 12678484 : basic_block last_processed_bb, curr_bb = NULL;
7039 12678484 : HARD_REG_SET potential_reload_hard_regs, live_hard_regs;
7040 12678484 : bitmap to_process;
7041 12678484 : unsigned int j;
7042 12678484 : bitmap_iterator bi;
7043 12678484 : bool head_p, after_p;
7044 :
7045 12678484 : change_p = false;
7046 12678484 : curr_usage_insns_check++;
7047 12678484 : clear_invariants ();
7048 12678484 : reloads_num = calls_num = 0;
7049 164820292 : for (unsigned int i = 0; i < NUM_ABI_IDS; ++i)
7050 152141808 : last_call_for_abi[i] = 0;
7051 12678484 : CLEAR_HARD_REG_SET (full_and_partial_call_clobbers);
7052 12678484 : bitmap_clear (&check_only_regs);
7053 12678484 : bitmap_clear (&invalid_invariant_regs);
7054 12678484 : last_processed_bb = NULL;
7055 12678484 : CLEAR_HARD_REG_SET (potential_reload_hard_regs);
7056 12678484 : live_hard_regs = eliminable_regset | lra_no_alloc_regs;
7057 : /* We don't process new insns generated in the loop. */
7058 234981275 : for (curr_insn = tail; curr_insn != PREV_INSN (head); curr_insn = prev_insn)
7059 : {
7060 222302791 : prev_insn = PREV_INSN (curr_insn);
7061 222302791 : if (BLOCK_FOR_INSN (curr_insn) != NULL)
7062 222302557 : curr_bb = BLOCK_FOR_INSN (curr_insn);
7063 222302791 : if (last_processed_bb != curr_bb)
7064 : {
7065 : /* We are at the end of BB. Add qualified living
7066 : pseudos for potential splitting. */
7067 18426133 : to_process = df_get_live_out (curr_bb);
7068 18426133 : if (last_processed_bb != NULL)
7069 : {
7070 : /* We are somewhere in the middle of EBB. */
7071 5747649 : get_live_on_other_edges (curr_bb, last_processed_bb,
7072 : &temp_bitmap);
7073 5747649 : to_process = &temp_bitmap;
7074 : }
7075 18426133 : last_processed_bb = curr_bb;
7076 18426133 : last_insn = get_last_insertion_point (curr_bb);
7077 36852266 : after_p = (! JUMP_P (last_insn)
7078 18426133 : && (! CALL_P (last_insn)
7079 2274148 : || (find_reg_note (last_insn,
7080 : REG_NORETURN, NULL_RTX) == NULL_RTX
7081 1356318 : && ! SIBLING_CALL_P (last_insn))));
7082 18426133 : CLEAR_HARD_REG_SET (potential_reload_hard_regs);
7083 200191954 : EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
7084 : {
7085 181765827 : if ((int) j >= lra_constraint_new_regno_start)
7086 : break;
7087 181765821 : if (j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
7088 : {
7089 118669928 : if (j < FIRST_PSEUDO_REGISTER)
7090 69321934 : SET_HARD_REG_BIT (live_hard_regs, j);
7091 : else
7092 49347994 : add_to_hard_reg_set (&live_hard_regs,
7093 49347994 : PSEUDO_REGNO_MODE (j),
7094 49347994 : reg_renumber[j]);
7095 118669928 : setup_next_usage_insn (j, last_insn, reloads_num, after_p);
7096 : }
7097 : }
7098 : }
7099 222302791 : src_regno = dst_regno = -1;
7100 222302791 : curr_set = single_set (curr_insn);
7101 222302791 : if (curr_set != NULL_RTX && REG_P (SET_DEST (curr_set)))
7102 83419114 : dst_regno = REGNO (SET_DEST (curr_set));
7103 113177005 : if (curr_set != NULL_RTX && REG_P (SET_SRC (curr_set)))
7104 39232273 : src_regno = REGNO (SET_SRC (curr_set));
7105 222302791 : update_reloads_num_p = true;
7106 222302791 : if (src_regno < lra_constraint_new_regno_start
7107 215958214 : && src_regno >= FIRST_PSEUDO_REGISTER
7108 27689060 : && reg_renumber[src_regno] < 0
7109 3698133 : && dst_regno >= lra_constraint_new_regno_start
7110 224922391 : && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS)
7111 : {
7112 : /* 'reload_pseudo <- original_pseudo'. */
7113 2619600 : if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
7114 21439 : reloads_num++;
7115 2619600 : update_reloads_num_p = false;
7116 2619600 : succ_p = false;
7117 2619600 : if (usage_insns[src_regno].check == curr_usage_insns_check
7118 2619600 : && (next_usage_insns = usage_insns[src_regno].insns) != NULL_RTX)
7119 475562 : succ_p = inherit_reload_reg (false, src_regno, cl,
7120 : curr_insn, next_usage_insns);
7121 475562 : if (succ_p)
7122 : change_p = true;
7123 : else
7124 2167139 : setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
7125 5239200 : if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
7126 625021383 : potential_reload_hard_regs |= reg_class_contents[cl];
7127 : }
7128 219683191 : else if (src_regno < 0
7129 183070518 : && dst_regno >= lra_constraint_new_regno_start
7130 5517021 : && invariant_p (SET_SRC (curr_set))
7131 273482 : && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS
7132 272941 : && ! bitmap_bit_p (&invalid_invariant_regs, dst_regno)
7133 219917475 : && ! bitmap_bit_p (&invalid_invariant_regs,
7134 234284 : ORIGINAL_REGNO(regno_reg_rtx[dst_regno])))
7135 : {
7136 : /* 'reload_pseudo <- invariant'. */
7137 173162 : if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
7138 7502 : reloads_num++;
7139 173162 : update_reloads_num_p = false;
7140 173162 : if (process_invariant_for_inheritance (SET_DEST (curr_set), SET_SRC (curr_set)))
7141 22151 : change_p = true;
7142 346324 : if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
7143 625021383 : potential_reload_hard_regs |= reg_class_contents[cl];
7144 : }
7145 219510029 : else if (src_regno >= lra_constraint_new_regno_start
7146 6344577 : && dst_regno < lra_constraint_new_regno_start
7147 5537816 : && dst_regno >= FIRST_PSEUDO_REGISTER
7148 3714939 : && reg_renumber[dst_regno] < 0
7149 1449733 : && (cl = lra_get_allocno_class (src_regno)) != NO_REGS
7150 1449733 : && usage_insns[dst_regno].check == curr_usage_insns_check
7151 219510029 : && (next_usage_insns
7152 474953 : = usage_insns[dst_regno].insns) != NULL_RTX)
7153 : {
7154 474953 : if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
7155 8000 : reloads_num++;
7156 474953 : update_reloads_num_p = false;
7157 : /* 'original_pseudo <- reload_pseudo'. */
7158 474953 : if (! JUMP_P (curr_insn)
7159 474953 : && inherit_reload_reg (true, dst_regno, cl,
7160 : curr_insn, next_usage_insns))
7161 : change_p = true;
7162 : /* Invalidate. */
7163 474953 : usage_insns[dst_regno].check = 0;
7164 949906 : if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
7165 625021383 : potential_reload_hard_regs |= reg_class_contents[cl];
7166 : }
7167 219035076 : else if (INSN_P (curr_insn))
7168 : {
7169 183610594 : int iter;
7170 183610594 : int max_uid = get_max_uid ();
7171 :
7172 183610594 : curr_id = lra_get_insn_recog_data (curr_insn);
7173 183610594 : curr_static_id = curr_id->insn_static_data;
7174 183610594 : to_inherit_num = 0;
7175 : /* Process insn definitions. */
7176 550831782 : for (iter = 0; iter < 2; iter++)
7177 367221188 : for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
7178 593881757 : reg != NULL;
7179 226660569 : reg = reg->next)
7180 226660569 : if (reg->type != OP_IN
7181 226660569 : && (dst_regno = reg->regno) < lra_constraint_new_regno_start)
7182 : {
7183 45715299 : if (dst_regno >= FIRST_PSEUDO_REGISTER && reg->type == OP_OUT
7184 43593236 : && reg_renumber[dst_regno] < 0 && ! reg->subreg_p
7185 1787419 : && usage_insns[dst_regno].check == curr_usage_insns_check
7186 90571000 : && (next_usage_insns
7187 128027 : = usage_insns[dst_regno].insns) != NULL_RTX)
7188 : {
7189 128027 : struct lra_insn_reg *r;
7190 :
7191 381659 : for (r = curr_id->regs; r != NULL; r = r->next)
7192 253632 : if (r->type != OP_OUT && r->regno == dst_regno)
7193 : break;
7194 : /* Don't do inheritance if the pseudo is also
7195 : used in the insn. */
7196 128027 : if (r == NULL)
7197 : /* We cannot do inheritance right now
7198 : because the current insn reg info (chain
7199 : regs) can change after that. */
7200 128027 : add_to_inherit (dst_regno, next_usage_insns);
7201 : }
7202 : /* We cannot process one reg twice here because of
7203 : usage_insns invalidation. */
7204 90571000 : if ((dst_regno < FIRST_PSEUDO_REGISTER
7205 45715299 : || reg_renumber[dst_regno] >= 0)
7206 88649024 : && ! reg->subreg_p && reg->type != OP_IN)
7207 : {
7208 88371287 : HARD_REG_SET s;
7209 :
7210 88371287 : if (split_if_necessary (dst_regno, reg->biggest_mode,
7211 : potential_reload_hard_regs,
7212 : false, curr_insn, max_uid))
7213 58656 : change_p = true;
7214 88371287 : CLEAR_HARD_REG_SET (s);
7215 88371287 : if (dst_regno < FIRST_PSEUDO_REGISTER)
7216 44855701 : add_to_hard_reg_set (&s, reg->biggest_mode, dst_regno);
7217 : else
7218 43515586 : add_to_hard_reg_set (&s, PSEUDO_REGNO_MODE (dst_regno),
7219 43515586 : reg_renumber[dst_regno]);
7220 88371287 : live_hard_regs &= ~s;
7221 176742574 : potential_reload_hard_regs &= ~s;
7222 : }
7223 : /* We should invalidate potential inheritance or
7224 : splitting for the current insn usages to the next
7225 : usage insns (see code below) as the output pseudo
7226 : prevents this. */
7227 90571000 : if ((dst_regno >= FIRST_PSEUDO_REGISTER
7228 45715299 : && reg_renumber[dst_regno] < 0)
7229 88649024 : || (reg->type == OP_OUT && ! reg->subreg_p
7230 80671655 : && (dst_regno < FIRST_PSEUDO_REGISTER
7231 41402057 : || reg_renumber[dst_regno] >= 0)))
7232 : {
7233 : /* Invalidate and mark definitions. */
7234 43324033 : if (dst_regno >= FIRST_PSEUDO_REGISTER)
7235 43324033 : usage_insns[dst_regno].check = -(int) INSN_UID (curr_insn);
7236 : else
7237 : {
7238 39269598 : nregs = hard_regno_nregs (dst_regno,
7239 39269598 : reg->biggest_mode);
7240 78797714 : for (i = 0; i < nregs; i++)
7241 79056232 : usage_insns[dst_regno + i].check
7242 39528116 : = -(int) INSN_UID (curr_insn);
7243 : }
7244 : }
7245 : }
7246 : /* Process clobbered call regs. */
7247 183610594 : if (curr_id->arg_hard_regs != NULL)
7248 19576821 : for (i = 0; (dst_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
7249 13962650 : if (dst_regno >= FIRST_PSEUDO_REGISTER)
7250 1611174 : usage_insns[dst_regno - FIRST_PSEUDO_REGISTER].check
7251 805587 : = -(int) INSN_UID (curr_insn);
7252 183610594 : if (! JUMP_P (curr_insn))
7253 172179002 : for (i = 0; i < to_inherit_num; i++)
7254 128027 : if (inherit_reload_reg (true, to_inherit[i].regno,
7255 : ALL_REGS, curr_insn,
7256 : to_inherit[i].insns))
7257 102978 : change_p = true;
7258 183610594 : if (CALL_P (curr_insn))
7259 : {
7260 7216602 : rtx cheap, pat, dest;
7261 7216602 : rtx_insn *restore;
7262 7216602 : int regno, hard_regno;
7263 :
7264 7216602 : calls_num++;
7265 7216602 : function_abi callee_abi = insn_callee_abi (curr_insn);
7266 7216602 : last_call_for_abi[callee_abi.id ()] = calls_num;
7267 7216602 : full_and_partial_call_clobbers
7268 7216602 : |= callee_abi.full_and_partial_reg_clobbers ();
7269 7216602 : first_call_insn = curr_insn;
7270 7216602 : if ((cheap = find_reg_note (curr_insn,
7271 : REG_RETURNED, NULL_RTX)) != NULL_RTX
7272 40036 : && ((cheap = XEXP (cheap, 0)), true)
7273 40036 : && (regno = REGNO (cheap)) >= FIRST_PSEUDO_REGISTER
7274 40036 : && (hard_regno = reg_renumber[regno]) >= 0
7275 29526 : && usage_insns[regno].check == curr_usage_insns_check
7276 : /* If there are pending saves/restores, the
7277 : optimization is not worth. */
7278 25391 : && usage_insns[regno].calls_num == calls_num - 1
7279 7240160 : && callee_abi.clobbers_reg_p (GET_MODE (cheap), hard_regno))
7280 : {
7281 : /* Restore the pseudo from the call result as
7282 : REG_RETURNED note says that the pseudo value is
7283 : in the call result and the pseudo is an argument
7284 : of the call. */
7285 10331 : pat = PATTERN (curr_insn);
7286 10331 : if (GET_CODE (pat) == PARALLEL)
7287 0 : pat = XVECEXP (pat, 0, 0);
7288 10331 : dest = SET_DEST (pat);
7289 : /* For multiple return values dest is PARALLEL.
7290 : Currently we handle only single return value case. */
7291 10331 : if (REG_P (dest))
7292 : {
7293 10331 : start_sequence ();
7294 10331 : emit_move_insn (cheap, copy_rtx (dest));
7295 10331 : restore = end_sequence ();
7296 10331 : lra_process_new_insns (curr_insn, NULL, restore,
7297 : "Inserting call parameter restore");
7298 : /* We don't need to save/restore of the pseudo from
7299 : this call. */
7300 10331 : usage_insns[regno].calls_num = calls_num;
7301 10331 : remove_from_hard_reg_set
7302 10331 : (&full_and_partial_call_clobbers,
7303 10331 : GET_MODE (cheap), hard_regno);
7304 10331 : bitmap_set_bit (&check_only_regs, regno);
7305 : }
7306 : }
7307 : }
7308 183610594 : to_inherit_num = 0;
7309 : /* Process insn usages. */
7310 550831782 : for (iter = 0; iter < 2; iter++)
7311 367221188 : for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
7312 593881757 : reg != NULL;
7313 226660569 : reg = reg->next)
7314 226660569 : if ((reg->type != OP_OUT
7315 89285606 : || (reg->type == OP_OUT && reg->subreg_p))
7316 227199033 : && (src_regno = reg->regno) < lra_constraint_new_regno_start)
7317 : {
7318 126533799 : if (src_regno >= FIRST_PSEUDO_REGISTER
7319 73544772 : && reg_renumber[src_regno] < 0 && reg->type == OP_IN)
7320 : {
7321 2446194 : if (usage_insns[src_regno].check == curr_usage_insns_check
7322 780573 : && (next_usage_insns
7323 780573 : = usage_insns[src_regno].insns) != NULL_RTX
7324 3226767 : && NONDEBUG_INSN_P (curr_insn))
7325 181886 : add_to_inherit (src_regno, next_usage_insns);
7326 4528616 : else if (usage_insns[src_regno].check
7327 2264308 : != -(int) INSN_UID (curr_insn))
7328 : /* Add usages but only if the reg is not set up
7329 : in the same insn. */
7330 2264308 : add_next_usage_insn (src_regno, curr_insn, reloads_num);
7331 : }
7332 71098578 : else if (src_regno < FIRST_PSEUDO_REGISTER
7333 71098578 : || reg_renumber[src_regno] >= 0)
7334 : {
7335 123953048 : bool before_p;
7336 123953048 : rtx_insn *use_insn = curr_insn;
7337 123953048 : rtx_insn *prev_insn = PREV_INSN (curr_insn);
7338 :
7339 247906096 : before_p = (JUMP_P (curr_insn)
7340 123953048 : || (CALL_P (curr_insn) && reg->type == OP_IN));
7341 123953048 : if (NONDEBUG_INSN_P (curr_insn)
7342 110693856 : && (! JUMP_P (curr_insn) || reg->type == OP_IN)
7343 234646799 : && split_if_necessary (src_regno, reg->biggest_mode,
7344 : potential_reload_hard_regs,
7345 : before_p, curr_insn, max_uid))
7346 : {
7347 218826 : if (reg->subreg_p)
7348 3149 : check_and_force_assignment_correctness_p = true;
7349 218826 : change_p = true;
7350 : /* Invalidate. */
7351 218826 : usage_insns[src_regno].check = 0;
7352 218826 : if (before_p && PREV_INSN (curr_insn) != prev_insn)
7353 : use_insn = PREV_INSN (curr_insn);
7354 : }
7355 123953048 : if (NONDEBUG_INSN_P (curr_insn))
7356 : {
7357 110693856 : if (src_regno < FIRST_PSEUDO_REGISTER)
7358 48494501 : add_to_hard_reg_set (&live_hard_regs,
7359 48494501 : reg->biggest_mode, src_regno);
7360 : else
7361 62199355 : add_to_hard_reg_set (&live_hard_regs,
7362 62199355 : PSEUDO_REGNO_MODE (src_regno),
7363 62199355 : reg_renumber[src_regno]);
7364 : }
7365 123953048 : if (src_regno >= FIRST_PSEUDO_REGISTER)
7366 70964021 : add_next_usage_insn (src_regno, use_insn, reloads_num);
7367 : else
7368 : {
7369 106074797 : for (i = 0; i < hard_regno_nregs (src_regno, reg->biggest_mode); i++)
7370 53085770 : add_next_usage_insn (src_regno + i, use_insn, reloads_num);
7371 : }
7372 : }
7373 : }
7374 : /* Process used call regs. */
7375 183610594 : if (curr_id->arg_hard_regs != NULL)
7376 19576821 : for (i = 0; (src_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
7377 13962650 : if (src_regno < FIRST_PSEUDO_REGISTER)
7378 : {
7379 13157063 : SET_HARD_REG_BIT (live_hard_regs, src_regno);
7380 13157063 : add_next_usage_insn (src_regno, curr_insn, reloads_num);
7381 : }
7382 183792480 : for (i = 0; i < to_inherit_num; i++)
7383 : {
7384 181886 : src_regno = to_inherit[i].regno;
7385 181886 : if (inherit_reload_reg (false, src_regno, ALL_REGS,
7386 : curr_insn, to_inherit[i].insns))
7387 : change_p = true;
7388 : else
7389 22960 : setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
7390 : }
7391 : }
7392 183683516 : if (update_reloads_num_p
7393 219035076 : && NONDEBUG_INSN_P (curr_insn) && curr_set != NULL_RTX)
7394 : {
7395 109909290 : int regno = -1;
7396 109909290 : if ((REG_P (SET_DEST (curr_set))
7397 80151399 : && (regno = REGNO (SET_DEST (curr_set))) >= lra_constraint_new_regno_start
7398 8134024 : && reg_renumber[regno] < 0
7399 5202972 : && (cl = lra_get_allocno_class (regno)) != NO_REGS)
7400 185140326 : || (REG_P (SET_SRC (curr_set))
7401 34884390 : && (regno = REGNO (SET_SRC (curr_set))) >= lra_constraint_new_regno_start
7402 5938795 : && reg_renumber[regno] < 0
7403 3487939 : && (cl = lra_get_allocno_class (regno)) != NO_REGS))
7404 : {
7405 8138010 : if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
7406 210135 : reloads_num++;
7407 16276020 : if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
7408 222302791 : potential_reload_hard_regs |= reg_class_contents[cl];
7409 : }
7410 : }
7411 222302791 : if (NONDEBUG_INSN_P (curr_insn))
7412 : {
7413 119437051 : int regno;
7414 :
7415 : /* Invalidate invariants with changed regs. */
7416 119437051 : curr_id = lra_get_insn_recog_data (curr_insn);
7417 306464025 : for (reg = curr_id->regs; reg != NULL; reg = reg->next)
7418 187026974 : if (reg->type != OP_IN)
7419 : {
7420 79953487 : bitmap_set_bit (&invalid_invariant_regs, reg->regno);
7421 159906974 : bitmap_set_bit (&invalid_invariant_regs,
7422 79953487 : ORIGINAL_REGNO (regno_reg_rtx[reg->regno]));
7423 : }
7424 119437051 : curr_static_id = curr_id->insn_static_data;
7425 150944735 : for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
7426 31507684 : if (reg->type != OP_IN)
7427 22545865 : bitmap_set_bit (&invalid_invariant_regs, reg->regno);
7428 119437051 : if (curr_id->arg_hard_regs != NULL)
7429 19576821 : for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
7430 13962650 : if (regno >= FIRST_PSEUDO_REGISTER)
7431 805587 : bitmap_set_bit (&invalid_invariant_regs,
7432 : regno - FIRST_PSEUDO_REGISTER);
7433 : }
7434 : /* We reached the start of the current basic block. */
7435 222302783 : if (prev_insn == NULL_RTX || prev_insn == PREV_INSN (head)
7436 431927098 : || BLOCK_FOR_INSN (prev_insn) != curr_bb)
7437 : {
7438 : /* We reached the beginning of the current block -- do
 7439            :              rest of splitting in the current BB.  */
7440 18426367 : to_process = df_get_live_in (curr_bb);
7441 18426367 : if (BLOCK_FOR_INSN (head) != curr_bb)
7442 : {
7443 : /* We are somewhere in the middle of EBB. */
7444 5747883 : get_live_on_other_edges (EDGE_PRED (curr_bb, 0)->src,
7445 : curr_bb, &temp_bitmap);
7446 5747883 : to_process = &temp_bitmap;
7447 : }
7448 18426367 : head_p = true;
7449 193383368 : EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
7450 : {
7451 174957008 : if ((int) j >= lra_constraint_new_regno_start)
7452 : break;
7453 111415845 : if (((int) j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
7454 110784058 : && usage_insns[j].check == curr_usage_insns_check
7455 283867311 : && (next_usage_insns = usage_insns[j].insns) != NULL_RTX)
7456 : {
7457 108910310 : if (need_for_split_p (potential_reload_hard_regs, j))
7458 : {
7459 382012 : if (lra_dump_file != NULL && head_p)
7460 : {
7461 0 : fprintf (lra_dump_file,
7462 : " ----------------------------------\n");
7463 0 : head_p = false;
7464 : }
7465 382012 : if (split_reg (false, j, bb_note (curr_bb),
7466 : next_usage_insns, NULL))
7467 382012 : change_p = true;
7468 : }
7469 108910310 : usage_insns[j].check = 0;
7470 : }
7471 : }
7472 : }
7473 : }
7474 12678484 : first_call_insn = NULL;
7475 12678484 : return change_p;
7476 : }
7477 :
7478 : /* This value affects EBB forming. If probability of edge from EBB to
7479 : a BB is not greater than the following value, we don't add the BB
7480 : to EBB. */
7481 : #define EBB_PROBABILITY_CUTOFF \
7482 : ((REG_BR_PROB_BASE * param_lra_inheritance_ebb_probability_cutoff) / 100)
7483 :
7484 : /* Current number of inheritance/split iteration. */
7485 : int lra_inheritance_iter;
7486 :
7487 : /* Entry function for inheritance/split pass. */
7488 : void
7489 1537521 : lra_inheritance (void)
7490 : {
7491 1537521 : int i;
7492 1537521 : basic_block bb, start_bb;
7493 1537521 : edge e;
7494 :
7495 1537521 : lra_inheritance_iter++;
7496 1537521 : if (lra_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
7497 : return;
7498 1534665 : timevar_push (TV_LRA_INHERITANCE);
7499 1534665 : if (lra_dump_file != NULL)
7500 97 : fprintf (lra_dump_file, "\n********** Inheritance #%d: **********\n\n",
7501 : lra_inheritance_iter);
7502 1534665 : curr_usage_insns_check = 0;
7503 1534665 : usage_insns = XNEWVEC (struct usage_insns, lra_constraint_new_regno_start);
7504 227898805 : for (i = 0; i < lra_constraint_new_regno_start; i++)
7505 226364140 : usage_insns[i].check = 0;
7506 1534665 : bitmap_initialize (&check_only_regs, ®_obstack);
7507 1534665 : bitmap_initialize (&invalid_invariant_regs, ®_obstack);
7508 1534665 : bitmap_initialize (&live_regs, ®_obstack);
7509 1534665 : bitmap_initialize (&temp_bitmap, ®_obstack);
7510 1534665 : bitmap_initialize (&ebb_global_regs, ®_obstack);
7511 14213149 : FOR_EACH_BB_FN (bb, cfun)
7512 : {
7513 12678484 : start_bb = bb;
7514 12678484 : if (lra_dump_file != NULL)
7515 347 : fprintf (lra_dump_file, "EBB");
7516 : /* Form a EBB starting with BB. */
7517 12678484 : bitmap_clear (&ebb_global_regs);
7518 12678484 : bitmap_ior_into (&ebb_global_regs, df_get_live_in (bb));
7519 18426133 : for (;;)
7520 : {
7521 18426133 : if (lra_dump_file != NULL)
7522 477 : fprintf (lra_dump_file, " %d", bb->index);
7523 18426133 : if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
7524 16891468 : || LABEL_P (BB_HEAD (bb->next_bb)))
7525 : break;
7526 8163788 : e = find_fallthru_edge (bb->succs);
7527 8163788 : if (! e)
7528 : break;
7529 8163788 : if (e->probability.initialized_p ()
7530 8163788 : && e->probability.to_reg_br_prob_base () < EBB_PROBABILITY_CUTOFF)
7531 : break;
7532 : bb = bb->next_bb;
7533 : }
7534 12678484 : bitmap_ior_into (&ebb_global_regs, df_get_live_out (bb));
7535 12678484 : if (lra_dump_file != NULL)
7536 347 : fprintf (lra_dump_file, "\n");
7537 12678484 : if (inherit_in_ebb (BB_HEAD (start_bb), BB_END (bb)))
7538 : /* Remember that the EBB head and tail can change in
7539 : inherit_in_ebb. */
7540 745946 : update_ebb_live_info (BB_HEAD (start_bb), BB_END (bb));
7541 : }
7542 1534665 : bitmap_release (&ebb_global_regs);
7543 1534665 : bitmap_release (&temp_bitmap);
7544 1534665 : bitmap_release (&live_regs);
7545 1534665 : bitmap_release (&invalid_invariant_regs);
7546 1534665 : bitmap_release (&check_only_regs);
7547 1534665 : free (usage_insns);
7548 1534665 : lra_dump_insns_if_possible ("func after inheritance");
7549 1534665 : timevar_pop (TV_LRA_INHERITANCE);
7550 : }
7551 :
7552 :
7553 :
7554 : /* This page contains code to undo failed inheritance/split
7555 : transformations. */
7556 :
7557 : /* Current number of iteration undoing inheritance/split. */
7558 : int lra_undo_inheritance_iter;
7559 :
7560 : /* Fix BB live info LIVE after removing pseudos created on pass doing
7561 : inheritance/split which are REMOVED_PSEUDOS. */
7562 : static void
7563 36852266 : fix_bb_live_info (bitmap live, bitmap removed_pseudos)
7564 : {
7565 36852266 : unsigned int regno;
7566 36852266 : bitmap_iterator bi;
7567 :
7568 209662980 : EXECUTE_IF_SET_IN_BITMAP (removed_pseudos, 0, regno, bi)
7569 172810714 : if (bitmap_clear_bit (live, regno)
7570 172810714 : && REG_P (lra_reg_info[regno].restore_rtx))
7571 1206612 : bitmap_set_bit (live, REGNO (lra_reg_info[regno].restore_rtx));
7572 36852266 : }
7573 :
7574 : /* Return regno of the (subreg of) REG. Otherwise, return a negative
7575 : number. */
7576 : static int
7577 67278083 : get_regno (rtx reg)
7578 : {
7579 1089330 : if (GET_CODE (reg) == SUBREG)
7580 1024511 : reg = SUBREG_REG (reg);
7581 67278083 : if (REG_P (reg))
7582 43497469 : return REGNO (reg);
7583 : return -1;
7584 : }
7585 :
7586 : /* Delete a move INSN with destination reg DREGNO and a previous
7587 : clobber insn with the same regno. The inheritance/split code can
7588 : generate moves with preceding clobber and when we delete such moves
7589 : we should delete the clobber insn too to keep the correct life
7590 : info. */
7591 : static void
7592 746299 : delete_move_and_clobber (rtx_insn *insn, int dregno)
7593 : {
7594 746299 : rtx_insn *prev_insn = PREV_INSN (insn);
7595 :
7596 746299 : lra_set_insn_deleted (insn);
7597 746299 : lra_assert (dregno >= 0);
7598 746299 : if (prev_insn != NULL && NONDEBUG_INSN_P (prev_insn)
7599 317113 : && GET_CODE (PATTERN (prev_insn)) == CLOBBER
7600 746655 : && dregno == get_regno (XEXP (PATTERN (prev_insn), 0)))
7601 0 : lra_set_insn_deleted (prev_insn);
7602 746299 : }
7603 :
/* Remove inheritance/split pseudos which are in REMOVE_PSEUDOS and
   return true if we did any change.  The undo transformations for
   inheritance look like
     i <- i2
     p <- i	  =>   p <- i2
   or removing
     p <- i, i <- p, and i <- i3
   where p is original pseudo from which inheritance pseudo i was
   created, i and i3 are removed inheritance pseudos, i2 is another
   not removed inheritance pseudo.  All split pseudos or other
   occurrences of removed inheritance pseudos are changed to the
   corresponding original pseudos.

   The function also schedules insns changed and created during
   inheritance/split pass for processing by the subsequent constraint
   pass.  */
static bool
remove_inheritance_pseudos (bitmap remove_pseudos)
{
  basic_block bb;
  int regno, sregno, prev_sregno, dregno;
  rtx restore_rtx;
  rtx set, prev_set;
  rtx_insn *prev_insn;
  bool change_p, done_p;

  change_p = ! bitmap_empty_p (remove_pseudos);
  /* We cannot finish the function right away even if CHANGE_P is false
     because we need to mark insns affected by previous
     inheritance/split pass for processing by the subsequent
     constraint pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Update BB boundary liveness for the pseudos being removed.  */
      fix_bb_live_info (df_get_live_in (bb), remove_pseudos);
      fix_bb_live_info (df_get_live_out (bb), remove_pseudos);
      FOR_BB_INSNS_REVERSE (bb, curr_insn)
	{
	  if (! INSN_P (curr_insn))
	    continue;
	  done_p = false;
	  sregno = dregno = -1;
	  if (change_p && NONDEBUG_INSN_P (curr_insn)
	      && (set = single_set (curr_insn)) != NULL_RTX)
	    {
	      dregno = get_regno (SET_DEST (set));
	      sregno = get_regno (SET_SRC (set));
	    }

	  if (sregno >= 0 && dregno >= 0)
	    {
	      /* A non-REG restore_rtx means the pseudo holds an
		 invariant rather than a copy of another register.  */
	      if (bitmap_bit_p (remove_pseudos, dregno)
		  && ! REG_P (lra_reg_info[dregno].restore_rtx))
		{
		  /* invariant inheritance pseudo <- original pseudo */
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "	   Removing invariant inheritance:\n");
		      dump_insn_slim (lra_dump_file, curr_insn);
		      fprintf (lra_dump_file, "\n");
		    }
		  delete_move_and_clobber (curr_insn, dregno);
		  done_p = true;
		}
	      else if (bitmap_bit_p (remove_pseudos, sregno)
		       && ! REG_P (lra_reg_info[sregno].restore_rtx))
		{
		  /* reload pseudo <- invariant inheritance pseudo */
		  start_sequence ();
		  /* We cannot just change the source.  It might be
		     an insn different from the move.  */
		  emit_insn (lra_reg_info[sregno].restore_rtx);
		  rtx_insn *new_insns = end_sequence ();
		  lra_assert (single_set (new_insns) != NULL
			      && SET_DEST (set) == SET_DEST (single_set (new_insns)));
		  lra_process_new_insns (curr_insn, NULL, new_insns,
					 "Changing reload<-invariant inheritance");
		  delete_move_and_clobber (curr_insn, dregno);
		  done_p = true;
		}
	      else if ((bitmap_bit_p (remove_pseudos, sregno)
			&& (get_regno (lra_reg_info[sregno].restore_rtx) == dregno
			    || (bitmap_bit_p (remove_pseudos, dregno)
				&& get_regno (lra_reg_info[sregno].restore_rtx) >= 0
				&& (get_regno (lra_reg_info[sregno].restore_rtx)
				    == get_regno (lra_reg_info[dregno].restore_rtx)))))
		       || (bitmap_bit_p (remove_pseudos, dregno)
			   && get_regno (lra_reg_info[dregno].restore_rtx) == sregno))
		/* One of the following cases:
		     original <- removed inheritance pseudo
		     removed inherit pseudo <- another removed inherit pseudo
		     removed inherit pseudo <- original pseudo
		   Or
		     removed_split_pseudo <- original_reg
		     original_reg <- removed_split_pseudo */
		{
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "	   Removing %s:\n",
			       bitmap_bit_p (&lra_split_regs, sregno)
			       || bitmap_bit_p (&lra_split_regs, dregno)
			       ? "split" : "inheritance");
		      dump_insn_slim (lra_dump_file, curr_insn);
		    }
		  delete_move_and_clobber (curr_insn, dregno);
		  done_p = true;
		}
	      else if (bitmap_bit_p (remove_pseudos, sregno)
		       && bitmap_bit_p (&lra_inheritance_pseudos, sregno))
		{
		  /* Search the following pattern:
		       inherit_or_split_pseudo1 <- inherit_or_split_pseudo2
		       original_pseudo <- inherit_or_split_pseudo1
		     where the 2nd insn is the current insn and
		     inherit_or_split_pseudo2 is not removed.  If it is found,
		     change the current insn onto:
		       original_pseudo <- inherit_or_split_pseudo2.  */
		  /* Skip backwards over notes/debug insns to the
		     previous real insn.  */
		  for (prev_insn = PREV_INSN (curr_insn);
		       prev_insn != NULL_RTX && ! NONDEBUG_INSN_P (prev_insn);
		       prev_insn = PREV_INSN (prev_insn))
		    ;
		  if (prev_insn != NULL_RTX && BLOCK_FOR_INSN (prev_insn) == bb
		      && (prev_set = single_set (prev_insn)) != NULL_RTX
		      /* There should be no subregs in insn we are
			 searching because only the original reg might
			 be in subreg when we changed the mode of
			 load/store for splitting.  */
		      && REG_P (SET_DEST (prev_set))
		      && REG_P (SET_SRC (prev_set))
		      && (int) REGNO (SET_DEST (prev_set)) == sregno
		      && ((prev_sregno = REGNO (SET_SRC (prev_set)))
			  >= FIRST_PSEUDO_REGISTER)
		      && (lra_reg_info[prev_sregno].restore_rtx == NULL_RTX
			  ||
			  /* As we consider chain of inheritance or
			     splitting described in above comment we should
			     check that sregno and prev_sregno were
			     inheritance/split pseudos created from the
			     same original regno.  */
			  (get_regno (lra_reg_info[sregno].restore_rtx) >= 0
			   && (get_regno (lra_reg_info[sregno].restore_rtx)
			       == get_regno (lra_reg_info[prev_sregno].restore_rtx))))
		      && ! bitmap_bit_p (remove_pseudos, prev_sregno))
		    {
		      int restore_regno = get_regno (lra_reg_info[sregno].restore_rtx);
		      if (restore_regno < 0)
			restore_regno = prev_sregno;
		      lra_assert (GET_MODE (SET_SRC (prev_set))
				  == GET_MODE (regno_reg_rtx[restore_regno]));
		      /* Although we have a single set, the insn can
			 contain more than one sregno register occurrence
			 as a source.  Change all occurrences.  */
		      lra_substitute_pseudo_within_insn (curr_insn, sregno,
							 regno_reg_rtx[restore_regno],
							 false);
		      /* As we are finishing with processing the insn
			 here, check the destination too as it might be an
			 inheritance pseudo for another pseudo.  */
		      if (bitmap_bit_p (remove_pseudos, dregno)
			  && bitmap_bit_p (&lra_inheritance_pseudos, dregno)
			  && (restore_rtx
			      = lra_reg_info[dregno].restore_rtx) != NULL_RTX)
			{
			  if (GET_CODE (SET_DEST (set)) == SUBREG)
			    SUBREG_REG (SET_DEST (set)) = restore_rtx;
			  else
			    SET_DEST (set) = restore_rtx;
			}
		      lra_push_insn_and_update_insn_regno_info (curr_insn);
		      lra_set_used_insn_alternative_by_uid
			(INSN_UID (curr_insn), LRA_UNKNOWN_ALT);
		      done_p = true;
		      if (lra_dump_file != NULL)
			{
			  fprintf (lra_dump_file, "    Change reload insn:\n");
			  dump_insn_slim (lra_dump_file, curr_insn);
			}
		    }
		}
	    }
	  if (! done_p)
	    {
	      /* The insn was not handled above: substitute any removed
		 pseudos it still references, then reschedule it for the
		 constraint pass if it still differs from its original
		 form.  */
	      struct lra_insn_reg *reg;
	      bool restored_regs_p = false;
	      bool kept_regs_p = false;

	      curr_id = lra_get_insn_recog_data (curr_insn);
	      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
		{
		  regno = reg->regno;
		  restore_rtx = lra_reg_info[regno].restore_rtx;
		  if (restore_rtx != NULL_RTX)
		    {
		      if (change_p && bitmap_bit_p (remove_pseudos, regno))
			{
			  lra_substitute_pseudo_within_insn
			    (curr_insn, regno, restore_rtx, false);
			  restored_regs_p = true;
			}
		      else
			kept_regs_p = true;
		    }
		}
	      if (NONDEBUG_INSN_P (curr_insn) && kept_regs_p)
		{
		  /* The instruction has changed since the previous
		     constraints pass.  */
		  lra_push_insn_and_update_insn_regno_info (curr_insn);
		  lra_set_used_insn_alternative_by_uid
		    (INSN_UID (curr_insn), LRA_UNKNOWN_ALT);
		}
	      else if (restored_regs_p)
		/* The instruction has been restored to the form that
		   it had during the previous constraints pass.  */
		lra_update_insn_regno_info (curr_insn);
	      if (restored_regs_p && lra_dump_file != NULL)
		{
		  fprintf (lra_dump_file, "	  Insn after restoring regs:\n");
		  dump_insn_slim (lra_dump_file, curr_insn);
		}
	    }
	}
    }
  return change_p;
}
7828 :
/* If optional reload pseudos failed to get a hard register or were not
   inherited, it is better to remove optional reloads.  We do this
   transformation after undoing inheritance to make it easier to figure
   out the necessity to remove optional reloads.  Return true if we do
   any change.  */
static bool
undo_optional_reloads (void)
{
  bool change_p, keep_p;
  unsigned int regno, uid;
  bitmap_iterator bi, bi2;
  rtx_insn *insn;
  rtx set, src, dest;
  auto_bitmap removed_optional_reload_pseudos (&reg_obstack);

  /* Phase 1: start from all optional reload pseudos and prune the ones
     worth keeping.  */
  bitmap_copy (removed_optional_reload_pseudos, &lra_optional_reload_pseudos);
  EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
    {
      keep_p = false;
      /* Keep optional reloads from previous subpasses.  */
      if (lra_reg_info[regno].restore_rtx == NULL_RTX
	  /* If the original pseudo changed its allocation, just
	     removing the optional pseudo is dangerous as the original
	     pseudo will have longer live range.  */
	  || reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] >= 0)
	keep_p = true;
      else if (reg_renumber[regno] >= 0)
	EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi2)
	  {
	    insn = lra_insn_recog_data[uid]->insn;
	    if ((set = single_set (insn)) == NULL_RTX)
	      continue;
	    src = SET_SRC (set);
	    dest = SET_DEST (set);
	    if ((! REG_P (src) && ! SUBREG_P (src))
		|| (! REG_P (dest) && ! SUBREG_P (dest)))
	      continue;
	    if (get_regno (dest) == (int) regno
		/* Ignore insn for optional reloads itself.  */
		&& (get_regno (lra_reg_info[regno].restore_rtx)
		    != get_regno (src))
		/* Check only inheritance on last inheritance pass.  */
		&& get_regno (src) >= new_regno_start
		/* Check that the optional reload was inherited.  */
		&& bitmap_bit_p (&lra_inheritance_pseudos, get_regno (src)))
	      {
		keep_p = true;
		break;
	      }
	  }
      if (keep_p)
	{
	  bitmap_clear_bit (removed_optional_reload_pseudos, regno);
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, "Keep optional reload reg %d\n", regno);
	}
    }
  change_p = ! bitmap_empty_p (removed_optional_reload_pseudos);
  /* Phase 2: rewrite every insn referencing a removed optional reload
     pseudo, deleting the reload moves themselves.  */
  auto_bitmap insn_bitmap (&reg_obstack);
  EXECUTE_IF_SET_IN_BITMAP (removed_optional_reload_pseudos, 0, regno, bi)
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "Remove optional reload reg %d\n", regno);
      /* Copy the insn bitmap because substitution below mutates
	 lra_reg_info while we iterate.  */
      bitmap_copy (insn_bitmap, &lra_reg_info[regno].insn_bitmap);
      EXECUTE_IF_SET_IN_BITMAP (insn_bitmap, 0, uid, bi2)
	{
	  /* We may have already removed a clobber.  */
	  if (!lra_insn_recog_data[uid])
	    continue;
	  insn = lra_insn_recog_data[uid]->insn;
	  if ((set = single_set (insn)) != NULL_RTX)
	    {
	      src = SET_SRC (set);
	      dest = SET_DEST (set);
	      /* A move between the optional reload pseudo and its
		 original pseudo (in either direction) is deleted
		 outright.  */
	      if ((REG_P (src) || SUBREG_P (src))
		  && (REG_P (dest) || SUBREG_P (dest))
		  && ((get_regno (src) == (int) regno
		       && (get_regno (lra_reg_info[regno].restore_rtx)
			   == get_regno (dest)))
		      || (get_regno (dest) == (int) regno
			  && (get_regno (lra_reg_info[regno].restore_rtx)
			      == get_regno (src)))))
		{
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "  Deleting move %u\n",
			       INSN_UID (insn));
		      dump_insn_slim (lra_dump_file, insn);
		    }
		  delete_move_and_clobber (insn, get_regno (dest));
		  continue;
		}
	      /* We should not worry about generating memory-memory
		 moves here as if the corresponding inheritance did
		 not work (inheritance pseudo did not get a hard reg),
		 we remove the inheritance pseudo and the optional
		 reload.  */
	    }
	  if (GET_CODE (PATTERN (insn)) == CLOBBER
	      && REG_P (SET_DEST (insn))
	      && get_regno (SET_DEST (insn)) == (int) regno)
	    /* Refuse to remap clobbers to preexisting pseudos.  */
	    gcc_unreachable ();
	  lra_substitute_pseudo_within_insn
	    (insn, regno, lra_reg_info[regno].restore_rtx, false);
	  lra_update_insn_regno_info (insn);
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "  Restoring original insn:\n");
	      dump_insn_slim (lra_dump_file, insn);
	    }
	}
    }
  /* Clear restore_regnos.  */
  EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
    lra_reg_info[regno].restore_rtx = NULL_RTX;
  return change_p;
}
7948 :
/* Entry function for undoing inheritance/split transformation.  Return true
   if we did any RTL change in this pass.  */
bool
lra_undo_inheritance (void)
{
  unsigned int regno;
  int hard_regno;
  int n_all_inherit, n_inherit, n_all_split, n_split;
  rtx restore_rtx;
  bitmap_iterator bi;
  bool change_p;

  lra_undo_inheritance_iter++;
  if (lra_undo_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
    return false;
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "\n********** Undoing inheritance #%d: **********\n\n",
	     lra_undo_inheritance_iter);
  auto_bitmap remove_pseudos (&reg_obstack);
  /* Collect inheritance pseudos that did not get a hard register --
     those inheritances failed and must be undone.  */
  n_inherit = n_all_inherit = 0;
  EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
    if (lra_reg_info[regno].restore_rtx != NULL_RTX)
      {
	n_all_inherit++;
	if (reg_renumber[regno] < 0
	    /* If the original pseudo changed its allocation, just
	       removing inheritance is dangerous as for changing
	       allocation we used shorter live-ranges.  */
	    && (! REG_P (lra_reg_info[regno].restore_rtx)
		|| reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] < 0))
	  bitmap_set_bit (remove_pseudos, regno);
	else
	  n_inherit++;
      }
  if (lra_dump_file != NULL && n_all_inherit != 0)
    fprintf (lra_dump_file, "Inherit %d out of %d (%.2f%%)\n",
	     n_inherit, n_all_inherit,
	     (double) n_inherit / n_all_inherit * 100);
  /* Collect split pseudos whose split gained nothing: either the
     original got no hard register, or both ended up in the same one.  */
  n_split = n_all_split = 0;
  EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
    if ((restore_rtx = lra_reg_info[regno].restore_rtx) != NULL_RTX)
      {
	int restore_regno = REGNO (restore_rtx);

	n_all_split++;
	/* The restore reg may itself be a hard register.  */
	hard_regno = (restore_regno >= FIRST_PSEUDO_REGISTER
		      ? reg_renumber[restore_regno] : restore_regno);
	if (hard_regno < 0 || reg_renumber[regno] == hard_regno)
	  bitmap_set_bit (remove_pseudos, regno);
	else
	  {
	    n_split++;
	    if (lra_dump_file != NULL)
	      fprintf (lra_dump_file, "	     Keep split r%d (orig=r%d)\n",
		       regno, restore_regno);
	  }
      }
  if (lra_dump_file != NULL && n_all_split != 0)
    fprintf (lra_dump_file, "Split %d out of %d (%.2f%%)\n",
	     n_split, n_all_split,
	     (double) n_split / n_all_split * 100);
  change_p = remove_inheritance_pseudos (remove_pseudos);
  /* Clear restore_regnos.  */
  EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
    lra_reg_info[regno].restore_rtx = NULL_RTX;
  EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
    lra_reg_info[regno].restore_rtx = NULL_RTX;
  change_p = undo_optional_reloads () || change_p;
  if (change_p)
    lra_dump_insns_if_possible ("changed func after undoing inheritance");
  return change_p;
}
|