Branch data Line data Source code
1 : : /* Change spilled pseudos into memory.
2 : : Copyright (C) 2010-2025 Free Software Foundation, Inc.
3 : : Contributed by Vladimir Makarov <vmakarov@redhat.com>.
4 : :
5 : : This file is part of GCC.
6 : :
7 : : GCC is free software; you can redistribute it and/or modify it under
8 : : the terms of the GNU General Public License as published by the Free
9 : : Software Foundation; either version 3, or (at your option) any later
10 : : version.
11 : :
12 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 : : for more details.
16 : :
17 : : You should have received a copy of the GNU General Public License
18 : : along with GCC; see the file COPYING3. If not see
19 : : <http://www.gnu.org/licenses/>. */
20 : :
21 : :
22 : : /* This file contains code for a pass to change spilled pseudos into
23 : : memory.
24 : :
25 : : The pass creates necessary stack slots and assigns spilled pseudos
26 : : to the stack slots in the following way:
27 : :
28 : : for all spilled pseudos P most frequently used first do
29 : : for all stack slots S do
30 : : if P doesn't conflict with pseudos assigned to S then
31 : : assign S to P and go on to process the next pseudo
32 : : end
33 : : end
34 : : create new stack slot S and assign P to S
35 : : end
36 : :
37 : : The actual algorithm is a bit more complicated because of different
38 : : pseudo sizes.
39 : :
40 : : After that the code replaces spilled pseudos (except ones created
41 : : from scratches) with the corresponding stack slot memory in the RTL.
42 : :
43 : : If at least one stack slot was created, we need to run more passes
44 : : because we have new addresses which should be checked and because
45 : : the old address displacements might change and address constraints
46 : : (or insn memory constraints) might not be satisfied any more.
47 : :
48 : : For some targets, the pass can spill some pseudos into hard
49 : : registers of a different class (usually into vector registers)
50 : : instead of spilling them into memory, if that is possible and
51 : : profitable. Spilling a GENERAL_REGS pseudo into SSE registers on
52 : : an Intel Core i7 is an example of such an optimization, and it is
53 : : actually recommended by the Intel optimization guide.
54 : :
55 : : The file also contains code for the final replacement of pseudos
56 : : with the hard regs assigned to them. */
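As an editorial illustration of the slot-assignment loop sketched above, the following is a minimal standalone C++ sketch, not GCC code: demo_pseudo, demo_slot and demo_live_range are made-up stand-ins for lra_reg_info, the slots array and lra_live_range_t defined below. It shows only the first-fit reuse of a conflict-free slot, processing the most frequently used pseudos first.

    #include <vector>
    #include <algorithm>

    /* Hypothetical simplified types, for illustration only.  */
    struct demo_live_range { int start, finish; };

    struct demo_pseudo
    {
      int regno;
      int freq;                              /* usage frequency */
      std::vector<demo_live_range> ranges;   /* live ranges of the pseudo */
    };

    struct demo_slot
    {
      std::vector<int> regnos;               /* pseudos sharing the slot */
      std::vector<demo_live_range> ranges;   /* combined live ranges */
    };

    /* Return true if any range in A overlaps any range in B.  */
    static bool
    demo_ranges_intersect_p (const std::vector<demo_live_range> &a,
                             const std::vector<demo_live_range> &b)
    {
      for (const demo_live_range &r1 : a)
        for (const demo_live_range &r2 : b)
          if (r1.start <= r2.finish && r2.start <= r1.finish)
            return true;
      return false;
    }

    /* First-fit slot assignment: process the most frequently used pseudos
       first and reuse the first slot whose pseudos do not conflict.  */
    static std::vector<demo_slot>
    demo_assign_slots (std::vector<demo_pseudo> pseudos)
    {
      std::sort (pseudos.begin (), pseudos.end (),
                 [] (const demo_pseudo &p1, const demo_pseudo &p2)
                 { return p1.freq > p2.freq; });
      std::vector<demo_slot> slots;
      for (const demo_pseudo &p : pseudos)
        {
          demo_slot *s = nullptr;
          for (demo_slot &candidate : slots)
            if (! demo_ranges_intersect_p (candidate.ranges, p.ranges))
              {
                s = &candidate;
                break;
              }
          if (s == nullptr)
            {
              /* No conflict-free slot found: create a new one.  */
              slots.emplace_back ();
              s = &slots.back ();
            }
          s->regnos.push_back (p.regno);
          s->ranges.insert (s->ranges.end (),
                            p.ranges.begin (), p.ranges.end ());
        }
      return slots;
    }

The real pass additionally tracks per-slot size and alignment, keeps constant-sized and variable-sized slots apart, and may spill into hard registers of a target-provided spill class instead of memory.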
57 : :
58 : : #include "config.h"
59 : : #include "system.h"
60 : : #include "coretypes.h"
61 : : #include "backend.h"
62 : : #include "target.h"
63 : : #include "rtl.h"
64 : : #include "df.h"
65 : : #include "insn-config.h"
66 : : #include "regs.h"
67 : : #include "memmodel.h"
68 : : #include "ira.h"
69 : : #include "recog.h"
70 : : #include "output.h"
71 : : #include "cfgrtl.h"
72 : : #include "lra.h"
73 : : #include "lra-int.h"
74 : :
75 : :
76 : : /* Max regno at the start of the pass. */
77 : : static int regs_num;
78 : :
79 : : /* Map spilled regno -> hard regno used instead of memory for
80 : : spilling. */
81 : : static rtx *spill_hard_reg;
82 : :
83 : : /* The structure describes the stack slot of a spilled pseudo. */
84 : : struct pseudo_slot
85 : : {
86 : : /* Number (0, 1, ...) of the stack slot to which given pseudo
87 : : belongs. */
88 : : int slot_num;
89 : : /* First or next slot with the same slot number. */
90 : : struct pseudo_slot *next, *first;
91 : : /* Memory representing the spilled pseudo. */
92 : : rtx mem;
93 : : };
94 : :
95 : : /* The stack slots for each spilled pseudo. Indexed by regnos. */
96 : : static struct pseudo_slot *pseudo_slots;
97 : :
98 : : /* The structure describes a register or a stack slot which can be
99 : : used for several spilled pseudos. */
100 : : class slot
101 : : {
102 : : public:
103 : : /* First pseudo with given stack slot. */
104 : : int regno;
105 : : /* Hard reg into which the slot pseudos are spilled. The value is
106 : : negative for pseudos spilled into memory. */
107 : : int hard_regno;
108 : : /* Maximum alignment required by all users of the slot. */
109 : : unsigned int align;
110 : : /* Maximum size required by all users of the slot. */
111 : : poly_int64 size;
112 : : /* Memory representing the whole stack slot. It can differ from the
113 : : memory representing a pseudo belonging to the given stack slot
114 : : because a pseudo can be placed in a part of the corresponding
115 : : stack slot. The value is NULL for pseudos spilled into a hard reg. */
116 : : rtx mem;
117 : : /* Combined live ranges of all pseudos belonging to the given slot.
118 : : It is used to figure out whether a new spilled pseudo can use the
119 : : given stack slot. */
120 : : lra_live_range_t live_ranges;
121 : : };
122 : :
123 : : /* Array containing info about the stack slots. The array element is
124 : : indexed by the stack slot number in the range [0..slots_num). */
125 : : static class slot *slots;
126 : : /* The number of the stack slots currently existing. */
127 : : static int slots_num;
128 : :
129 : : /* Set up the memory of spilled pseudo I. The function allocates the
130 : : corresponding stack slot if that has not been done yet. */
131 : : static void
132 : 1365519 : assign_mem_slot (int i)
133 : : {
134 : 1365519 : rtx x = NULL_RTX;
135 : 1365519 : machine_mode mode = GET_MODE (regno_reg_rtx[i]);
136 : 2731038 : poly_int64 inherent_size = PSEUDO_REGNO_BYTES (i);
137 : 1365519 : machine_mode wider_mode
138 : 1365519 : = wider_subreg_mode (mode, lra_reg_info[i].biggest_mode);
139 : 2731038 : poly_int64 total_size = GET_MODE_SIZE (wider_mode);
140 : 1365519 : poly_int64 adjust = 0;
141 : :
142 : 1365519 : lra_assert (regno_reg_rtx[i] != NULL_RTX && REG_P (regno_reg_rtx[i])
143 : : && lra_reg_info[i].nrefs != 0 && reg_renumber[i] < 0);
144 : :
145 : 1365519 : unsigned int slot_num = pseudo_slots[i].slot_num;
146 : 1365519 : x = slots[slot_num].mem;
147 : 1365519 : if (!x)
148 : : {
149 : 1513474 : x = assign_stack_local (BLKmode, slots[slot_num].size,
150 : 756737 : slots[slot_num].align);
151 : 756737 : slots[slot_num].mem = x;
152 : : }
153 : :
154 : : /* On a big endian machine, the "address" of the slot is the address
155 : : of the low part that fits its inherent mode. */
156 : 1365519 : adjust += subreg_size_lowpart_offset (inherent_size, total_size);
157 : 1365519 : x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
158 : :
159 : : /* Set all of the memory attributes as appropriate for a spill. */
160 : 1365519 : set_mem_attrs_for_spill (x);
161 : 1365519 : pseudo_slots[i].mem = x;
162 : 1365519 : }
163 : :
164 : : /* Sort pseudos according to their usage frequencies. */
165 : : static int
166 : 28702698 : regno_freq_compare (const void *v1p, const void *v2p)
167 : : {
168 : 28702698 : const int regno1 = *(const int *) v1p;
169 : 28702698 : const int regno2 = *(const int *) v2p;
170 : 28702698 : int diff;
171 : :
172 : 28702698 : if ((diff = lra_reg_info[regno2].freq - lra_reg_info[regno1].freq) != 0)
173 : : return diff;
174 : 11229660 : return regno1 - regno2;
175 : : }
176 : :
177 : : /* Sort pseudos according to their slots, putting the slots in the order
178 : : that they should be allocated.
179 : :
180 : : First prefer to group slots with variable sizes together and slots
181 : : with constant sizes together, since that usually makes them easier
182 : : to address from a common anchor point. E.g. loads of polynomial-sized
183 : : registers tend to take polynomial offsets while loads of constant-sized
184 : : registers tend to take constant (non-polynomial) offsets.
185 : :
186 : : Next, slots with lower numbers have the highest priority and should
187 : : get the smallest displacement from the stack or frame pointer
188 : : (whichever is being used).
189 : :
190 : : The first allocated slot is always closest to the frame pointer,
191 : : so prefer lower slot numbers when frame_pointer_needed. If the stack
192 : : and frame grow in the same direction, then the first allocated slot is
193 : : always closest to the initial stack pointer and furthest away from the
194 : : final stack pointer, so allocate higher numbers first when using the
195 : : stack pointer in that case. The reverse is true if the stack and
196 : : frame grow in opposite directions. */
197 : : static int
198 : 29079311 : pseudo_reg_slot_compare (const void *v1p, const void *v2p)
199 : : {
200 : 29079311 : const int regno1 = *(const int *) v1p;
201 : 29079311 : const int regno2 = *(const int *) v2p;
202 : 29079311 : int diff, slot_num1, slot_num2;
203 : :
204 : 29079311 : slot_num1 = pseudo_slots[regno1].slot_num;
205 : 29079311 : slot_num2 = pseudo_slots[regno2].slot_num;
206 : 29079311 : diff = (int (slots[slot_num1].size.is_constant ())
207 : 29079311 : - int (slots[slot_num2].size.is_constant ()));
208 : 29079311 : if (diff != 0)
209 : : return diff;
210 : 29079311 : if ((diff = slot_num1 - slot_num2) != 0)
211 : 22114335 : return (frame_pointer_needed
212 : 22114335 : || (!FRAME_GROWS_DOWNWARD) == STACK_GROWS_DOWNWARD ? diff : -diff);
213 : 13929952 : poly_int64 total_size1 = GET_MODE_SIZE (lra_reg_info[regno1].biggest_mode);
214 : 13929952 : poly_int64 total_size2 = GET_MODE_SIZE (lra_reg_info[regno2].biggest_mode);
215 : 6964976 : if ((diff = compare_sizes_for_sort (total_size2, total_size1)) != 0)
216 : 1499578 : return diff;
217 : 5465398 : return regno1 - regno2;
218 : : }
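To make the ordering described above pseudo_reg_slot_compare concrete, here is a small self-contained example using a hypothetical demo_entry type and a simplified comparator; it assumes frame_pointer_needed (so lower slot numbers come first) and omits the per-register size tie-breakers.

    #include <cstdio>
    #include <algorithm>
    #include <vector>

    /* Hypothetical simplified entry: which slot a pseudo uses and whether
       that slot has a compile-time-constant size.  */
    struct demo_entry
    {
      int slot_num;
      bool size_is_constant;
    };

    /* Simplified mirror of the ordering policy, assuming
       frame_pointer_needed (lower slot numbers first).  */
    static bool
    demo_slot_order (const demo_entry &e1, const demo_entry &e2)
    {
      /* Group variable-sized slots before constant-sized ones.  */
      if (e1.size_is_constant != e2.size_is_constant)
        return !e1.size_is_constant;
      return e1.slot_num < e2.slot_num;
    }

    int
    main ()
    {
      std::vector<demo_entry> v
        = { { 2, true }, { 0, false }, { 1, true }, { 3, false } };
      std::sort (v.begin (), v.end (), demo_slot_order);
      for (const demo_entry &e : v)
        std::printf ("slot %d (%s-sized)\n", e.slot_num,
                     e.size_is_constant ? "constant" : "variable");
      /* Prints slots 0 and 3 (variable-sized) first, then 1 and 2.  */
      return 0;
    }

Variable-sized slots are grouped ahead of constant-sized ones, mirroring the is_constant () test in the real comparator; when the frame pointer is not needed, the slot-number order may be reversed depending on how the stack and frame grow.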
219 : :
220 : : /* Assign spill hard registers to the N pseudos in PSEUDO_REGNOS,
221 : : which is sorted most frequently used first. Put the pseudos which
222 : : did not get a spill hard register at the beginning of array
223 : : PSEUDO_REGNOS. Return the number of such pseudos. */
224 : : static int
225 : 198162 : assign_spill_hard_regs (int *pseudo_regnos, int n)
226 : : {
227 : 198162 : int i, k, p, regno, res, spill_class_size, hard_regno, nr;
228 : 198162 : enum reg_class rclass, spill_class;
229 : 198162 : machine_mode mode;
230 : 198162 : lra_live_range_t r;
231 : 198162 : rtx_insn *insn;
232 : 198162 : rtx set;
233 : 198162 : basic_block bb;
234 : 198162 : HARD_REG_SET conflict_hard_regs;
235 : 198162 : bitmap setjump_crosses = regstat_get_setjmp_crosses ();
236 : : /* Hard registers which cannot be used for any purpose at given
237 : : program point because they are unallocatable or already allocated
238 : : for other pseudos. */
239 : 198162 : HARD_REG_SET *reserved_hard_regs;
240 : :
241 : 198162 : if (! lra_reg_spill_p)
242 : : return n;
243 : : /* Set up reserved hard regs for every program point. */
244 : 0 : reserved_hard_regs = XNEWVEC (HARD_REG_SET, lra_live_max_point);
245 : 0 : for (p = 0; p < lra_live_max_point; p++)
246 : 0 : reserved_hard_regs[p] = lra_no_alloc_regs;
247 : 0 : for (i = FIRST_PSEUDO_REGISTER; i < regs_num; i++)
248 : 0 : if (lra_reg_info[i].nrefs != 0
249 : 0 : && (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
250 : 0 : for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
251 : 0 : for (p = r->start; p <= r->finish; p++)
252 : 0 : add_to_hard_reg_set (&reserved_hard_regs[p],
253 : : lra_reg_info[i].biggest_mode, hard_regno);
254 : 0 : auto_bitmap ok_insn_bitmap (®_obstack);
255 : 0 : FOR_EACH_BB_FN (bb, cfun)
256 : 0 : FOR_BB_INSNS (bb, insn)
257 : 0 : if (DEBUG_INSN_P (insn)
258 : 0 : || ((set = single_set (insn)) != NULL_RTX
259 : 0 : && REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))))
260 : 0 : bitmap_set_bit (ok_insn_bitmap, INSN_UID (insn));
261 : 0 : for (res = i = 0; i < n; i++)
262 : : {
263 : 0 : regno = pseudo_regnos[i];
264 : 0 : gcc_assert (lra_reg_info[regno].live_ranges);
265 : 0 : rclass = lra_get_allocno_class (regno);
266 : 0 : if (bitmap_bit_p (setjump_crosses, regno)
267 : 0 : || (spill_class
268 : 0 : = ((enum reg_class)
269 : 0 : targetm.spill_class ((reg_class_t) rclass,
270 : 0 : PSEUDO_REGNO_MODE (regno)))) == NO_REGS
271 : 0 : || bitmap_intersect_compl_p (&lra_reg_info[regno].insn_bitmap,
272 : 0 : ok_insn_bitmap))
273 : : {
274 : 0 : pseudo_regnos[res++] = regno;
275 : 0 : continue;
276 : : }
277 : 0 : lra_assert (spill_class != NO_REGS);
278 : 0 : conflict_hard_regs = lra_reg_info[regno].conflict_hard_regs;
279 : 0 : for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
280 : 0 : for (p = r->start; p <= r->finish; p++)
281 : 0 : conflict_hard_regs |= reserved_hard_regs[p];
282 : 0 : spill_class_size = ira_class_hard_regs_num[spill_class];
283 : 0 : mode = lra_reg_info[regno].biggest_mode;
284 : 0 : for (k = 0; k < spill_class_size; k++)
285 : : {
286 : 0 : hard_regno = ira_class_hard_regs[spill_class][k];
287 : 0 : if (TEST_HARD_REG_BIT (eliminable_regset, hard_regno)
288 : 0 : || !targetm.hard_regno_mode_ok (hard_regno, mode))
289 : 0 : continue;
290 : 0 : if (! overlaps_hard_reg_set_p (conflict_hard_regs, mode, hard_regno))
291 : : break;
292 : : }
293 : 0 : if (k >= spill_class_size)
294 : : {
295 : : /* There are no available regs -- assign memory later. */
296 : 0 : pseudo_regnos[res++] = regno;
297 : 0 : continue;
298 : : }
299 : 0 : if (lra_dump_file != NULL)
300 : 0 : fprintf (lra_dump_file, " Spill r%d into hr%d\n", regno, hard_regno);
301 : 0 : add_to_hard_reg_set (&hard_regs_spilled_into,
302 : 0 : lra_reg_info[regno].biggest_mode, hard_regno);
303 : : /* Update reserved_hard_regs. */
304 : 0 : for (r = lra_reg_info[regno].live_ranges; r != NULL; r = r->next)
305 : 0 : for (p = r->start; p <= r->finish; p++)
306 : 0 : add_to_hard_reg_set (&reserved_hard_regs[p],
307 : : lra_reg_info[regno].biggest_mode, hard_regno);
308 : 0 : spill_hard_reg[regno]
309 : 0 : = gen_raw_REG (PSEUDO_REGNO_MODE (regno), hard_regno);
310 : 0 : for (nr = 0;
311 : 0 : nr < hard_regno_nregs (hard_regno,
312 : 0 : lra_reg_info[regno].biggest_mode);
313 : : nr++)
314 : : /* Just loop. */
315 : 0 : df_set_regs_ever_live (hard_regno + nr, true);
316 : : }
317 : 0 : free (reserved_hard_regs);
318 : 0 : return res;
319 : 0 : }
320 : :
321 : : /* Add pseudo REGNO to slot SLOT_NUM. */
322 : : static void
323 : 1365519 : add_pseudo_to_slot (int regno, int slot_num)
324 : : {
325 : 1365519 : struct pseudo_slot *first;
326 : :
327 : : /* Each pseudo has an inherent size which comes from its own mode,
328 : : and a total size which provides room for paradoxical subregs.
329 : : We need to make sure the size and alignment of the slot are
330 : : sufficient for both. */
331 : 2731038 : machine_mode mode = wider_subreg_mode (PSEUDO_REGNO_MODE (regno),
332 : 1365519 : lra_reg_info[regno].biggest_mode);
333 : 1365519 : unsigned int align = spill_slot_alignment (mode);
334 : 1365519 : slots[slot_num].align = MAX (slots[slot_num].align, align);
335 : 1365519 : slots[slot_num].size = upper_bound (slots[slot_num].size,
336 : 1365519 : GET_MODE_SIZE (mode));
337 : :
338 : 1365519 : if (slots[slot_num].regno < 0)
339 : : {
340 : : /* It is the first pseudo in the slot. */
341 : 756737 : slots[slot_num].regno = regno;
342 : 756737 : pseudo_slots[regno].first = &pseudo_slots[regno];
343 : 756737 : pseudo_slots[regno].next = NULL;
344 : : }
345 : : else
346 : : {
347 : 608782 : first = pseudo_slots[regno].first = &pseudo_slots[slots[slot_num].regno];
348 : 608782 : pseudo_slots[regno].next = first->next;
349 : 608782 : first->next = &pseudo_slots[regno];
350 : 608782 : lra_assert (slots[slot_num].live_ranges);
351 : : }
352 : 1365519 : pseudo_slots[regno].mem = NULL_RTX;
353 : 1365519 : pseudo_slots[regno].slot_num = slot_num;
354 : :
355 : : /* Pseudos with empty ranges shouldn't need to be spilled; if we get
356 : : an empty range added to a slot, something fishy is going on, such
357 : : as missing live range information, and without that information
358 : : we may generate wrong code. We can probably relax this to an
359 : : lra_assert at some point. Likewise the analogous one in
360 : : assign_spill_hard_regs. */
361 : 1365519 : gcc_assert (lra_reg_info[regno].live_ranges);
362 : :
363 : 1365519 : slots[slot_num].live_ranges
364 : 1365519 : = lra_merge_live_ranges (slots[slot_num].live_ranges,
365 : : lra_copy_live_range_list
366 : : (lra_reg_info[regno].live_ranges));
367 : 1365519 : }
368 : :
369 : : /* Recompute the combined live ranges of pseudos assigned to stack
370 : : slots. This brings the live ranges of slots back in sync with
371 : : those of pseudos, after recomputing live ranges for pseudos. */
372 : : void
373 : 0 : lra_recompute_slots_live_ranges (void)
374 : : {
375 : 0 : for (int i = 0; i < slots_num; i++)
376 : : {
377 : 0 : if (slots[i].regno < 0)
378 : 0 : continue;
379 : 0 : lra_reset_live_range_list (slots[i].live_ranges);
380 : 0 : for (struct pseudo_slot *next = pseudo_slots[slots[i].regno].first;
381 : 0 : next; next = next->next)
382 : : {
383 : 0 : int regno = next - pseudo_slots;
384 : 0 : lra_assert (!(lra_intersected_live_ranges_p
385 : : (lra_reg_info[regno].live_ranges,
386 : : slots[i].live_ranges)));
387 : 0 : slots[i].live_ranges
388 : 0 : = lra_merge_live_ranges (slots[i].live_ranges,
389 : : lra_copy_live_range_list
390 : 0 : (lra_reg_info[regno].live_ranges));
391 : : }
392 : : }
393 : 0 : }
394 : :
395 : : /* Assign stack slot numbers to pseudos in array PSEUDO_REGNOS of
396 : : length N. Sort pseudos in PSEUDO_REGNOS for the subsequent
397 : : assignment of memory stack slots. */
398 : : static void
399 : 198162 : assign_stack_slot_num_and_sort_pseudos (int *pseudo_regnos, int n)
400 : : {
401 : 198162 : int i, j, regno;
402 : :
403 : : /* Assign stack slot numbers to spilled pseudos, use smaller numbers
404 : : for most frequently used pseudos. */
405 : 1563681 : for (i = 0; i < n; i++)
406 : : {
407 : 1365519 : regno = pseudo_regnos[i];
408 : 1365519 : if (! flag_ira_share_spill_slots)
409 : 27562 : j = slots_num;
410 : : else
411 : : {
412 : 1337957 : machine_mode mode
413 : 2675914 : = wider_subreg_mode (PSEUDO_REGNO_MODE (regno),
414 : 1337957 : lra_reg_info[regno].biggest_mode);
415 : 26336479 : for (j = 0; j < slots_num; j++)
416 : 25607304 : if (slots[j].hard_regno < 0
417 : : /* Although it's possible to share slots between modes
418 : : with constant and non-constant widths, we usually
419 : : get better spill code by keeping the constant and
420 : : non-constant areas separate. */
421 : 25607304 : && (GET_MODE_SIZE (mode).is_constant ()
422 : : == slots[j].size.is_constant ())
423 : 51214608 : && ! (lra_intersected_live_ranges_p
424 : 25607304 : (slots[j].live_ranges,
425 : 25607304 : lra_reg_info[regno].live_ranges)))
426 : : {
427 : : /* A slot without allocated memory can be shared. */
428 : 608782 : if (slots[j].mem == NULL_RTX)
429 : : break;
430 : :
431 : : /* A slot with allocated memory can be shared only by an equal
432 : : or smaller register with equal or smaller alignment. */
433 : 0 : if (slots[j].align >= spill_slot_alignment (mode)
434 : 0 : && known_ge (slots[j].size, GET_MODE_SIZE (mode)))
435 : : break;
436 : : }
437 : : }
438 : 1365519 : if (j >= slots_num)
439 : : {
440 : : /* New slot. */
441 : 756737 : slots[j].live_ranges = NULL;
442 : 756737 : slots[j].size = 0;
443 : 756737 : slots[j].align = BITS_PER_UNIT;
444 : 756737 : slots[j].regno = slots[j].hard_regno = -1;
445 : 756737 : slots[j].mem = NULL_RTX;
446 : 756737 : slots_num++;
447 : : }
448 : 1365519 : add_pseudo_to_slot (regno, j);
449 : : }
450 : : /* Sort regnos according to their slot numbers. */
451 : 198162 : qsort (pseudo_regnos, n, sizeof (int), pseudo_reg_slot_compare);
452 : 198162 : }
453 : :
454 : : /* Recursively process LOC in INSN and change spilled pseudos to the
455 : : corresponding memory or spill hard reg. Ignore spilled pseudos
456 : : created from scratches. Return true if the pseudo's nrefs is 0
457 : : (don't change the pseudo in this case); otherwise return false. */
458 : : static bool
459 : 33824650 : remove_pseudos (rtx *loc, rtx_insn *insn)
460 : : {
461 : 33824650 : int i;
462 : 33824650 : rtx hard_reg;
463 : 33824650 : const char *fmt;
464 : 33824650 : enum rtx_code code;
465 : 33824650 : bool res = false;
466 : :
467 : 33824650 : if (*loc == NULL_RTX)
468 : : return res;
469 : 31081081 : code = GET_CODE (*loc);
470 : 31081081 : if (code == SUBREG && REG_P (SUBREG_REG (*loc)))
471 : : {
472 : : /* Try to remove memory subregs to simplify LRA job
473 : : and avoid LRA cycling in case of subreg memory reload. */
474 : 264192 : res = remove_pseudos (&SUBREG_REG (*loc), insn);
475 : 264192 : if (GET_CODE (SUBREG_REG (*loc)) == MEM)
476 : : {
477 : 200490 : alter_subreg (loc, false);
478 : 200490 : if (GET_CODE (*loc) == MEM)
479 : : {
480 : 200490 : lra_update_insn_recog_data (insn);
481 : 200490 : if (lra_dump_file != NULL)
482 : 1 : fprintf (lra_dump_file,
483 : : "Memory subreg was simplified in insn #%u\n",
484 : 1 : INSN_UID (insn));
485 : : }
486 : : }
487 : 264192 : return res;
488 : : }
489 : 14244052 : else if (code == REG && (i = REGNO (*loc)) >= FIRST_PSEUDO_REGISTER
490 : 8505709 : && lra_get_regno_hard_regno (i) < 0
491 : : /* We do not want to assign memory for former scratches because
492 : : it might result in an address reload for some targets. In
493 : : any case, such pseudos that did not get hard registers are
494 : : transformed back into scratches. */
495 : 35679163 : && ! ira_former_scratch_p (i))
496 : : {
497 : 4862274 : if (lra_reg_info[i].nrefs == 0
498 : 23382 : && pseudo_slots[i].mem == NULL && spill_hard_reg[i] == NULL)
499 : : return true;
500 : 4862274 : if ((hard_reg = spill_hard_reg[i]) != NULL_RTX)
501 : 0 : *loc = copy_rtx (hard_reg);
502 : 4862274 : else if (pseudo_slots[i].mem != NULL_RTX)
503 : : /* There might be no memory slot or hard reg for a pseudo when we
504 : : spill the frame pointer after the frame pointer to stack pointer
505 : : elimination became impossible. */
506 : : {
507 : 9724548 : rtx x = lra_eliminate_regs_1 (insn, pseudo_slots[i].mem,
508 : 4862274 : GET_MODE (pseudo_slots[i].mem),
509 : : false, false, 0, true);
510 : 4862274 : *loc = x != pseudo_slots[i].mem ? x : copy_rtx (x);
511 : : }
512 : 4862274 : return res;
513 : : }
514 : :
515 : 25954615 : fmt = GET_RTX_FORMAT (code);
516 : 62573755 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
517 : : {
518 : 36619140 : if (fmt[i] == 'e')
519 : 25810273 : res = remove_pseudos (&XEXP (*loc, i), insn) || res;
520 : 10808867 : else if (fmt[i] == 'E')
521 : : {
522 : 100100 : int j;
523 : :
524 : 316902 : for (j = XVECLEN (*loc, i) - 1; j >= 0; j--)
525 : 216802 : res = remove_pseudos (&XVECEXP (*loc, i, j), insn) || res;
526 : : }
527 : : }
528 : : return res;
529 : : }
530 : :
531 : : /* Convert spilled pseudos into their stack slots or spill hard regs,
532 : : and put the insns to process on the constraint stack (that is, all
533 : : insns in which pseudos were changed to memory or spill hard regs). */
534 : : static void
535 : 198162 : spill_pseudos (void)
536 : : {
537 : 198162 : basic_block bb;
538 : 198162 : rtx_insn *insn, *curr;
539 : 198162 : int i;
540 : :
541 : 198162 : auto_bitmap spilled_pseudos (®_obstack);
542 : 198162 : auto_bitmap changed_insns (®_obstack);
543 : 35382346 : for (i = FIRST_PSEUDO_REGISTER; i < regs_num; i++)
544 : : {
545 : 18045983 : if (lra_reg_info[i].nrefs != 0 && lra_get_regno_hard_regno (i) < 0
546 : 36552969 : && ! ira_former_scratch_p (i))
547 : : {
548 : 1365519 : bitmap_set_bit (spilled_pseudos, i);
549 : 1365519 : bitmap_ior_into (changed_insns, &lra_reg_info[i].insn_bitmap);
550 : : }
551 : : }
552 : 7091420 : FOR_EACH_BB_FN (bb, cfun)
553 : : {
554 : 204449934 : FOR_BB_INSNS_SAFE (bb, insn, curr)
555 : : {
556 : 95331709 : bool removed_pseudo_p = false;
557 : :
558 : 95331709 : if (bitmap_bit_p (changed_insns, INSN_UID (insn)))
559 : : {
560 : 4789814 : rtx *link_loc, link;
561 : :
562 : 4789814 : removed_pseudo_p = remove_pseudos (&PATTERN (insn), insn);
563 : 4789814 : if (CALL_P (insn)
564 : 4789814 : && remove_pseudos (&CALL_INSN_FUNCTION_USAGE (insn), insn))
565 : : removed_pseudo_p = true;
566 : 4789814 : for (link_loc = ®_NOTES (insn);
567 : 8050735 : (link = *link_loc) != NULL_RTX;
568 : 3260921 : link_loc = &XEXP (link, 1))
569 : : {
570 : 3260921 : switch (REG_NOTE_KIND (link))
571 : : {
572 : 0 : case REG_FRAME_RELATED_EXPR:
573 : 0 : case REG_CFA_DEF_CFA:
574 : 0 : case REG_CFA_ADJUST_CFA:
575 : 0 : case REG_CFA_OFFSET:
576 : 0 : case REG_CFA_REGISTER:
577 : 0 : case REG_CFA_EXPRESSION:
578 : 0 : case REG_CFA_RESTORE:
579 : 0 : case REG_CFA_SET_VDRAP:
580 : 0 : if (remove_pseudos (&XEXP (link, 0), insn))
581 : 3260921 : removed_pseudo_p = true;
582 : : break;
583 : : default:
584 : : break;
585 : : }
586 : : }
587 : 4789814 : if (GET_CODE (PATTERN (insn)) == CLOBBER)
588 : : /* This is a CLOBBER insn with a pseudo spilled to memory.
589 : : Mark it for removal later, together with the LRA temporary
590 : : CLOBBER insns. */
591 : 7524 : LRA_TEMP_CLOBBER_P (PATTERN (insn)) = 1;
592 : 4789814 : if (lra_dump_file != NULL)
593 : 2 : fprintf (lra_dump_file,
594 : : "Changing spilled pseudos to memory in insn #%u\n",
595 : 2 : INSN_UID (insn));
596 : 4789814 : lra_push_insn_and_update_insn_regno_info (insn);
597 : 4789814 : if (lra_reg_spill_p || targetm.different_addr_displacement_p ())
598 : 0 : lra_set_used_insn_alternative (insn, LRA_UNKNOWN_ALT);
599 : : }
600 : 90541895 : else if (CALL_P (insn)
601 : : /* The presence of a pseudo in CALL_INSN_FUNCTION_USAGE
602 : : does not affect the value of insn_bitmap of the
603 : : corresponding lra_reg_info. That is because we
604 : : don't need to reload pseudos in
605 : : CALL_INSN_FUNCTION_USAGEs. So if we processed only
606 : : the insns in the insn_bitmap of a given pseudo here,
607 : : we could miss the pseudo in some
608 : : CALL_INSN_FUNCTION_USAGEs. */
609 : 90541895 : && remove_pseudos (&CALL_INSN_FUNCTION_USAGE (insn), insn))
610 : : removed_pseudo_p = true;
611 : 4789814 : if (removed_pseudo_p)
612 : : {
613 : 0 : lra_assert (DEBUG_INSN_P (insn));
614 : 0 : lra_invalidate_insn_data (insn);
615 : 0 : INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
616 : 0 : if (lra_dump_file != NULL)
617 : 0 : fprintf (lra_dump_file,
618 : : "Debug insn #%u is reset because it referenced "
619 : 0 : "removed pseudo\n", INSN_UID (insn));
620 : : }
621 : 95331709 : bitmap_and_compl_into (df_get_live_in (bb), spilled_pseudos);
622 : 95331709 : bitmap_and_compl_into (df_get_live_out (bb), spilled_pseudos);
623 : : }
624 : : }
625 : 198162 : }
626 : :
627 : : /* Return true if we need scratch reg assignments. */
628 : : bool
629 : 326 : lra_need_for_scratch_reg_p (void)
630 : : {
631 : 326 : int i; max_regno = max_reg_num ();
632 : :
633 : 17659 : for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
634 : 6473 : if (lra_reg_info[i].nrefs != 0 && lra_get_regno_hard_regno (i) < 0
635 : 17333 : && ira_former_scratch_p (i))
636 : : return true;
637 : : return false;
638 : : }
639 : :
640 : : /* Return true if we need to change some pseudos into memory. */
641 : : bool
642 : 1649120 : lra_need_for_spills_p (void)
643 : : {
644 : 1649120 : int i;
645 : :
646 : 1649120 : max_regno = max_reg_num ();
647 : 88687659 : for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
648 : 39593477 : if (lra_reg_info[i].nrefs != 0 && lra_get_regno_hard_regno (i) < 0
649 : 87444142 : && ! ira_former_scratch_p (i))
650 : : return true;
651 : : return false;
652 : : }
653 : :
654 : : /* Change spilled pseudos into memory or spill hard regs. Put changed
655 : : insns on the constraint stack (these insns will be considered on
656 : : the next constraint pass). The changed insns are all insns in
657 : : which pseudos were changed. */
658 : : void
659 : 198162 : lra_spill (void)
660 : : {
661 : 198162 : int i, n, n2, curr_regno;
662 : 198162 : int *pseudo_regnos;
663 : :
664 : 198162 : regs_num = max_reg_num ();
665 : 198162 : spill_hard_reg = XNEWVEC (rtx, regs_num);
666 : 198162 : pseudo_regnos = XNEWVEC (int, regs_num);
667 : 35580508 : for (n = 0, i = FIRST_PSEUDO_REGISTER; i < regs_num; i++)
668 : 18045983 : if (lra_reg_info[i].nrefs != 0 && lra_get_regno_hard_regno (i) < 0
669 : : /* We do not want to assign memory for former scratches. */
670 : 36552969 : && ! ira_former_scratch_p (i))
671 : 1365519 : pseudo_regnos[n++] = i;
672 : 198162 : lra_assert (n > 0);
673 : 198162 : pseudo_slots = XNEWVEC (struct pseudo_slot, regs_num);
674 : 35382346 : for (i = FIRST_PSEUDO_REGISTER; i < regs_num; i++)
675 : : {
676 : 35184184 : spill_hard_reg[i] = NULL_RTX;
677 : 35184184 : pseudo_slots[i].mem = NULL_RTX;
678 : : }
679 : 198162 : slots = XNEWVEC (class slot, regs_num);
680 : : /* Sort regnos according to their usage frequencies. */
681 : 198162 : qsort (pseudo_regnos, n, sizeof (int), regno_freq_compare);
682 : 198162 : n = assign_spill_hard_regs (pseudo_regnos, n);
683 : 198162 : slots_num = 0;
684 : 198162 : assign_stack_slot_num_and_sort_pseudos (pseudo_regnos, n);
685 : 1761843 : for (i = 0; i < n; i++)
686 : 1365519 : if (pseudo_slots[pseudo_regnos[i]].mem == NULL_RTX)
687 : 1365519 : assign_mem_slot (pseudo_regnos[i]);
688 : 198162 : if ((n2 = lra_update_fp2sp_elimination (pseudo_regnos)) > 0)
689 : : {
690 : : /* Assign stack slots to spilled pseudos assigned to fp. */
691 : 0 : assign_stack_slot_num_and_sort_pseudos (pseudo_regnos, n2);
692 : 0 : for (i = 0; i < n2; i++)
693 : 0 : if (pseudo_slots[pseudo_regnos[i]].mem == NULL_RTX)
694 : 0 : assign_mem_slot (pseudo_regnos[i]);
695 : : }
696 : 198162 : if (n + n2 > 0 && crtl->stack_alignment_needed)
697 : : /* If we have a stack frame, we must align it now. The stack size
698 : : may be a part of the offset computation for register
699 : : elimination. */
700 : 198162 : assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
701 : 198162 : if (lra_dump_file != NULL)
702 : : {
703 : 2 : for (i = 0; i < slots_num; i++)
704 : : {
705 : 1 : fprintf (lra_dump_file, " Slot %d regnos (width = ", i);
706 : 1 : print_dec (slots[i].size, lra_dump_file, SIGNED);
707 : 1 : fprintf (lra_dump_file, "):");
708 : 1 : for (curr_regno = slots[i].regno;;
709 : 0 : curr_regno = pseudo_slots[curr_regno].next - pseudo_slots)
710 : : {
711 : 1 : fprintf (lra_dump_file, " %d", curr_regno);
712 : 1 : if (pseudo_slots[curr_regno].next == NULL)
713 : : break;
714 : : }
715 : 1 : fprintf (lra_dump_file, "\n");
716 : : }
717 : : }
718 : 198162 : spill_pseudos ();
719 : 198162 : free (slots);
720 : 198162 : free (pseudo_slots);
721 : 198162 : free (pseudo_regnos);
722 : 198162 : free (spill_hard_reg);
723 : 198162 : }
724 : :
725 : : /* Apply alter_subreg for subregs of regs in *LOC. Use FINAL_P for
726 : : alter_subreg calls. Return true if any subreg of reg is
727 : : processed. */
728 : : static bool
729 : 345794454 : alter_subregs (rtx *loc, bool final_p)
730 : : {
731 : 345794454 : int i;
732 : 345794454 : rtx x = *loc;
733 : 345794454 : bool res;
734 : 345794454 : const char *fmt;
735 : 345794454 : enum rtx_code code;
736 : :
737 : 345794454 : if (x == NULL_RTX)
738 : : return false;
739 : 345794454 : code = GET_CODE (x);
740 : 345794454 : if (code == SUBREG && REG_P (SUBREG_REG (x)))
741 : : {
742 : 2883177 : lra_assert (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER);
743 : 2883177 : alter_subreg (loc, final_p);
744 : 2883177 : return true;
745 : : }
746 : 342911277 : fmt = GET_RTX_FORMAT (code);
747 : 342911277 : res = false;
748 : 821465050 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
749 : : {
750 : 478553773 : if (fmt[i] == 'e')
751 : : {
752 : 118441109 : if (alter_subregs (&XEXP (x, i), final_p))
753 : 478553773 : res = true;
754 : : }
755 : 360112664 : else if (fmt[i] == 'E')
756 : : {
757 : 473136 : int j;
758 : :
759 : 2231320 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
760 : 1758184 : if (alter_subregs (&XVECEXP (x, i, j), final_p))
761 : 811 : res = true;
762 : : }
763 : : }
764 : : return res;
765 : : }
766 : :
767 : : /* Finally change pseudos that got hard registers into the
768 : : corresponding hard registers, and remove temporary clobbers. */
769 : : void
770 : 1449060 : lra_final_code_change (void)
771 : : {
772 : 1449060 : int i, hard_regno;
773 : 1449060 : basic_block bb;
774 : 1449060 : rtx_insn *insn, *curr;
775 : 1449060 : rtx set;
776 : 1449060 : int max_regno = max_reg_num ();
777 : :
778 : 77632369 : for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
779 : 74734249 : if (lra_reg_info[i].nrefs != 0
780 : 109408962 : && (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
781 : 34669452 : SET_REGNO (regno_reg_rtx[i], hard_regno);
782 : 15845764 : FOR_EACH_BB_FN (bb, cfun)
783 : 364576284 : FOR_BB_INSNS_SAFE (bb, insn, curr)
784 : 167891438 : if (INSN_P (insn))
785 : : {
786 : 139665172 : rtx pat = PATTERN (insn);
787 : :
788 : 139665172 : if (GET_CODE (pat) == USE && XEXP (pat, 0) == const1_rtx)
789 : : {
790 : : /* Remove markers to eliminate critical edges for jump insn
791 : : output reloads (see code in ira.cc::ira). */
792 : 11 : lra_invalidate_insn_data (insn);
793 : 11 : delete_insn (insn);
794 : 11 : continue;
795 : : }
796 : 139665161 : if (GET_CODE (pat) == CLOBBER && LRA_TEMP_CLOBBER_P (pat))
797 : : {
798 : : /* Remove clobbers temporarily created in LRA. We don't
799 : : need them anymore and don't want to waste compiler
800 : : time processing them in a few subsequent passes. */
801 : 133730 : lra_invalidate_insn_data (insn);
802 : 133730 : delete_insn (insn);
803 : 133730 : continue;
804 : : }
805 : :
806 : : /* IRA can generate move insns involving pseudos. It is
807 : : better to remove them earlier to speed up the compiler a
808 : : bit. It is also better to do it here as they might not
809 : : pass the final RTL check in LRA (e.g. an insn moving a
810 : : control register into itself). So remove a useless move
811 : : insn unless the next insn is a USE marking the return reg
812 : : (we should keep this, as some subsequent optimizations
813 : : assume that such original insns are preserved). */
814 : 77937396 : if (NONJUMP_INSN_P (insn) && GET_CODE (pat) == SET
815 : 66076843 : && REG_P (SET_SRC (pat)) && REG_P (SET_DEST (pat))
816 : 17123183 : && REGNO (SET_SRC (pat)) == REGNO (SET_DEST (pat))
817 : 150251591 : && REGNO (SET_SRC (pat)) >= FIRST_PSEUDO_REGISTER)
818 : : {
819 : 0 : lra_invalidate_insn_data (insn);
820 : 0 : delete_insn (insn);
821 : 0 : continue;
822 : : }
823 : :
824 : 139531431 : lra_insn_recog_data_t id = lra_get_insn_recog_data (insn);
825 : 139531431 : struct lra_insn_reg *reg;
826 : :
827 : 291541460 : for (reg = id->regs; reg != NULL; reg = reg->next)
828 : 152010647 : if (reg->regno >= FIRST_PSEUDO_REGISTER
829 : 99669093 : && lra_reg_info [reg->regno].nrefs == 0)
830 : : break;
831 : :
832 : 139531431 : if (reg != NULL)
833 : : {
834 : : /* Pseudos can still be in debug insns in some very rare
835 : : and complicated cases, e.g. when the pseudo was removed
836 : : by inheritance and the debug insn is not in the EBBs
837 : : where the inheritance happened. It is difficult and
838 : : time consuming to find which hard register corresponds
839 : : to the pseudo -- so just remove the debug insn. Another
840 : : solution could be assigning a hard reg/memory, but that
841 : : would be misleading info. It is better not to have
842 : : info than to have it wrong. */
843 : 618 : lra_assert (DEBUG_INSN_P (insn));
844 : 618 : lra_invalidate_insn_data (insn);
845 : 618 : delete_insn (insn);
846 : 618 : continue;
847 : : }
848 : :
849 : 139530813 : struct lra_static_insn_data *static_id = id->insn_static_data;
850 : 139530813 : bool insn_change_p = false;
851 : :
852 : 371748731 : for (i = id->insn_static_data->n_operands - 1; i >= 0; i--)
853 : 196532389 : if ((DEBUG_INSN_P (insn) || ! static_id->operand[i].is_operator)
854 : 421343296 : && alter_subregs (id->operand_loc[i], ! DEBUG_INSN_P (insn)))
855 : : {
856 : 2880945 : lra_update_dup (id, i);
857 : 2880945 : insn_change_p = true;
858 : : }
859 : 139530813 : if ((GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
860 : 139530813 : && alter_subregs (&XEXP (pat, 0), false))
861 : : insn_change_p = true;
862 : 139530813 : if (insn_change_p)
863 : 2562030 : lra_update_operator_dups (id);
864 : :
865 : 139530813 : if ((set = single_set (insn)) != NULL
866 : 87535012 : && REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
867 : 157631808 : && REGNO (SET_SRC (set)) == REGNO (SET_DEST (set)))
868 : : {
869 : : /* Remove a useless move insn. IRA can generate move
870 : : insns involving pseudos. It is better to remove them
871 : : earlier to speed up the compiler a bit. It is also
872 : : better to do it here as they might not pass the final
873 : : RTL check in LRA (e.g. an insn moving a control
874 : : register into itself). */
875 : 11036672 : lra_invalidate_insn_data (insn);
876 : 11036672 : delete_insn (insn);
877 : : }
878 : : }
879 : 1449060 : }