Branch data Line data Source code
1 : : /* Definitions for computing resource usage of specific insns.
2 : : Copyright (C) 1999-2024 Free Software Foundation, Inc.
3 : :
4 : : This file is part of GCC.
5 : :
6 : : GCC is free software; you can redistribute it and/or modify it under
7 : : the terms of the GNU General Public License as published by the Free
8 : : Software Foundation; either version 3, or (at your option) any later
9 : : version.
10 : :
11 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : : for more details.
15 : :
16 : : You should have received a copy of the GNU General Public License
17 : : along with GCC; see the file COPYING3. If not see
18 : : <http://www.gnu.org/licenses/>. */
19 : :
20 : : #include "config.h"
21 : : #include "system.h"
22 : : #include "coretypes.h"
23 : : #include "backend.h"
24 : : #include "target.h"
25 : : #include "rtl.h"
26 : : #include "df.h"
27 : : #include "memmodel.h"
28 : : #include "tm_p.h"
29 : : #include "regs.h"
30 : : #include "emit-rtl.h"
31 : : #include "resource.h"
32 : : #include "insn-attr.h"
33 : : #include "function-abi.h"
34 : :
35 : : /* This structure is used to record liveness information at the targets or
36 : : fallthrough insns of branches. We will most likely need the information
37 : : at targets again, so save them in a hash table rather than recomputing them
38 : : each time. */
39 : :
40 : : struct target_info
41 : : {
42 : : int uid; /* INSN_UID of target. */
43 : : struct target_info *next; /* Next info for same hash bucket. */
44 : : HARD_REG_SET live_regs; /* Registers live at target. */
45 : : int block; /* Basic block number containing target. */
46 : : int bb_tick; /* Generation count of basic block info. */
47 : : };
48 : :
49 : : #define TARGET_HASH_PRIME 257
50 : :
51 : : /* Indicates what resources are required at the beginning of the epilogue. */
52 : : static struct resources start_of_epilogue_needs;
53 : :
54 : : /* Indicates what resources are required at function end. */
55 : : static struct resources end_of_function_needs;
56 : :
57 : : /* Define the hash table itself. */
58 : : static struct target_info **target_hash_table = NULL;
59 : :
60 : : /* For each basic block, we maintain a generation number of its basic
61 : : block info, which is updated each time we move an insn from the
62 : : target of a jump. This is the generation number indexed by block
63 : : number. */
64 : :
65 : : static int *bb_ticks;
66 : :
67 : : /* Marks registers possibly live at the current place being scanned by
68 : : mark_target_live_regs. Also used by update_live_status. */
69 : :
70 : : static HARD_REG_SET current_live_regs;
71 : :
72 : : /* Marks registers for which we have seen a REG_DEAD note but no assignment.
73 : : Also only used by the next two functions. */
74 : :
75 : : static HARD_REG_SET pending_dead_regs;
76 : :
77 : : static void update_live_status (rtx, const_rtx, void *);
78 : : static int find_basic_block (rtx_insn *, int);
79 : : static rtx_insn *next_insn_no_annul (rtx_insn *);
80 : : static rtx_insn *find_dead_or_set_registers (rtx_insn *, struct resources*,
81 : : rtx *, int, struct resources,
82 : : struct resources);
83 : :
84 : : /* Utility function called from mark_target_live_regs via note_stores.
85 : : It deadens any CLOBBERed registers and livens any SET registers. */
86 : :
87 : : static void
88 : 0 : update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
89 : : {
90 : 0 : int first_regno, last_regno;
91 : 0 : int i;
92 : :
93 : 0 : if (!REG_P (dest)
94 : 0 : && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
95 : : return;
96 : :
97 : 0 : if (GET_CODE (dest) == SUBREG)
98 : : {
99 : 0 : first_regno = subreg_regno (dest);
100 : 0 : last_regno = first_regno + subreg_nregs (dest);
101 : :
102 : : }
103 : : else
104 : : {
105 : 0 : first_regno = REGNO (dest);
106 : 0 : last_regno = END_REGNO (dest);
107 : : }
108 : :
109 : 0 : if (GET_CODE (x) == CLOBBER)
110 : 0 : for (i = first_regno; i < last_regno; i++)
111 : 0 : CLEAR_HARD_REG_BIT (current_live_regs, i);
112 : : else
113 : 0 : for (i = first_regno; i < last_regno; i++)
114 : : {
115 : 0 : SET_HARD_REG_BIT (current_live_regs, i);
116 : 0 : CLEAR_HARD_REG_BIT (pending_dead_regs, i);
117 : : }
118 : : }
119 : :
120 : : /* Find the number of the basic block with correct live register
121 : : information that starts closest to INSN. Return -1 if we couldn't
122 : : find such a basic block or the beginning is more than
123 : : SEARCH_LIMIT instructions before INSN. Use SEARCH_LIMIT = -1 for
124 : : an unlimited search.
125 : :
126 : : The delay slot filling code destroys the control-flow graph so,
127 : : instead of finding the basic block containing INSN, we search
128 : : backwards toward a BARRIER where the live register information is
129 : : correct. */
130 : :
131 : : static int
132 : 0 : find_basic_block (rtx_insn *insn, int search_limit)
133 : : {
134 : : /* Scan backwards to the previous BARRIER. Then see if we can find a
135 : : label that starts a basic block. Return the basic block number. */
136 : 0 : for (insn = prev_nonnote_insn (insn);
137 : 0 : insn && !BARRIER_P (insn) && search_limit != 0;
138 : 0 : insn = prev_nonnote_insn (insn), --search_limit)
139 : : ;
140 : :
141 : : /* The closest BARRIER is too far away. */
142 : 0 : if (search_limit == 0)
143 : : return -1;
144 : :
145 : : /* The start of the function. */
146 : 0 : else if (insn == 0)
147 : 0 : return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index;
148 : :
149 : : /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
150 : : anything other than a CODE_LABEL or note, we can't find this code. */
151 : 0 : for (insn = next_nonnote_insn (insn);
152 : 0 : insn && LABEL_P (insn);
153 : 0 : insn = next_nonnote_insn (insn))
154 : 0 : if (BLOCK_FOR_INSN (insn))
155 : 0 : return BLOCK_FOR_INSN (insn)->index;
156 : :
157 : : return -1;
158 : : }
159 : :
160 : : /* Similar to next_insn, but ignores insns in the delay slots of
161 : : an annulled branch. */
162 : :
163 : : static rtx_insn *
164 : 0 : next_insn_no_annul (rtx_insn *insn)
165 : : {
166 : 0 : if (insn)
167 : : {
168 : : /* If INSN is an annulled branch, skip any insns from the target
169 : : of the branch. */
170 : 0 : if (JUMP_P (insn)
171 : 0 : && INSN_ANNULLED_BRANCH_P (insn)
172 : 0 : && NEXT_INSN (PREV_INSN (insn)) != insn)
173 : : {
174 : 0 : rtx_insn *next = NEXT_INSN (insn);
175 : :
176 : 0 : while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next))
177 : 0 : && INSN_FROM_TARGET_P (next))
178 : : {
179 : 0 : insn = next;
180 : 0 : next = NEXT_INSN (insn);
181 : : }
182 : : }
183 : :
184 : 0 : insn = NEXT_INSN (insn);
185 : 0 : if (insn && NONJUMP_INSN_P (insn)
186 : 0 : && GET_CODE (PATTERN (insn)) == SEQUENCE)
187 : 0 : insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
188 : : }
189 : :
190 : 0 : return insn;
191 : : }
192 : :
/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced (read) by the insn.  Bits are only ever
   added to RES; the caller is expected to have cleared it.  If
   INCLUDE_DELAYED_EFFECTS is TRUE, resources used by the called routine
   will be included for CALL_INSNs.  */

void
mark_referenced_resources (rtx x, struct resources *res,
			   bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
    case DEBUG_INSN:
      /* Constants and addresses reference no machine resources.  */
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	mark_referenced_resources (SUBREG_REG (x), res, false);
      else
	{
	  /* A SUBREG of a hard register references exactly the hard
	     registers the subword occupies.  */
	  unsigned int regno = subreg_regno (x);
	  unsigned int last_regno = regno + subreg_nregs (x);

	  gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case REG:
      /* Only hard registers exist this late in compilation.  */
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (! MEM_READONLY_P (x))
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
	  || GET_CODE (x) == STRICT_LOW_PART)
	/* These only partially overwrite the destination, so the old
	   value is also an input.  */
	mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (MEM_P (x))
	mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
      /* A bare CLOBBER references nothing; it only kills.  */
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx_insn *insn = PREV_INSN (as_a <rtx_insn *> (x));
	  rtx_sequence *sequence = 0;
	  int seq_size = 0;
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = as_a <rtx_sequence *> (PATTERN (NEXT_INSN (insn)));
	      seq_size = sequence->len ();
	      gcc_assert (GET_CODE (sequence) == SEQUENCE);
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
	      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
		SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns nonzero.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    /* Mark each register in a USE of CALL_INSN_FUNCTION_USAGE,
	       unless the SET loading it was moved into one of this
	       call's own delay slots (slots 1..seq_size-1).  */
	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (sequence->element (i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  XEXP (XEXP (link, 0), 0)))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
					       res, false);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */
      gcc_fallthrough ();

    case INSN:
    case JUMP_INSN:

      if (GET_CODE (PATTERN (x)) == COND_EXEC)
	/* In addition to the usual references, also consider all outputs
	   as referenced, to compensate for mark_set_resources treating
	   them as killed.  This is similar to ZERO_EXTRACT / STRICT_LOW_PART
	   handling, except that we got a partial incidence instead of a partial
	   width.  */
	mark_set_resources (x, res, 0,
			    include_delayed_effects
			    ? MARK_SRC_DEST_CALL : MARK_SRC_DEST);

      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}
409 : :
/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.

   RES (in/out) holds the registers currently believed live; bits are
   cleared for registers proven dead on every path followed.
   SET and NEEDED are passed by value and accumulate the resources set
   and needed, respectively, between TARGET and the current scan point.
   JUMP_COUNT is the branch-following budget already consumed; the scan
   gives up once it reaches 10 (a conditional jump costs 5: the ++ plus 4).
   If JUMP_TARGET is nonnull, it receives the label of the first
   unconditional jump followed, and that jump insn is returned so the
   caller can recognize a threaded jump; otherwise NULL is returned.  */

static rtx_insn *
find_dead_or_set_registers (rtx_insn *target, struct resources *res,
			    rtx *jump_target, int jump_count,
			    struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx_insn *insn;
  rtx_insn *next_insn;
  rtx_insn *jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next_insn)
    {
      rtx_insn *this_insn = insn;

      next_insn = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
	 know where we might end up next.  That means that we have to
	 assume that whatever we have already marked as live really is
	 live.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  pending_dead_regs &= ~needed.regs;
	  res->regs &= ~pending_dead_regs;
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	case DEBUG_INSN:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (rtx_sequence *seq =
		     dyn_cast <rtx_sequence *> (PATTERN (insn)))
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < seq->len (); i++)
		{
		  this_insn = seq->insn (i);
		  if (JUMP_P (this_insn))
		    break;
		}
	    }
	  /* FALLTHROUGH: ordinary INSNs get the default processing.  */

	default:
	  break;
	}

      /* If THIS_INSN (the insn itself, or the jump found inside its
	 delay-slot SEQUENCE) is a jump, try to follow it.  */
      if (rtx_jump_insn *this_jump_insn =
	    dyn_cast <rtx_jump_insn *> (this_insn))
	{
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || ANY_RETURN_P (PATTERN (this_jump_insn)))
		{
		  /* Follow an unconditional jump by continuing the scan
		     at its target; a RETURN ends the scan after this
		     iteration.  */
		  rtx lab_or_return = this_jump_insn->jump_label ();
		  if (ANY_RETURN_P (lab_or_return))
		    next_insn = NULL;
		  else
		    next_insn = as_a <rtx_insn *> (lab_or_return);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, true);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      /* Restore the original INSN_FROM_TARGET_P bits.  */
		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  /* Registers set but not needed before the branch are
		     dead at the start of each successor path.  */
		  target_res = *res;
		  scratch = target_set.regs & ~needed.regs;
		  target_res.regs &= ~scratch;

		  fallthrough_res = *res;
		  scratch = set.regs & ~needed.regs;
		  fallthrough_res.regs &= ~scratch;

		  if (!ANY_RETURN_P (this_jump_insn->jump_label ()))
		    find_dead_or_set_registers
		      (this_jump_insn->jump_target (),
		       &target_res, 0, jump_count, target_set, needed);
		  find_dead_or_set_registers (next_insn,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  /* Keep live anything live on either path.  */
		  fallthrough_res.regs |= target_res.regs;
		  res->regs &= fallthrough_res.regs;
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      /* Anything set here without first being needed is dead.  */
      scratch = set.regs & ~needed.regs;
      res->regs &= ~scratch;
    }

  return jump_insn;
}
599 : :
600 : : /* Given X, a part of an insn, and a pointer to a `struct resource',
601 : : RES, indicate which resources are modified by the insn. If
602 : : MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
603 : : set by the called routine.
604 : :
605 : : If IN_DEST is nonzero, it means we are inside a SET. Otherwise,
606 : : objects are being referenced instead of set. */
607 : :
608 : : void
609 : 0 : mark_set_resources (rtx x, struct resources *res, int in_dest,
610 : : enum mark_resource_type mark_type)
611 : : {
612 : 0 : enum rtx_code code;
613 : 0 : int i, j;
614 : 0 : unsigned int r;
615 : 0 : const char *format_ptr;
616 : :
617 : 0 : restart:
618 : :
619 : 0 : code = GET_CODE (x);
620 : :
621 : 0 : switch (code)
622 : : {
623 : : case NOTE:
624 : : case BARRIER:
625 : : case CODE_LABEL:
626 : : case USE:
627 : : CASE_CONST_ANY:
628 : : case LABEL_REF:
629 : : case SYMBOL_REF:
630 : : case CONST:
631 : : case PC:
632 : : case DEBUG_INSN:
633 : : /* These don't set any resources. */
634 : : return;
635 : :
636 : 0 : case CALL_INSN:
637 : : /* Called routine modifies the condition code, memory, any registers
638 : : that aren't saved across calls, global registers and anything
639 : : explicitly CLOBBERed immediately after the CALL_INSN. */
640 : :
641 : 0 : if (mark_type == MARK_SRC_DEST_CALL)
642 : : {
643 : 0 : rtx_call_insn *call_insn = as_a <rtx_call_insn *> (x);
644 : 0 : rtx link;
645 : :
646 : 0 : res->cc = res->memory = 1;
647 : :
648 : 0 : res->regs |= insn_callee_abi (call_insn).full_reg_clobbers ();
649 : :
650 : 0 : for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
651 : 0 : link; link = XEXP (link, 1))
652 : 0 : if (GET_CODE (XEXP (link, 0)) == CLOBBER)
653 : 0 : mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
654 : : MARK_SRC_DEST);
655 : :
656 : : /* Check for a REG_SETJMP. If it exists, then we must
657 : : assume that this call can clobber any register. */
658 : 0 : if (find_reg_note (call_insn, REG_SETJMP, NULL))
659 : 0 : SET_HARD_REG_SET (res->regs);
660 : : }
661 : :
662 : : /* ... and also what its RTL says it modifies, if anything. */
663 : 0 : gcc_fallthrough ();
664 : :
665 : 0 : case JUMP_INSN:
666 : 0 : case INSN:
667 : :
668 : : /* An insn consisting of just a CLOBBER (or USE) is just for flow
669 : : and doesn't actually do anything, so we ignore it. */
670 : :
671 : 0 : if (mark_type != MARK_SRC_DEST_CALL
672 : : && INSN_SETS_ARE_DELAYED (as_a <rtx_insn *> (x)))
673 : : return;
674 : :
675 : 0 : x = PATTERN (x);
676 : 0 : if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
677 : 0 : goto restart;
678 : : return;
679 : :
680 : 0 : case SET:
681 : : /* If the source of a SET is a CALL, this is actually done by
682 : : the called routine. So only include it if we are to include the
683 : : effects of the calling routine. */
684 : :
685 : 0 : mark_set_resources (SET_DEST (x), res,
686 : : (mark_type == MARK_SRC_DEST_CALL
687 : 0 : || GET_CODE (SET_SRC (x)) != CALL),
688 : : mark_type);
689 : :
690 : 0 : mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
691 : 0 : return;
692 : :
693 : 0 : case CLOBBER:
694 : 0 : mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
695 : 0 : return;
696 : :
697 : 0 : case SEQUENCE:
698 : 0 : {
699 : 0 : rtx_sequence *seq = as_a <rtx_sequence *> (x);
700 : 0 : rtx control = seq->element (0);
701 : 0 : bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);
702 : :
703 : 0 : mark_set_resources (control, res, 0, mark_type);
704 : 0 : for (i = seq->len () - 1; i >= 0; --i)
705 : : {
706 : 0 : rtx elt = seq->element (i);
707 : 0 : if (!annul_p && INSN_FROM_TARGET_P (elt))
708 : 0 : mark_set_resources (elt, res, 0, mark_type);
709 : : }
710 : : }
711 : : return;
712 : :
713 : 0 : case POST_INC:
714 : 0 : case PRE_INC:
715 : 0 : case POST_DEC:
716 : 0 : case PRE_DEC:
717 : 0 : mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
718 : 0 : return;
719 : :
720 : 0 : case PRE_MODIFY:
721 : 0 : case POST_MODIFY:
722 : 0 : mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
723 : 0 : mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
724 : 0 : mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
725 : 0 : return;
726 : :
727 : 0 : case SIGN_EXTRACT:
728 : 0 : case ZERO_EXTRACT:
729 : 0 : mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
730 : 0 : mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
731 : 0 : mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
732 : 0 : return;
733 : :
734 : 0 : case MEM:
735 : 0 : if (in_dest)
736 : : {
737 : 0 : res->memory = 1;
738 : 0 : res->volatil |= MEM_VOLATILE_P (x);
739 : : }
740 : :
741 : 0 : mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
742 : 0 : return;
743 : :
744 : 0 : case SUBREG:
745 : 0 : if (in_dest)
746 : : {
747 : 0 : if (!REG_P (SUBREG_REG (x)))
748 : : mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
749 : : else
750 : : {
751 : 0 : unsigned int regno = subreg_regno (x);
752 : 0 : unsigned int last_regno = regno + subreg_nregs (x);
753 : :
754 : 0 : gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
755 : 0 : for (r = regno; r < last_regno; r++)
756 : 0 : SET_HARD_REG_BIT (res->regs, r);
757 : : }
758 : : }
759 : : return;
760 : :
761 : 0 : case REG:
762 : 0 : if (in_dest)
763 : : {
764 : 0 : gcc_assert (HARD_REGISTER_P (x));
765 : 0 : add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
766 : : }
767 : : return;
768 : :
769 : 0 : case UNSPEC_VOLATILE:
770 : 0 : case ASM_INPUT:
771 : : /* Traditional asm's are always volatile. */
772 : 0 : res->volatil = 1;
773 : 0 : return;
774 : :
775 : 0 : case TRAP_IF:
776 : 0 : res->volatil = 1;
777 : 0 : break;
778 : :
779 : 0 : case ASM_OPERANDS:
780 : 0 : res->volatil |= MEM_VOLATILE_P (x);
781 : :
782 : : /* For all ASM_OPERANDS, we must traverse the vector of input operands.
783 : : We cannot just fall through here since then we would be confused
784 : : by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
785 : : traditional asms unlike their normal usage. */
786 : :
787 : 0 : for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
788 : 0 : mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
789 : : MARK_SRC_DEST);
790 : : return;
791 : :
792 : : default:
793 : : break;
794 : : }
795 : :
796 : : /* Process each sub-expression and flag what it needs. */
797 : 0 : format_ptr = GET_RTX_FORMAT (code);
798 : 0 : for (i = 0; i < GET_RTX_LENGTH (code); i++)
799 : 0 : switch (*format_ptr++)
800 : : {
801 : 0 : case 'e':
802 : 0 : mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
803 : 0 : break;
804 : :
805 : : case 'E':
806 : 0 : for (j = 0; j < XVECLEN (x, i); j++)
807 : 0 : mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
808 : : break;
809 : : }
810 : : }
811 : :
812 : : /* Return TRUE if INSN is a return, possibly with a filled delay slot. */
813 : :
814 : : static bool
815 : 0 : return_insn_p (const_rtx insn)
816 : : {
817 : 0 : if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
818 : : return true;
819 : :
820 : 0 : if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
821 : 0 : return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));
822 : :
823 : : return false;
824 : : }
825 : :
826 : : /* Set the resources that are live at TARGET.
827 : :
828 : : If TARGET is zero, we refer to the end of the current function and can
829 : : return our precomputed value.
830 : :
831 : : Otherwise, we try to find out what is live by consulting the basic block
832 : : information. This is tricky, because we must consider the actions of
833 : : reload and jump optimization, which occur after the basic block information
834 : : has been computed.
835 : :
 836                 :             :    Accordingly, we proceed as follows:
837 : :
838 : : We find the previous BARRIER and look at all immediately following labels
839 : : (with no intervening active insns) to see if any of them start a basic
840 : : block. If we hit the start of the function first, we use block 0.
841 : :
842 : : Once we have found a basic block and a corresponding first insn, we can
843 : : accurately compute the live status (by starting at a label following a
844 : : BARRIER, we are immune to actions taken by reload and jump.) Then we
845 : : scan all insns between that point and our target. For each CLOBBER (or
846 : : for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
847 : : registers are dead. For a SET, mark them as live.
848 : :
849 : : We have to be careful when using REG_DEAD notes because they are not
850 : : updated by such things as find_equiv_reg. So keep track of registers
851 : : marked as dead that haven't been assigned to, and mark them dead at the
852 : : next CODE_LABEL since reload and jump won't propagate values across labels.
853 : :
854 : : If we cannot find the start of a basic block (should be a very rare
855 : : case, if it can happen at all), mark everything as potentially live.
856 : :
857 : : Next, scan forward from TARGET looking for things set or clobbered
858 : : before they are used. These are not live.
859 : :
860 : : Because we can be called many times on the same target, save our results
861 : : in a hash table indexed by INSN_UID. This is only done if the function
862 : : init_resource_info () was invoked before we are called. */
863 : :
864 : : void
865 : 0 : mark_target_live_regs (rtx_insn *insns, rtx target_maybe_return, struct resources *res)
866 : : {
867 : 0 : int b = -1;
868 : 0 : unsigned int i;
869 : 0 : struct target_info *tinfo = NULL;
870 : 0 : rtx_insn *insn;
871 : 0 : rtx jump_target;
872 : 0 : HARD_REG_SET scratch;
873 : 0 : struct resources set, needed;
874 : :
875 : : /* Handle end of function. */
876 : 0 : if (target_maybe_return == 0 || ANY_RETURN_P (target_maybe_return))
877 : : {
878 : 0 : *res = end_of_function_needs;
879 : 0 : return;
880 : : }
881 : :
882 : : /* We've handled the case of RETURN/SIMPLE_RETURN; we should now have an
883 : : instruction. */
884 : 0 : rtx_insn *target = as_a <rtx_insn *> (target_maybe_return);
885 : :
886 : : /* Handle return insn. */
887 : 0 : if (return_insn_p (target))
888 : : {
889 : 0 : *res = end_of_function_needs;
890 : 0 : mark_referenced_resources (target, res, false);
891 : 0 : return;
892 : : }
893 : :
894 : : /* We have to assume memory is needed, but the CC isn't. */
895 : 0 : res->memory = 1;
896 : 0 : res->volatil = 0;
897 : 0 : res->cc = 0;
898 : :
899 : : /* See if we have computed this value already. */
900 : 0 : if (target_hash_table != NULL)
901 : : {
902 : 0 : for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
903 : 0 : tinfo; tinfo = tinfo->next)
904 : 0 : if (tinfo->uid == INSN_UID (target))
905 : : break;
906 : :
907 : : /* Start by getting the basic block number. If we have saved
908 : : information, we can get it from there unless the insn at the
909 : : start of the basic block has been deleted. */
910 : 0 : if (tinfo && tinfo->block != -1
911 : 0 : && ! BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, tinfo->block))->deleted ())
912 : : b = tinfo->block;
913 : : }
914 : :
915 : : if (b == -1)
916 : 0 : b = find_basic_block (target, param_max_delay_slot_live_search);
917 : :
918 : 0 : if (target_hash_table != NULL)
919 : : {
920 : 0 : if (tinfo)
921 : : {
922 : : /* If the information is up-to-date, use it. Otherwise, we will
923 : : update it below. */
924 : 0 : if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
925 : : {
926 : 0 : res->regs = tinfo->live_regs;
927 : 0 : return;
928 : : }
929 : : }
930 : : else
931 : : {
932 : : /* Allocate a place to put our results and chain it into the
933 : : hash table. */
934 : 0 : tinfo = XNEW (struct target_info);
935 : 0 : tinfo->uid = INSN_UID (target);
936 : 0 : tinfo->block = b;
937 : 0 : tinfo->next
938 : 0 : = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
939 : 0 : target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
940 : : }
941 : : }
942 : :
943 : 0 : CLEAR_HARD_REG_SET (pending_dead_regs);
944 : :
945 : : /* If we found a basic block, get the live registers from it and update
946 : : them with anything set or killed between its start and the insn before
947 : : TARGET; this custom life analysis is really about registers so we need
948 : : to use the LR problem. Otherwise, we must assume everything is live. */
949 : 0 : if (b != -1)
950 : : {
951 : 0 : regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
952 : 0 : rtx_insn *start_insn, *stop_insn;
953 : 0 : df_ref def;
954 : :
955 : : /* Compute hard regs live at start of block. */
956 : 0 : REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
957 : 0 : FOR_EACH_ARTIFICIAL_DEF (def, b)
958 : 0 : if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
959 : 0 : SET_HARD_REG_BIT (current_live_regs, DF_REF_REGNO (def));
960 : :
961 : : /* Get starting and ending insn, handling the case where each might
962 : : be a SEQUENCE. */
963 : 0 : start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index ?
964 : 0 : insns : BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, b)));
965 : 0 : stop_insn = target;
966 : :
967 : 0 : if (NONJUMP_INSN_P (start_insn)
968 : 0 : && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
969 : 0 : start_insn = as_a <rtx_sequence *> (PATTERN (start_insn))->insn (0);
970 : :
971 : 0 : if (NONJUMP_INSN_P (stop_insn)
972 : 0 : && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
973 : 0 : stop_insn = next_insn (PREV_INSN (stop_insn));
974 : :
975 : 0 : for (insn = start_insn; insn != stop_insn;
976 : 0 : insn = next_insn_no_annul (insn))
977 : : {
978 : 0 : rtx link;
979 : 0 : rtx_insn *real_insn = insn;
980 : 0 : enum rtx_code code = GET_CODE (insn);
981 : :
982 : 0 : if (DEBUG_INSN_P (insn))
983 : 0 : continue;
984 : :
985 : : /* If this insn is from the target of a branch, it isn't going to
986 : : be used in the sequel. If it is used in both cases, this
987 : : test will not be true. */
988 : 0 : if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
989 : 0 : && INSN_FROM_TARGET_P (insn))
990 : 0 : continue;
991 : :
992 : : /* If this insn is a USE made by update_block, we care about the
993 : : underlying insn. */
994 : 0 : if (code == INSN
995 : 0 : && GET_CODE (PATTERN (insn)) == USE
996 : 0 : && INSN_P (XEXP (PATTERN (insn), 0)))
997 : 0 : real_insn = as_a <rtx_insn *> (XEXP (PATTERN (insn), 0));
998 : :
999 : 0 : if (CALL_P (real_insn))
1000 : : {
1001 : : /* Values in call-clobbered registers survive a COND_EXEC CALL
1002 : : if that is not executed; this matters for resoure use because
1003 : : they may be used by a complementarily (or more strictly)
1004 : : predicated instruction, or if the CALL is NORETURN. */
1005 : 0 : if (GET_CODE (PATTERN (real_insn)) != COND_EXEC)
1006 : : {
1007 : 0 : HARD_REG_SET regs_invalidated_by_this_call
1008 : 0 : = insn_callee_abi (real_insn).full_reg_clobbers ();
1009 : : /* CALL clobbers all call-used regs that aren't fixed except
1010 : : sp, ap, and fp. Do this before setting the result of the
1011 : : call live. */
1012 : 0 : current_live_regs &= ~regs_invalidated_by_this_call;
1013 : : }
1014 : :
1015 : : /* A CALL_INSN sets any global register live, since it may
1016 : : have been modified by the call. */
1017 : 0 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1018 : 0 : if (global_regs[i])
1019 : 0 : SET_HARD_REG_BIT (current_live_regs, i);
1020 : : }
1021 : :
1022 : : /* Mark anything killed in an insn to be deadened at the next
1023 : : label. Ignore USE insns; the only REG_DEAD notes will be for
1024 : : parameters. But they might be early. A CALL_INSN will usually
1025 : : clobber registers used for parameters. It isn't worth bothering
1026 : : with the unlikely case when it won't. */
1027 : 0 : if ((NONJUMP_INSN_P (real_insn)
1028 : 0 : && GET_CODE (PATTERN (real_insn)) != USE
1029 : 0 : && GET_CODE (PATTERN (real_insn)) != CLOBBER)
1030 : 0 : || JUMP_P (real_insn)
1031 : 0 : || CALL_P (real_insn))
1032 : : {
1033 : 0 : for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
1034 : 0 : if (REG_NOTE_KIND (link) == REG_DEAD
1035 : 0 : && REG_P (XEXP (link, 0))
1036 : 0 : && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
1037 : 0 : add_to_hard_reg_set (&pending_dead_regs,
1038 : 0 : GET_MODE (XEXP (link, 0)),
1039 : 0 : REGNO (XEXP (link, 0)));
1040 : :
1041 : 0 : note_stores (real_insn, update_live_status, NULL);
1042 : :
1043 : : /* If any registers were unused after this insn, kill them.
1044 : : These notes will always be accurate. */
1045 : 0 : for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
1046 : 0 : if (REG_NOTE_KIND (link) == REG_UNUSED
1047 : 0 : && REG_P (XEXP (link, 0))
1048 : 0 : && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
1049 : 0 : remove_from_hard_reg_set (¤t_live_regs,
1050 : 0 : GET_MODE (XEXP (link, 0)),
1051 : 0 : REGNO (XEXP (link, 0)));
1052 : : }
1053 : :
1054 : 0 : else if (LABEL_P (real_insn))
1055 : : {
1056 : 0 : basic_block bb;
1057 : :
1058 : : /* A label clobbers the pending dead registers since neither
1059 : : reload nor jump will propagate a value across a label. */
1060 : 0 : current_live_regs &= ~pending_dead_regs;
1061 : 0 : CLEAR_HARD_REG_SET (pending_dead_regs);
1062 : :
1063 : : /* We must conservatively assume that all registers that used
1064 : : to be live here still are. The fallthrough edge may have
1065 : : left a live register uninitialized. */
1066 : 0 : bb = BLOCK_FOR_INSN (real_insn);
1067 : 0 : if (bb)
1068 : : {
1069 : : HARD_REG_SET extra_live;
1070 : :
1071 : 0 : REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
1072 : 0 : current_live_regs |= extra_live;
1073 : : }
1074 : : }
1075 : :
1076 : : /* The beginning of the epilogue corresponds to the end of the
1077 : : RTL chain when there are no epilogue insns. Certain resources
1078 : : are implicitly required at that point. */
1079 : 0 : else if (NOTE_P (real_insn)
1080 : 0 : && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
1081 : 0 : current_live_regs |= start_of_epilogue_needs.regs;
1082 : : }
1083 : :
1084 : 0 : res->regs = current_live_regs;
1085 : 0 : if (tinfo != NULL)
1086 : : {
1087 : 0 : tinfo->block = b;
1088 : 0 : tinfo->bb_tick = bb_ticks[b];
1089 : : }
1090 : : }
1091 : : else
1092 : : /* We didn't find the start of a basic block. Assume everything
1093 : : in use. This should happen only extremely rarely. */
1094 : 0 : SET_HARD_REG_SET (res->regs);
1095 : :
1096 : 0 : CLEAR_RESOURCE (&set);
1097 : 0 : CLEAR_RESOURCE (&needed);
1098 : :
1099 : 0 : rtx_insn *jump_insn = find_dead_or_set_registers (target, res, &jump_target,
1100 : : 0, set, needed);
1101 : :
1102 : : /* If we hit an unconditional branch, we have another way of finding out
1103 : : what is live: we can see what is live at the branch target and include
1104 : : anything used but not set before the branch. We add the live
1105 : : resources found using the test below to those found until now. */
1106 : :
1107 : 0 : if (jump_insn)
1108 : : {
1109 : 0 : struct resources new_resources;
1110 : 0 : rtx_insn *stop_insn = next_active_insn (jump_insn);
1111 : :
1112 : 0 : if (!ANY_RETURN_P (jump_target))
1113 : 0 : jump_target = next_active_insn (as_a<rtx_insn *> (jump_target));
1114 : 0 : mark_target_live_regs (insns, jump_target, &new_resources);
1115 : 0 : CLEAR_RESOURCE (&set);
1116 : 0 : CLEAR_RESOURCE (&needed);
1117 : :
1118 : : /* Include JUMP_INSN in the needed registers. */
1119 : 0 : for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
1120 : : {
1121 : 0 : mark_referenced_resources (insn, &needed, true);
1122 : :
1123 : 0 : scratch = needed.regs & ~set.regs;
1124 : 0 : new_resources.regs |= scratch;
1125 : :
1126 : 0 : mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
1127 : : }
1128 : :
1129 : 0 : res->regs |= new_resources.regs;
1130 : : }
1131 : :
1132 : 0 : if (tinfo != NULL)
1133 : 0 : tinfo->live_regs = res->regs;
1134 : : }
1135 : :
1136 : : /* Initialize the resources required by mark_target_live_regs ().
1137 : : This should be invoked before the first call to mark_target_live_regs. */
1138 : :
void
init_resource_info (rtx_insn *epilogue_insn)
{
  int i;
  basic_block bb;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.
     The stack pointer is needed unless EXIT_IGNORE_STACK is true
     and there is an epilogue that restores the original stack pointer
     from the frame pointer.  Registers used to return the function value
     are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
      /* On targets where the hard frame pointer is a distinct register,
	 it is needed as well.  */
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	SET_HARD_REG_BIT (end_of_function_needs.regs,
			  HARD_FRAME_POINTER_REGNUM);
    }
  if (!(frame_pointer_needed
	&& EXIT_IGNORE_STACK
	&& epilogue_insn
	&& !crtl->sp_is_unchanging))
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  /* Registers used to return the function value must stay live.  */
  if (crtl->return_rtx != 0)
    mark_referenced_resources (crtl->return_rtx,
			       &end_of_function_needs, true);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || df_epilogue_uses_p (i))
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  /* Snapshot BEFORE scanning the epilogue below; the scan mutates
     end_of_function_needs, so the order of these statements matters.  */
  start_of_epilogue_needs = end_of_function_needs;

  /* Accumulate everything the epilogue insns set, stopping at the
     return insn (if any).  */
  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
			  MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
	break;
    }

  /* Filter-out the flags register from those additionally required
     registers.  */
  if (targetm.flags_regnum != INVALID_REGNUM)
    CLEAR_HARD_REG_BIT (end_of_function_needs.regs, targetm.flags_regnum);

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
}
1218 : :
1219 : : /* Free up the resources allocated to mark_target_live_regs (). This
1220 : : should be invoked after the last call to mark_target_live_regs (). */
1221 : :
1222 : : void
1223 : 0 : free_resource_info (void)
1224 : : {
1225 : 0 : basic_block bb;
1226 : :
1227 : 0 : if (target_hash_table != NULL)
1228 : : {
1229 : : int i;
1230 : :
1231 : 0 : for (i = 0; i < TARGET_HASH_PRIME; ++i)
1232 : : {
1233 : 0 : struct target_info *ti = target_hash_table[i];
1234 : :
1235 : 0 : while (ti)
1236 : : {
1237 : 0 : struct target_info *next = ti->next;
1238 : 0 : free (ti);
1239 : 0 : ti = next;
1240 : : }
1241 : : }
1242 : :
1243 : 0 : free (target_hash_table);
1244 : 0 : target_hash_table = NULL;
1245 : : }
1246 : :
1247 : 0 : if (bb_ticks != NULL)
1248 : : {
1249 : 0 : free (bb_ticks);
1250 : 0 : bb_ticks = NULL;
1251 : : }
1252 : :
1253 : 0 : FOR_EACH_BB_FN (bb, cfun)
1254 : 0 : if (LABEL_P (BB_HEAD (bb)))
1255 : 0 : BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL;
1256 : 0 : }
1257 : :
1258 : : /* Clear any hashed information that we have stored for INSN. */
1259 : :
1260 : : void
1261 : 0 : clear_hashed_info_for_insn (rtx_insn *insn)
1262 : : {
1263 : 0 : struct target_info *tinfo;
1264 : :
1265 : 0 : if (target_hash_table != NULL)
1266 : : {
1267 : 0 : for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
1268 : 0 : tinfo; tinfo = tinfo->next)
1269 : 0 : if (tinfo->uid == INSN_UID (insn))
1270 : : break;
1271 : :
1272 : 0 : if (tinfo)
1273 : 0 : tinfo->block = -1;
1274 : : }
1275 : 0 : }
1276 : :
1277 : : /* Clear any hashed information that we have stored for instructions
1278 : : between INSN and the next BARRIER that follow a JUMP or a LABEL. */
1279 : :
1280 : : void
1281 : 0 : clear_hashed_info_until_next_barrier (rtx_insn *insn)
1282 : : {
1283 : 0 : while (insn && !BARRIER_P (insn))
1284 : : {
1285 : 0 : if (JUMP_P (insn) || LABEL_P (insn))
1286 : : {
1287 : 0 : rtx_insn *next = next_active_insn (insn);
1288 : 0 : if (next)
1289 : 0 : clear_hashed_info_for_insn (next);
1290 : : }
1291 : :
1292 : 0 : insn = next_nonnote_insn (insn);
1293 : : }
1294 : 0 : }
1295 : :
1296 : : /* Increment the tick count for the basic block that contains INSN. */
1297 : :
1298 : : void
1299 : 0 : incr_ticks_for_insn (rtx_insn *insn)
1300 : : {
1301 : 0 : int b = find_basic_block (insn, param_max_delay_slot_live_search);
1302 : :
1303 : 0 : if (b != -1)
1304 : 0 : bb_ticks[b]++;
1305 : 0 : }
1306 : :
1307 : : /* Add TRIAL to the set of resources used at the end of the current
1308 : : function. */
void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  /* Accumulate TRIAL's referenced resources into end_of_function_needs;
     INCLUDE_DELAYED_EFFECTS is forwarded unchanged to
     mark_referenced_resources.  */
  mark_referenced_resources (trial, &end_of_function_needs,
			     include_delayed_effects);
}
|