Line data Source code
1 : /* Reload pseudo regs into hard regs for insns that require hard regs.
2 : Copyright (C) 1987-2026 Free Software Foundation, Inc.
3 :
4 : This file is part of GCC.
5 :
6 : GCC is free software; you can redistribute it and/or modify it under
7 : the terms of the GNU General Public License as published by the Free
8 : Software Foundation; either version 3, or (at your option) any later
9 : version.
10 :
11 : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : for more details.
15 :
16 : You should have received a copy of the GNU General Public License
17 : along with GCC; see the file COPYING3. If not see
18 : <http://www.gnu.org/licenses/>. */
19 :
20 : #include "config.h"
21 : #include "system.h"
22 : #include "coretypes.h"
23 : #include "backend.h"
24 : #include "target.h"
25 : #include "rtl.h"
26 : #include "tree.h"
27 : #include "predict.h"
28 : #include "df.h"
29 : #include "memmodel.h"
30 : #include "tm_p.h"
31 : #include "optabs.h"
32 : #include "regs.h"
33 : #include "ira.h"
34 : #include "recog.h"
35 :
36 : #include "rtl-error.h"
37 : #include "expr.h"
38 : #include "addresses.h"
39 : #include "cfgrtl.h"
40 : #include "cfgbuild.h"
41 : #include "reload.h"
42 : #include "except.h"
43 : #include "dumpfile.h"
44 : #include "rtl-iter.h"
45 : #include "function-abi.h"
46 :
47 : /* This file contains the reload pass of the compiler, which is
48 : run after register allocation has been done. It checks that
49 : each insn is valid (operands required to be in registers really
50 : are in registers of the proper class) and fixes up invalid ones
51 : by copying values temporarily into registers for the insns
52 : that need them.
53 :
54 : The results of register allocation are described by the vector
55 : reg_renumber; the insns still contain pseudo regs, but reg_renumber
56 : can be used to find which hard reg, if any, a pseudo reg is in.
57 :
58 : The technique we always use is to free up a few hard regs that are
59 : called ``reload regs'', and for each place where a pseudo reg
60 : must be in a hard reg, copy it temporarily into one of the reload regs.
61 :
62 : Reload regs are allocated locally for every instruction that needs
63 : reloads. When there are pseudos which are allocated to a register that
64 : has been chosen as a reload reg, such pseudos must be ``spilled''.
65 : This means that they go to other hard regs, or to stack slots if no other
66 : available hard regs can be found. Spilling can invalidate more
67 : insns, requiring additional need for reloads, so we must keep checking
68 : until the process stabilizes.
69 :
70 : For machines with different classes of registers, we must keep track
71 : of the register class needed for each reload, and make sure that
72 : we allocate enough reload registers of each class.
73 :
74 : The file reload.cc contains the code that checks one insn for
75 : validity and reports the reloads that it needs. This file
76 : is in charge of scanning the entire rtl code, accumulating the
77 : reload needs, spilling, assigning reload registers to use for
78 : fixing up each insn, and generating the new insns to copy values
79 : into the reload registers. */
80 :
81 : struct target_reload default_target_reload;
82 : #if SWITCHABLE_TARGET
83 : struct target_reload *this_target_reload = &default_target_reload;
84 : #endif
85 :
86 : #define spill_indirect_levels \
87 : (this_target_reload->x_spill_indirect_levels)
88 :
89 : /* During reload_as_needed, element N contains a REG rtx for the hard reg
90 : into which reg N has been reloaded (perhaps for a previous insn). */
91 : static rtx *reg_last_reload_reg;
92 :
93 : /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
94 : for an output reload that stores into reg N. */
95 : static regset_head reg_has_output_reload;
96 :
97 : /* Indicates which hard regs are reload-registers for an output reload
98 : in the current insn. */
99 : static HARD_REG_SET reg_is_output_reload;
100 :
101 : /* Widest mode in which each pseudo reg is referred to (via subreg). */
102 : static machine_mode *reg_max_ref_mode;
103 :
104 : /* Vector to remember old contents of reg_renumber before spilling. */
105 : static short *reg_old_renumber;
106 :
107 : /* During reload_as_needed, element N contains the last pseudo regno reloaded
108 : into hard register N. If that pseudo reg occupied more than one register,
109 : reg_reloaded_contents points to that pseudo for each spill register in
110 : use; all of these must remain set for an inheritance to occur. */
111 : static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
112 :
113 : /* During reload_as_needed, element N contains the insn for which
114 : hard register N was last used. Its contents are significant only
115 : when reg_reloaded_valid is set for this register. */
116 : static rtx_insn *reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
117 :
118 : /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
119 : static HARD_REG_SET reg_reloaded_valid;
120 : /* Indicate if the register was dead at the end of the reload.
121 : This is only valid if reg_reloaded_contents is set and valid. */
122 : static HARD_REG_SET reg_reloaded_dead;
123 :
124 : /* Number of spill-regs so far; number of valid elements of spill_regs. */
125 : static int n_spills;
126 :
127 : /* In parallel with spill_regs, contains REG rtx's for those regs.
128 : Holds the last rtx used for any given reg, or 0 if it has never
129 : been used for spilling yet. This rtx is reused, provided it has
130 : the proper mode. */
131 : static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
132 :
133 : /* In parallel with spill_regs, contains nonzero for a spill reg
134 : that was stored after the last time it was used.
135 : The precise value is the insn generated to do the store. */
136 : static rtx_insn *spill_reg_store[FIRST_PSEUDO_REGISTER];
137 :
138 : /* This is the register that was stored with spill_reg_store. This is a
139 : copy of reload_out / reload_out_reg when the value was stored; if
140 : reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
141 : static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
142 :
143 : /* This table is the inverse mapping of spill_regs:
144 : indexed by hard reg number,
145 : it contains the position of that reg in spill_regs,
146 : or -1 for something that is not in spill_regs.
147 :
148 : ?!? This is no longer accurate. */
149 : static short spill_reg_order[FIRST_PSEUDO_REGISTER];
150 :
151 : /* This reg set indicates registers that can't be used as spill registers for
152 : the currently processed insn. These are the hard registers which are live
153 : during the insn, but not allocated to pseudos, as well as fixed
154 : registers. */
155 : static HARD_REG_SET bad_spill_regs;
156 :
157 : /* These are the hard registers that can't be used as spill register for any
158 : insn. This includes registers used for user variables and registers that
159 : we can't eliminate. A register that appears in this set also can't be used
160 : to retry register allocation. */
161 : static HARD_REG_SET bad_spill_regs_global;
162 :
163 : /* Describes order of use of registers for reloading
164 : of spilled pseudo-registers. `n_spills' is the number of
165 : elements that are actually valid; new ones are added at the end.
166 :
167 : Both spill_regs and spill_reg_order are used on two occasions:
168 : once during find_reload_regs, where they keep track of the spill registers
169 : for a single insn, but also during reload_as_needed where they show all
170 : the registers ever used by reload. For the latter case, the information
171 : is calculated during finish_spills. */
172 : static short spill_regs[FIRST_PSEUDO_REGISTER];
173 :
174 : /* This vector of reg sets indicates, for each pseudo, which hard registers
175 : may not be used for retrying global allocation because the register was
176 : formerly spilled from one of them. If we allowed reallocating a pseudo to
177 : a register that it was already allocated to, reload might not
178 : terminate. */
179 : static HARD_REG_SET *pseudo_previous_regs;
180 :
181 : /* This vector of reg sets indicates, for each pseudo, which hard
182 : registers may not be used for retrying global allocation because they
183 : are used as spill registers during one of the insns in which the
184 : pseudo is live. */
185 : static HARD_REG_SET *pseudo_forbidden_regs;
186 :
187 : /* All hard regs that have been used as spill registers for any insn are
188 : marked in this set. */
189 : static HARD_REG_SET used_spill_regs;
190 :
191 : /* Index of last register assigned as a spill register. We allocate in
192 : a round-robin fashion. */
193 : static int last_spill_reg;
194 :
195 : /* Record the stack slot for each spilled hard register. */
196 : static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
197 :
198 : /* Width allocated so far for that stack slot. */
199 : static poly_uint64 spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
200 :
201 : /* Record which pseudos needed to be spilled. */
202 : static regset_head spilled_pseudos;
203 :
204 : /* Record which pseudos changed their allocation in finish_spills. */
205 : static regset_head changed_allocation_pseudos;
206 :
207 : /* Used for communication between order_regs_for_reload and count_pseudo.
208 : Used to avoid counting one pseudo twice. */
209 : static regset_head pseudos_counted;
210 :
211 : /* First uid used by insns created by reload in this function.
212 : Used in find_equiv_reg. */
213 : int reload_first_uid;
214 :
215 : /* Flag set by local-alloc or global-alloc if anything is live in
216 : a call-clobbered reg across calls. */
217 : int caller_save_needed;
218 :
219 : /* Set to 1 while reload_as_needed is operating.
220 : Required by some machines to handle any generated moves differently. */
221 : int reload_in_progress = 0;
222 :
223 : /* This obstack is used for allocation of rtl during register elimination.
224 : The allocated storage can be freed once find_reloads has processed the
225 : insn. */
226 : static struct obstack reload_obstack;
227 :
228 : /* Points to the beginning of the reload_obstack. All insn_chain structures
229 : are allocated first. */
230 : static char *reload_startobj;
231 :
232 : /* The point after all insn_chain structures. Used to quickly deallocate
233 : memory allocated in copy_reloads during calculate_needs_all_insns. */
234 : static char *reload_firstobj;
235 :
236 : /* This points before all local rtl generated by register elimination.
237 : Used to quickly free all memory after processing one insn. */
238 : static char *reload_insn_firstobj;
239 :
240 : /* List of insn_chain instructions, one for every insn that reload needs to
241 : examine. */
242 : class insn_chain *reload_insn_chain;
243 :
244 : /* TRUE if we potentially left dead insns in the insn stream and want to
245 : run DCE immediately after reload, FALSE otherwise. */
246 : static bool need_dce;
247 :
248 : /* List of all insns needing reloads. */
249 : static class insn_chain *insns_need_reload;
250 :
251 : /* This structure is used to record information about register eliminations.
252 : Each array entry describes one possible way of eliminating a register
253 : in favor of another. If there is more than one way of eliminating a
254 : particular register, the most preferred should be specified first. */
255 :
256 : struct elim_table
257 : {
258 : int from; /* Register number to be eliminated. */
259 : int to; /* Register number used as replacement. */
260 : poly_int64 initial_offset; /* Initial difference between values. */
261 : int can_eliminate; /* Nonzero if this elimination can be done. */
262 : int can_eliminate_previous; /* Value returned by TARGET_CAN_ELIMINATE
263 : target hook in previous scan over insns
264 : made by reload. */
265 : poly_int64 offset; /* Current offset between the two regs. */
266 : poly_int64 previous_offset; /* Offset at end of previous insn. */
267 : int ref_outside_mem; /* "to" has been referenced outside a MEM. */
268 : rtx from_rtx; /* REG rtx for the register to be eliminated.
269 : We cannot simply compare the number since
270 : we might then spuriously replace a hard
271 : register corresponding to a pseudo
272 : assigned to the reg to be eliminated. */
273 : rtx to_rtx; /* REG rtx for the replacement. */
274 : };
275 :
276 : static struct elim_table *reg_eliminate = 0;
277 :
278 : /* This is an intermediate structure to initialize the table. It has
279 : exactly the members provided by ELIMINABLE_REGS. */
280 : static const struct elim_table_1
281 : {
282 : const int from;
283 : const int to;
284 : } reg_eliminate_1[] =
285 :
286 : /* Reload and LRA don't agree on how a multi-register frame pointer
287 : is represented for elimination. See avr.h for a use case. */
288 : #ifdef RELOAD_ELIMINABLE_REGS
289 : RELOAD_ELIMINABLE_REGS;
290 : #else
291 : ELIMINABLE_REGS;
292 : #endif
293 :
294 : #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
295 :
296 : /* Record the number of pending eliminations that have an offset not equal
297 : to their initial offset. If nonzero, we use a new copy of each
298 : replacement result in any insns encountered. */
299 : int num_not_at_initial_offset;
300 :
301 : /* Count the number of registers that we may be able to eliminate. */
302 : static int num_eliminable;
303 : /* And the number of registers that are equivalent to a constant that
304 : can be eliminated to frame_pointer / arg_pointer + constant. */
305 : static int num_eliminable_invariants;
306 :
307 : /* For each label, we record the offset of each elimination. If we reach
308 : a label by more than one path and an offset differs, we cannot do the
309 : elimination. This information is indexed by the difference of the
310 : number of the label and the first label number. We can't offset the
311 : pointer itself as this can cause problems on machines with segmented
312 : memory. The first table is an array of flags that records whether we
313 : have yet encountered a label and the second table is an array of arrays,
314 : one entry in the latter array for each elimination. */
315 :
316 : static int first_label_num;
317 : static char *offsets_known_at;
318 : static poly_int64 (*offsets_at)[NUM_ELIMINABLE_REGS];
319 :
320 : vec<reg_equivs_t, va_gc> *reg_equivs;
321 :
322 : /* Stack of addresses where an rtx has been changed. We can undo the
323 : changes by popping items off the stack and restoring the original
324 : value at each location.
325 :
326 : We use this simplistic undo capability rather than copy_rtx as copy_rtx
327 : will not make a deep copy of a normally sharable rtx, such as
328 : (const (plus (symbol_ref) (const_int))). If such an expression appears
329 : as R1 in gen_reload_chain_without_interm_reg_p, then a shared
330 : rtx expression would be changed. See PR 42431. */
331 :
332 : typedef rtx *rtx_p;
333 : static vec<rtx_p> substitute_stack;
334 :
335 : /* Number of labels in the current function. */
336 :
337 : static int num_labels;
338 :
339 : static void replace_pseudos_in (rtx *, machine_mode, rtx);
340 : static void maybe_fix_stack_asms (void);
341 : static void copy_reloads (class insn_chain *);
342 : static void calculate_needs_all_insns (int);
343 : static int find_reg (class insn_chain *, int);
344 : static void find_reload_regs (class insn_chain *);
345 : static void select_reload_regs (void);
346 : static void delete_caller_save_insns (void);
347 :
348 : static void spill_failure (rtx_insn *, enum reg_class);
349 : static void count_spilled_pseudo (int, int, int);
350 : static void delete_dead_insn (rtx_insn *);
351 : static void alter_reg (int, int, bool);
352 : static void set_label_offsets (rtx, rtx_insn *, int);
353 : static void check_eliminable_occurrences (rtx);
354 : static void elimination_effects (rtx, machine_mode);
355 : static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool);
356 : static int eliminate_regs_in_insn (rtx_insn *, int);
357 : static void update_eliminable_offsets (void);
358 : static void mark_not_eliminable (rtx, const_rtx, void *);
359 : static void set_initial_elim_offsets (void);
360 : static bool verify_initial_elim_offsets (void);
361 : static void set_initial_label_offsets (void);
362 : static void set_offsets_for_label (rtx_insn *);
363 : static void init_eliminable_invariants (rtx_insn *, bool);
364 : static void init_elim_table (void);
365 : static void free_reg_equiv (void);
366 : static void update_eliminables (HARD_REG_SET *);
367 : static bool update_eliminables_and_spill (void);
368 : static void elimination_costs_in_insn (rtx_insn *);
369 : static void spill_hard_reg (unsigned int, int);
370 : static int finish_spills (int);
371 : static void scan_paradoxical_subregs (rtx);
372 : static void count_pseudo (int);
373 : static void order_regs_for_reload (class insn_chain *);
374 : static void reload_as_needed (int);
375 : static void forget_old_reloads_1 (rtx, const_rtx, void *);
376 : static void forget_marked_reloads (regset);
377 : static int reload_reg_class_lower (const void *, const void *);
378 : static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
379 : machine_mode);
380 : static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
381 : machine_mode);
382 : static int reload_reg_free_p (unsigned int, int, enum reload_type);
383 : static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
384 : rtx, rtx, int, int);
385 : static int free_for_value_p (int, machine_mode, int, enum reload_type,
386 : rtx, rtx, int, int);
387 : static int allocate_reload_reg (class insn_chain *, int, int);
388 : static int conflicts_with_override (rtx);
389 : static void failed_reload (rtx_insn *, int);
390 : static int set_reload_reg (int, int);
391 : static void choose_reload_regs_init (class insn_chain *, rtx *);
392 : static void choose_reload_regs (class insn_chain *);
393 : static void emit_input_reload_insns (class insn_chain *, struct reload *,
394 : rtx, int);
395 : static void emit_output_reload_insns (class insn_chain *, struct reload *,
396 : int);
397 : static void do_input_reload (class insn_chain *, struct reload *, int);
398 : static void do_output_reload (class insn_chain *, struct reload *, int);
399 : static void emit_reload_insns (class insn_chain *);
400 : static void delete_output_reload (rtx_insn *, int, int, rtx);
401 : static void delete_address_reloads (rtx_insn *, rtx_insn *);
402 : static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *);
403 : static void inc_for_reload (rtx, rtx, rtx, poly_int64);
404 : static void substitute (rtx *, const_rtx, rtx);
405 : static bool gen_reload_chain_without_interm_reg_p (int, int);
406 : static int reloads_conflict (int, int);
407 : static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type);
408 : static rtx_insn *emit_insn_if_valid_for_reload (rtx);
409 :
410 : /* Initialize the reload pass. This is called at the beginning of compilation
411 : and may be called again if the target is reinitialized. */
412 :
413 : void
414 0 : init_reload (void)
415 : {
416 0 : int i;
417 :
418 : /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
419 : Set spill_indirect_levels to the number of levels such addressing is
420 : permitted, zero if it is not permitted at all. */
421 :
    : /* Probe address of the form (MEM (PLUS (REG) (const_int 4))); the
    : register number LAST_VIRTUAL_REGISTER + 1 is deliberately not a
    : hard register, so this models a spilled-pseudo address. */
422 0 : rtx tem
423 0 : = gen_rtx_MEM (Pmode,
424 0 : gen_rtx_PLUS (Pmode,
425 : gen_rtx_REG (Pmode,
426 : LAST_VIRTUAL_REGISTER + 1),
427 : gen_int_mode (4, Pmode)));
428 0 : spill_indirect_levels = 0;
429 :
    : /* Keep wrapping the probe in another MEM until the target rejects the
    : address; each successful wrap is one more level of indirection. */
430 0 : while (memory_address_p (QImode, tem))
431 : {
432 0 : spill_indirect_levels++;
433 0 : tem = gen_rtx_MEM (Pmode, tem);
434 : }
435 :
436 : /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
437 :
438 0 : tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
439 0 : indirect_symref_ok = memory_address_p (QImode, tem);
440 :
441 : /* See if reg+reg is a valid (and offsettable) address. */
442 :
443 0 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
444 : {
445 0 : tem = gen_rtx_PLUS (Pmode,
446 : gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
447 : gen_rtx_REG (Pmode, i));
448 :
449 : /* This way, we make sure that reg+reg is an offsettable address. */
450 0 : tem = plus_constant (Pmode, tem, 4);
451 :
    : /* Record the validity of frame_pointer+reg+4 separately for every
    : machine mode; a mode stays marked once any reg I validates it. */
452 0 : for (int mode = 0; mode < MAX_MACHINE_MODE; mode++)
453 0 : if (!double_reg_address_ok[mode]
454 0 : && memory_address_p ((enum machine_mode)mode, tem))
455 0 : double_reg_address_ok[mode] = 1;
456 : }
457 :
458 : /* Initialize obstack for our rtl allocation. */
    : /* Done only on the first call; reload_startobj marks the obstack base. */
459 0 : if (reload_startobj == NULL)
460 : {
461 0 : gcc_obstack_init (&reload_obstack);
462 0 : reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
463 : }
464 :
    : /* Unlike the obstack above, these regsets are (re)initialized on every
    : call, so re-running after a target switch is safe. */
465 0 : INIT_REG_SET (&spilled_pseudos);
466 0 : INIT_REG_SET (&changed_allocation_pseudos);
467 0 : INIT_REG_SET (&pseudos_counted);
468 0 : }
469 :
470 : /* List of insn chains that are currently unused. */
471 : static class insn_chain *unused_insn_chains = 0;
472 :
473 : /* Allocate an empty insn_chain structure. */
474 : class insn_chain *
475 0 : new_insn_chain (void)
476 : {
477 0 : class insn_chain *c;
478 :
    : /* Reuse a chain from the free list when one is available; otherwise
    : carve a fresh one out of the reload obstack and give it empty
    : regsets. */
479 0 : if (unused_insn_chains == 0)
480 : {
481 0 : c = XOBNEW (&reload_obstack, class insn_chain);
482 0 : INIT_REG_SET (&c->live_throughout);
483 0 : INIT_REG_SET (&c->dead_or_set);
484 : }
485 : else
486 : {
487 0 : c = unused_insn_chains;
488 0 : unused_insn_chains = c->next;
489 : }
    : /* Reset the per-insn flags. NOTE(review): a recycled chain's regsets
    : are not cleared here -- presumably callers overwrite them; confirm. */
490 0 : c->is_caller_save_insn = 0;
491 0 : c->need_operand_change = 0;
492 0 : c->need_reload = 0;
493 0 : c->need_elim = 0;
494 0 : return c;
495 : }
496 :
497 : /* Small utility function to set all regs in hard reg set TO which are
498 : allocated to pseudos in regset FROM. */
499 :
500 : void
501 5446494 : compute_use_by_pseudos (HARD_REG_SET *to, regset from)
502 : {
503 5446494 : unsigned int regno;
504 5446494 : reg_set_iterator rsi;
505 :
    : /* Walk only the pseudo-register (>= FIRST_PSEUDO_REGISTER) members
    : of FROM; hard regs in FROM are ignored. */
506 5446494 : EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
507 : {
    : /* Hard reg assigned to this pseudo by allocation, or -1 if none. */
508 0 : int r = reg_renumber[regno];
509 :
510 0 : if (r < 0)
511 : {
512 : /* reload_combine uses the information from DF_LIVE_IN,
513 : which might still contain registers that have not
514 : actually been allocated since they have an
515 : equivalence. */
516 0 : gcc_assert (ira_conflicts_p || reload_completed);
517 : }
518 : else
    : /* Mark every hard reg the pseudo occupies in its natural mode. */
519 0 : add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
520 : }
521 5446494 : }
522 :
523 : /* Replace all pseudos found in LOC with their corresponding
524 : equivalences. */
525 :
526 : static void
527 0 : replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage)
528 : {
529 0 : rtx x = *loc;
530 0 : enum rtx_code code;
531 0 : const char *fmt;
532 0 : int i, j;
533 :
534 0 : if (! x)
535 : return;
536 :
537 0 : code = GET_CODE (x);
538 0 : if (code == REG)
539 : {
540 0 : unsigned int regno = REGNO (x);
541 :
    : /* Hard registers are left alone; only pseudos get replaced. */
542 0 : if (regno < FIRST_PSEUDO_REGISTER)
543 : return;
544 :
    : /* Give register elimination the first chance; if it rewrote the
    : reg, store the result and rescan it from scratch. */
545 0 : x = eliminate_regs_1 (x, mem_mode, usage, true, false);
546 0 : if (x != *loc)
547 : {
548 0 : *loc = x;
549 0 : replace_pseudos_in (loc, mem_mode, usage);
550 0 : return;
551 : }
552 :
    : /* Substitute the pseudo's equivalence, in priority order:
    : constant, then invariant, then an equivalent MEM, then a MEM
    : built from the recorded equivalent address. */
553 0 : if (reg_equiv_constant (regno))
554 0 : *loc = reg_equiv_constant (regno);
555 0 : else if (reg_equiv_invariant (regno))
556 0 : *loc = reg_equiv_invariant (regno);
557 0 : else if (reg_equiv_mem (regno))
558 0 : *loc = reg_equiv_mem (regno);
559 0 : else if (reg_equiv_address (regno))
560 0 : *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno));
561 : else
562 : {
    : /* No equivalence: the assert checks that regno_reg_rtx[regno]
    : has already been altered away from the original pseudo, so
    : substituting it cannot reinstate the pseudo. */
563 0 : gcc_assert (!REG_P (regno_reg_rtx[regno])
564 : || REGNO (regno_reg_rtx[regno]) != regno);
565 0 : *loc = regno_reg_rtx[regno];
566 : }
567 :
568 0 : return;
569 : }
570 0 : else if (code == MEM)
571 : {
    : /* Inside a MEM the address context matters: recurse with the
    : MEM's own mode as MEM_MODE. */
572 0 : replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
573 0 : return;
574 : }
575 :
576 : /* Process each of our operands recursively. */
577 0 : fmt = GET_RTX_FORMAT (code);
578 0 : for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
579 0 : if (*fmt == 'e')
580 0 : replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
581 0 : else if (*fmt == 'E')
582 0 : for (j = 0; j < XVECLEN (x, i); j++)
583 0 : replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
584 : }
585 :
586 : /* Determine if the current function has an exception receiver block
587 : that reaches the exit block via non-exceptional edges */
588 :
589 : static bool
590 0 : has_nonexceptional_receiver (void)
591 : {
592 0 : edge e;
593 0 : edge_iterator ei;
594 0 : basic_block *tos, *worklist, bb;
595 :
596 : /* If we're not optimizing, then just err on the safe side. */
597 0 : if (!optimize)
598 : return true;
599 :
600 : /* First determine which blocks can reach exit via normal paths. */
    : /* Backward reachability: starting from the exit block, follow
    : predecessor edges that are not EDGE_ABNORMAL and mark each block
    : reached with BB_REACHABLE. TOS/WORKLIST form an explicit stack. */
601 0 : tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
602 :
603 0 : FOR_EACH_BB_FN (bb, cfun)
604 0 : bb->flags &= ~BB_REACHABLE;
605 :
606 : /* Place the exit block on our worklist. */
607 0 : EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
608 0 : *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
609 :
610 : /* Iterate: find everything reachable from what we've already seen. */
611 0 : while (tos != worklist)
612 : {
613 0 : bb = *--tos;
614 :
615 0 : FOR_EACH_EDGE (e, ei, bb->preds)
616 0 : if (!(e->flags & EDGE_ABNORMAL))
617 : {
618 0 : basic_block src = e->src;
619 :
620 0 : if (!(src->flags & BB_REACHABLE))
621 : {
622 0 : src->flags |= BB_REACHABLE;
623 0 : *tos++ = src;
624 : }
625 : }
626 : }
627 0 : free (worklist);
628 :
629 : /* Now see if there's a reachable block with an exceptional incoming
630 : edge. */
631 0 : FOR_EACH_BB_FN (bb, cfun)
632 0 : if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
633 : return true;
634 :
635 : /* No exceptional block reached exit unexceptionally. */
636 : return false;
637 : }
638 :
639 : /* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
640 : zero elements) to MAX_REG_NUM elements.
641 :
642 : Initialize all new fields to NULL and update REG_EQUIVS_SIZE. */
643 : void
644 2798136 : grow_reg_equivs (void)
645 : {
646 2798136 : int old_size = vec_safe_length (reg_equivs);
647 2798136 : int max_regno = max_reg_num ();
648 2798136 : int i;
649 2798136 : reg_equivs_t ze;
650 :
    : /* Append a zero-filled entry for every regno created since the last
    : call; entries below OLD_SIZE are left untouched. */
651 2798136 : memset (&ze, 0, sizeof (reg_equivs_t));
652 2798136 : vec_safe_reserve (reg_equivs, max_regno);
653 207629000 : for (i = old_size; i < max_regno; i++)
654 202032728 : reg_equivs->quick_insert (i, ze);
655 2798136 : }
656 :
657 :
658 : /* Global variables used by reload and its subroutines. */
659 :
660 : /* The current basic block while in calculate_elim_costs_all_insns. */
661 : static basic_block elim_bb;
662 :
663 : /* Set during calculate_needs if an insn needs register elimination. */
664 : static int something_needs_elimination;
665 : /* Set during calculate_needs if an insn needs an operand changed. */
666 : static int something_needs_operands_changed;
667 : /* Set by alter_regs if we spilled a register to the stack. */
668 : static bool something_was_spilled;
669 :
670 : /* Nonzero means we couldn't get enough spill regs. */
671 : static int failure;
672 :
673 : /* Temporary array of pseudo-register number. */
674 : static int *temp_pseudo_reg_arr;
675 :
676 : /* If a pseudo has no hard reg, delete the insns that made the equivalence.
677 : If that insn didn't set the register (i.e., it copied the register to
678 : memory), just delete that insn instead of the equivalencing insn plus
679 : anything now dead. If we call delete_dead_insn on that insn, we may
680 : delete the insn that actually sets the register if the register dies
681 : there and that is incorrect. */
682 : static void
683 0 : remove_init_insns ()
684 : {
    : /* For every pseudo that ended up without a hard reg but has
    : equivalence-initializing insns recorded, remove those insns. */
685 0 : for (int i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
686 : {
687 0 : if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0)
688 : {
689 : rtx list;
690 0 : for (list = reg_equiv_init (i); list; list = XEXP (list, 1))
691 : {
692 0 : rtx_insn *equiv_insn = as_a <rtx_insn *> (XEXP (list, 0));
693 :
694 : /* If we already deleted the insn or if it may trap, we can't
695 : delete it. The latter case shouldn't happen, but can
696 : if an insn has a variable address, gets a REG_EH_REGION
697 : note added to it, and then gets converted into a load
698 : from a constant address. */
699 0 : if (NOTE_P (equiv_insn)
700 0 : || can_throw_internal (equiv_insn))
701 : ;
    : /* Cascade-delete only when the insn actually sets the register
    : (see the comment above this function); otherwise just mark
    : this one insn deleted. */
702 0 : else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
703 0 : delete_dead_insn (equiv_insn);
704 : else
705 0 : SET_INSN_DELETED (equiv_insn);
706 : }
707 : }
708 : }
709 0 : }
710 :
711 : /* Return true if remove_init_insns will delete INSN. */
712 : static bool
713 0 : will_delete_init_insn_p (rtx_insn *insn)
714 : {
    : /* Mirror the tests in remove_init_insns: INSN must be a single set
    : of an unallocated pseudo, must not throw internally, and must
    : appear on that pseudo's reg_equiv_init list. */
715 0 : rtx set = single_set (insn);
716 0 : if (!set || !REG_P (SET_DEST (set)))
717 : return false;
718 0 : unsigned regno = REGNO (SET_DEST (set));
719 :
720 0 : if (can_throw_internal (insn))
721 : return false;
722 :
723 0 : if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
724 : return false;
725 :
726 0 : for (rtx list = reg_equiv_init (regno); list; list = XEXP (list, 1))
727 : {
728 0 : rtx equiv_insn = XEXP (list, 0);
729 0 : if (equiv_insn == insn)
730 : return true;
731 : }
732 : return false;
733 : }
734 :
735 : /* Main entry point for the reload pass.
736 :
737 : FIRST is the first insn of the function being compiled.
738 :
739 : GLOBAL nonzero means we were called from global_alloc
740 : and should attempt to reallocate any pseudoregs that we
741 : displace from hard regs we will use for reloads.
742 : If GLOBAL is zero, we do not have enough information to do that,
743 : so any pseudo reg that is spilled must go to the stack.
744 :
745 : Return value is TRUE if reload likely left dead insns in the
746 : stream and a DCE pass should be run to eliminate them. Else the
747 : return value is FALSE. */
748 :
749 : bool
750 0 : reload (rtx_insn *first, int global)
751 : {
752 0 : int i, n;
753 0 : rtx_insn *insn;
754 0 : struct elim_table *ep;
755 0 : basic_block bb;
756 0 : bool inserted;
757 :
758 : /* Make sure even insns with volatile mem refs are recognizable. */
759 0 : init_recog ();
760 :
761 0 : failure = 0;
762 :
763 0 : reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
764 :
765 : /* Make sure that the last insn in the chain
766 : is not something that needs reloading. */
767 0 : emit_note (NOTE_INSN_DELETED);
768 :
769 : /* Enable find_equiv_reg to distinguish insns made by reload. */
770 0 : reload_first_uid = get_max_uid ();
771 :
772 : /* Initialize the secondary memory table. */
773 0 : clear_secondary_mem ();
774 :
775 : /* We don't have a stack slot for any spill reg yet. */
776 0 : memset (spill_stack_slot, 0, sizeof spill_stack_slot);
777 0 : memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
778 :
779 : /* Initialize the save area information for caller-save, in case some
780 : are needed. */
781 0 : init_save_areas ();
782 :
783 : /* Compute which hard registers are now in use
784 : as homes for pseudo registers.
785 : This is done here rather than (eg) in global_alloc
786 : because this point is reached even if not optimizing. */
787 0 : for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
788 0 : mark_home_live (i);
789 :
790 : /* A function that has a nonlocal label that can reach the exit
791 : block via non-exceptional paths must save all call-saved
792 : registers. */
793 0 : if (cfun->has_nonlocal_label
794 0 : && has_nonexceptional_receiver ())
795 0 : crtl->saves_all_registers = 1;
796 :
797 0 : if (crtl->saves_all_registers)
798 0 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
799 0 : if (! crtl->abi->clobbers_full_reg_p (i)
800 0 : && ! fixed_regs[i]
801 0 : && ! LOCAL_REGNO (i))
802 0 : df_set_regs_ever_live (i, true);
803 :
804 : /* Find all the pseudo registers that didn't get hard regs
805 : but do have known equivalent constants or memory slots.
806 : These include parameters (known equivalent to parameter slots)
807 : and cse'd or loop-moved constant memory addresses.
808 :
809 : Record constant equivalents in reg_equiv_constant
810 : so they will be substituted by find_reloads.
811 : Record memory equivalents in reg_mem_equiv so they can
812 : be substituted eventually by altering the REG-rtx's. */
813 :
814 0 : grow_reg_equivs ();
815 0 : reg_old_renumber = XCNEWVEC (short, max_regno);
816 0 : memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
817 0 : pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
818 0 : pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
819 :
820 0 : CLEAR_HARD_REG_SET (bad_spill_regs_global);
821 :
822 0 : init_eliminable_invariants (first, true);
823 0 : init_elim_table ();
824 :
825 : /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
826 : stack slots to the pseudos that lack hard regs or equivalents.
827 : Do not touch virtual registers. */
828 :
829 0 : temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
830 0 : for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
831 0 : temp_pseudo_reg_arr[n++] = i;
832 :
833 0 : if (ira_conflicts_p)
834 : /* Ask IRA to order pseudo-registers for better stack slot
835 : sharing. */
836 0 : ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_mode);
837 :
838 0 : for (i = 0; i < n; i++)
839 0 : alter_reg (temp_pseudo_reg_arr[i], -1, false);
840 :
841 : /* If we have some registers we think can be eliminated, scan all insns to
842 : see if there is an insn that sets one of these registers to something
843 : other than itself plus a constant. If so, the register cannot be
844 : eliminated. Doing this scan here eliminates an extra pass through the
845 : main reload loop in the most common case where register elimination
846 : cannot be done. */
847 0 : for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
848 0 : if (INSN_P (insn))
849 0 : note_pattern_stores (PATTERN (insn), mark_not_eliminable, NULL);
850 :
851 0 : maybe_fix_stack_asms ();
852 :
853 0 : insns_need_reload = 0;
854 0 : something_needs_elimination = 0;
855 :
856 : /* Initialize to -1, which means take the first spill register. */
857 0 : last_spill_reg = -1;
858 :
859 : /* Spill any hard regs that we know we can't eliminate. */
860 0 : CLEAR_HARD_REG_SET (used_spill_regs);
861 : /* There can be multiple ways to eliminate a register;
862 : they should be listed adjacently.
863 : Elimination for any register fails only if all possible ways fail. */
864 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; )
865 : {
866 0 : int from = ep->from;
867 0 : int can_eliminate = 0;
868 0 : do
869 : {
870 0 : can_eliminate |= ep->can_eliminate;
871 0 : ep++;
872 : }
873 0 : while (ep < ®_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
874 0 : if (! can_eliminate)
875 0 : spill_hard_reg (from, 1);
876 : }
877 :
878 0 : if (!HARD_FRAME_POINTER_IS_FRAME_POINTER && frame_pointer_needed)
879 0 : spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
880 :
881 0 : finish_spills (global);
882 :
883 : /* From now on, we may need to generate moves differently. We may also
884 : allow modifications of insns which cause them to not be recognized.
885 : Any such modifications will be cleaned up during reload itself. */
886 0 : reload_in_progress = 1;
887 :
888 : /* This loop scans the entire function each go-round
889 : and repeats until one repetition spills no additional hard regs. */
890 0 : for (;;)
891 : {
892 0 : int something_changed;
893 0 : poly_int64 starting_frame_size;
894 :
895 0 : starting_frame_size = get_frame_size ();
896 0 : something_was_spilled = false;
897 :
898 0 : set_initial_elim_offsets ();
899 0 : set_initial_label_offsets ();
900 :
901 : /* For each pseudo register that has an equivalent location defined,
902 : try to eliminate any eliminable registers (such as the frame pointer)
903 : assuming initial offsets for the replacement register, which
904 : is the normal case.
905 :
906 : If the resulting location is directly addressable, substitute
907 : the MEM we just got directly for the old REG.
908 :
909 : If it is not addressable but is a constant or the sum of a hard reg
910 : and constant, it is probably not addressable because the constant is
911 : out of range, in that case record the address; we will generate
912 : hairy code to compute the address in a register each time it is
913 : needed. Similarly if it is a hard register, but one that is not
914 : valid as an address register.
915 :
916 : If the location is not addressable, but does not have one of the
917 : above forms, assign a stack slot. We have to do this to avoid the
918 : potential of producing lots of reloads if, e.g., a location involves
919 : a pseudo that didn't get a hard register and has an equivalent memory
920 : location that also involves a pseudo that didn't get a hard register.
921 :
922 : Perhaps at some point we will improve reload_when_needed handling
923 : so this problem goes away. But that's very hairy. */
924 :
925 0 : for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
926 0 : if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i))
927 : {
928 0 : rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode,
929 : NULL_RTX);
930 :
931 0 : if (strict_memory_address_addr_space_p
932 0 : (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
933 0 : MEM_ADDR_SPACE (x)))
934 0 : reg_equiv_mem (i) = x, reg_equiv_address (i) = 0;
935 0 : else if (CONSTANT_P (XEXP (x, 0))
936 0 : || (REG_P (XEXP (x, 0))
937 0 : && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
938 0 : || (GET_CODE (XEXP (x, 0)) == PLUS
939 0 : && REG_P (XEXP (XEXP (x, 0), 0))
940 0 : && (REGNO (XEXP (XEXP (x, 0), 0))
941 : < FIRST_PSEUDO_REGISTER)
942 0 : && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
943 0 : reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0;
944 : else
945 : {
946 : /* Make a new stack slot. Then indicate that something
947 : changed so we go back and recompute offsets for
948 : eliminable registers because the allocation of memory
949 : below might change some offset. reg_equiv_{mem,address}
950 : will be set up for this pseudo on the next pass around
951 : the loop. */
952 0 : reg_equiv_memory_loc (i) = 0;
953 0 : reg_equiv_init (i) = 0;
954 0 : alter_reg (i, -1, true);
955 : }
956 : }
957 :
958 0 : if (caller_save_needed)
959 0 : setup_save_areas ();
960 :
961 0 : if (maybe_ne (starting_frame_size, 0) && crtl->stack_alignment_needed)
962 : {
963 : /* If we have a stack frame, we must align it now. The
964 : stack size may be a part of the offset computation for
965 : register elimination. So if this changes the stack size,
966 : then repeat the elimination bookkeeping. We don't
967 : realign when there is no stack, as that will cause a
968 : stack frame when none is needed should
969 : TARGET_STARTING_FRAME_OFFSET not be already aligned to
970 : STACK_BOUNDARY. */
971 0 : assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
972 : }
973 : /* If we allocated another stack slot, redo elimination bookkeeping. */
974 0 : if (something_was_spilled
975 0 : || maybe_ne (starting_frame_size, get_frame_size ()))
976 : {
977 0 : if (update_eliminables_and_spill ())
978 0 : finish_spills (0);
979 0 : continue;
980 : }
981 :
982 0 : if (caller_save_needed)
983 : {
984 0 : save_call_clobbered_regs ();
985 : /* That might have allocated new insn_chain structures. */
986 0 : reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
987 : }
988 :
989 0 : calculate_needs_all_insns (global);
990 :
991 0 : if (! ira_conflicts_p)
992 : /* Don't do it for IRA. We need this info because we don't
993 : change live_throughout and dead_or_set for chains when IRA
994 : is used. */
995 0 : CLEAR_REG_SET (&spilled_pseudos);
996 :
997 0 : something_changed = 0;
998 :
999 : /* If we allocated any new memory locations, make another pass
1000 : since it might have changed elimination offsets. */
1001 0 : if (something_was_spilled
1002 0 : || maybe_ne (starting_frame_size, get_frame_size ()))
1003 : something_changed = 1;
1004 :
1005 : /* Even if the frame size remained the same, we might still have
1006 : changed elimination offsets, e.g. if find_reloads called
1007 : force_const_mem requiring the back end to allocate a constant
1008 : pool base register that needs to be saved on the stack. */
1009 0 : else if (!verify_initial_elim_offsets ())
1010 0 : something_changed = 1;
1011 :
1012 0 : if (update_eliminables_and_spill ())
1013 : {
1014 0 : finish_spills (0);
1015 0 : something_changed = 1;
1016 : }
1017 : else
1018 : {
1019 0 : select_reload_regs ();
1020 0 : if (failure)
1021 0 : goto failed;
1022 0 : if (insns_need_reload)
1023 0 : something_changed |= finish_spills (global);
1024 : }
1025 :
1026 0 : if (! something_changed)
1027 : break;
1028 :
1029 0 : if (caller_save_needed)
1030 0 : delete_caller_save_insns ();
1031 :
1032 0 : obstack_free (&reload_obstack, reload_firstobj);
1033 : }
1034 :
1035 : /* If global-alloc was run, notify it of any register eliminations we have
1036 : done. */
1037 0 : if (global)
1038 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1039 0 : if (ep->can_eliminate)
1040 0 : mark_elimination (ep->from, ep->to);
1041 :
1042 0 : remove_init_insns ();
1043 :
1044 : /* Use the reload registers where necessary
1045 : by generating move instructions to move the must-be-register
1046 : values into or out of the reload registers. */
1047 :
1048 0 : if (insns_need_reload != 0 || something_needs_elimination
1049 0 : || something_needs_operands_changed)
1050 : {
1051 0 : poly_int64 old_frame_size = get_frame_size ();
1052 :
1053 0 : reload_as_needed (global);
1054 :
1055 0 : gcc_assert (known_eq (old_frame_size, get_frame_size ()));
1056 :
1057 0 : gcc_assert (verify_initial_elim_offsets ());
1058 : }
1059 :
1060 : /* If we were able to eliminate the frame pointer, show that it is no
   longer live at the start of any basic block.  If it is live by
1062 : virtue of being in a pseudo, that pseudo will be marked live
1063 : and hence the frame pointer will be known to be live via that
1064 : pseudo. */
1065 :
1066 0 : if (! frame_pointer_needed)
1067 0 : FOR_EACH_BB_FN (bb, cfun)
1068 0 : bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1069 :
1070 : /* Come here (with failure set nonzero) if we can't get enough spill
1071 : regs. */
1072 0 : failed:
1073 :
1074 0 : CLEAR_REG_SET (&changed_allocation_pseudos);
1075 0 : CLEAR_REG_SET (&spilled_pseudos);
1076 0 : reload_in_progress = 0;
1077 :
1078 : /* Now eliminate all pseudo regs by modifying them into
1079 : their equivalent memory references.
1080 : The REG-rtx's for the pseudos are modified in place,
1081 : so all insns that used to refer to them now refer to memory.
1082 :
1083 : For a reg that has a reg_equiv_address, all those insns
1084 : were changed by reloading so that no insns refer to it any longer;
1085 : but the DECL_RTL of a variable decl may refer to it,
1086 : and if so this causes the debugging info to mention the variable. */
1087 :
1088 0 : for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1089 : {
1090 0 : rtx addr = 0;
1091 :
1092 0 : if (reg_equiv_mem (i))
1093 0 : addr = XEXP (reg_equiv_mem (i), 0);
1094 :
1095 0 : if (reg_equiv_address (i))
1096 0 : addr = reg_equiv_address (i);
1097 :
1098 0 : if (addr)
1099 : {
1100 0 : if (reg_renumber[i] < 0)
1101 : {
1102 0 : rtx reg = regno_reg_rtx[i];
1103 :
1104 0 : REG_USERVAR_P (reg) = 0;
1105 0 : PUT_CODE (reg, MEM);
1106 0 : XEXP (reg, 0) = addr;
1107 0 : if (reg_equiv_memory_loc (i))
1108 0 : MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i));
1109 : else
1110 0 : MEM_ATTRS (reg) = 0;
1111 0 : MEM_NOTRAP_P (reg) = 1;
1112 : }
1113 0 : else if (reg_equiv_mem (i))
1114 0 : XEXP (reg_equiv_mem (i), 0) = addr;
1115 : }
1116 :
1117 : /* We don't want complex addressing modes in debug insns
1118 : if simpler ones will do, so delegitimize equivalences
1119 : in debug insns. */
1120 0 : if (MAY_HAVE_DEBUG_BIND_INSNS && reg_renumber[i] < 0)
1121 : {
1122 0 : rtx reg = regno_reg_rtx[i];
1123 0 : rtx equiv = 0;
1124 0 : df_ref use, next;
1125 :
1126 0 : if (reg_equiv_constant (i))
1127 : equiv = reg_equiv_constant (i);
1128 0 : else if (reg_equiv_invariant (i))
1129 : equiv = reg_equiv_invariant (i);
1130 0 : else if (reg && MEM_P (reg))
1131 0 : equiv = targetm.delegitimize_address (reg);
1132 0 : else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1133 : equiv = reg;
1134 :
1135 0 : if (equiv == reg)
1136 0 : continue;
1137 :
1138 0 : for (use = DF_REG_USE_CHAIN (i); use; use = next)
1139 : {
1140 0 : insn = DF_REF_INSN (use);
1141 :
1142 : /* Make sure the next ref is for a different instruction,
1143 : so that we're not affected by the rescan. */
1144 0 : next = DF_REF_NEXT_REG (use);
1145 0 : while (next && DF_REF_INSN (next) == insn)
1146 0 : next = DF_REF_NEXT_REG (next);
1147 :
1148 0 : if (DEBUG_BIND_INSN_P (insn))
1149 : {
1150 0 : if (!equiv)
1151 : {
1152 0 : INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1153 0 : df_insn_rescan_debug_internal (insn);
1154 : }
1155 : else
1156 0 : INSN_VAR_LOCATION_LOC (insn)
1157 0 : = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1158 : reg, equiv);
1159 : }
1160 : }
1161 : }
1162 : }
1163 :
1164 : /* We must set reload_completed now since the cleanup_subreg_operands call
1165 : below will re-recognize each insn and reload may have generated insns
1166 : which are only valid during and after reload. */
1167 0 : reload_completed = 1;
1168 :
1169 : /* Make a pass over all the insns and delete all USEs which we inserted
1170 : only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1171 : notes. Delete all CLOBBER insns, except those that refer to the return
1172 : value and the special mem:BLK CLOBBERs added to prevent the scheduler
1173 : from misarranging variable-array code, and simplify (subreg (reg))
1174 : operands. Strip and regenerate REG_INC notes that may have been moved
1175 : around. */
1176 :
1177 0 : for (insn = first; insn; insn = NEXT_INSN (insn))
1178 0 : if (INSN_P (insn))
1179 : {
1180 0 : rtx *pnote;
1181 :
1182 0 : if (CALL_P (insn))
1183 0 : replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1184 : VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1185 :
1186 0 : if ((GET_CODE (PATTERN (insn)) == USE
1187 : /* We mark with QImode USEs introduced by reload itself. */
1188 0 : && (GET_MODE (insn) == QImode
1189 0 : || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1190 0 : || (GET_CODE (PATTERN (insn)) == CLOBBER
1191 0 : && (!MEM_P (XEXP (PATTERN (insn), 0))
1192 0 : || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1193 0 : || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1194 0 : && XEXP (XEXP (PATTERN (insn), 0), 0)
1195 0 : != stack_pointer_rtx))
1196 0 : && (!REG_P (XEXP (PATTERN (insn), 0))
1197 0 : || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1198 : {
1199 0 : delete_insn (insn);
1200 0 : continue;
1201 : }
1202 :
1203 : /* Some CLOBBERs may survive until here and still reference unassigned
1204 : pseudos with const equivalent, which may in turn cause ICE in later
1205 : passes if the reference remains in place. */
1206 0 : if (GET_CODE (PATTERN (insn)) == CLOBBER)
1207 0 : replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1208 0 : VOIDmode, PATTERN (insn));
1209 :
1210 : /* Discard obvious no-ops, even without -O. This optimization
1211 : is fast and doesn't interfere with debugging. */
1212 0 : if (NONJUMP_INSN_P (insn)
1213 0 : && GET_CODE (PATTERN (insn)) == SET
1214 0 : && REG_P (SET_SRC (PATTERN (insn)))
1215 0 : && REG_P (SET_DEST (PATTERN (insn)))
1216 0 : && (REGNO (SET_SRC (PATTERN (insn)))
1217 0 : == REGNO (SET_DEST (PATTERN (insn)))))
1218 : {
1219 0 : delete_insn (insn);
1220 0 : continue;
1221 : }
1222 :
1223 0 : pnote = ®_NOTES (insn);
1224 0 : while (*pnote != 0)
1225 : {
1226 0 : if (REG_NOTE_KIND (*pnote) == REG_DEAD
1227 0 : || REG_NOTE_KIND (*pnote) == REG_UNUSED
1228 0 : || REG_NOTE_KIND (*pnote) == REG_INC)
1229 0 : *pnote = XEXP (*pnote, 1);
1230 : else
1231 0 : pnote = &XEXP (*pnote, 1);
1232 : }
1233 :
1234 0 : if (AUTO_INC_DEC)
1235 : add_auto_inc_notes (insn, PATTERN (insn));
1236 :
1237 : /* Simplify (subreg (reg)) if it appears as an operand. */
1238 0 : cleanup_subreg_operands (insn);
1239 :
1240 : /* Clean up invalid ASMs so that they don't confuse later passes.
1241 : See PR 21299. */
1242 0 : if (asm_noperands (PATTERN (insn)) >= 0)
1243 : {
1244 0 : extract_insn (insn);
1245 0 : if (!constrain_operands (1, get_enabled_alternatives (insn)))
1246 : {
1247 0 : error_for_asm (insn,
1248 : "%<asm%> operand has impossible constraints");
1249 0 : delete_insn (insn);
1250 0 : continue;
1251 : }
1252 : }
1253 : }
1254 :
1255 0 : free (temp_pseudo_reg_arr);
1256 :
1257 : /* Indicate that we no longer have known memory locations or constants. */
1258 0 : free_reg_equiv ();
1259 :
1260 0 : free (reg_max_ref_mode);
1261 0 : free (reg_old_renumber);
1262 0 : free (pseudo_previous_regs);
1263 0 : free (pseudo_forbidden_regs);
1264 :
1265 0 : CLEAR_HARD_REG_SET (used_spill_regs);
1266 0 : for (i = 0; i < n_spills; i++)
1267 0 : SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1268 :
1269 : /* Free all the insn_chain structures at once. */
1270 0 : obstack_free (&reload_obstack, reload_startobj);
1271 0 : unused_insn_chains = 0;
1272 :
1273 0 : inserted = fixup_abnormal_edges ();
1274 :
1275 : /* Split basic blocks if we've possibly turned single trapping insn
1276 : into multiple ones or otherwise the backend requested to do so. */
1277 0 : if (cfun->can_throw_non_call_exceptions
1278 0 : || cfun->split_basic_blocks_after_reload)
1279 : {
1280 0 : auto_sbitmap blocks (last_basic_block_for_fn (cfun));
1281 0 : bitmap_ones (blocks);
1282 0 : find_many_sub_basic_blocks (blocks);
1283 0 : }
1284 :
1285 0 : if (inserted)
1286 0 : commit_edge_insertions ();
1287 :
1288 : /* Replacing pseudos with their memory equivalents might have
1289 : created shared rtx. Subsequent passes would get confused
1290 : by this, so unshare everything here. */
1291 0 : unshare_all_rtl_again (first);
1292 :
1293 : #ifdef STACK_BOUNDARY
1294 : /* init_emit has set the alignment of the hard frame pointer
1295 : to STACK_BOUNDARY. It is very likely no longer valid if
1296 : the hard frame pointer was used for register allocation. */
1297 0 : if (!frame_pointer_needed)
1298 0 : REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1299 : #endif
1300 :
1301 0 : substitute_stack.release ();
1302 :
1303 0 : gcc_assert (bitmap_empty_p (&spilled_pseudos));
1304 :
1305 0 : reload_completed = !failure;
1306 :
1307 0 : return need_dce;
1308 : }
1309 :
1310 : /* Yet another special case. Unfortunately, reg-stack forces people to
1311 : write incorrect clobbers in asm statements. These clobbers must not
1312 : cause the register to appear in bad_spill_regs, otherwise we'll call
1313 : fatal_insn later. We clear the corresponding regnos in the live
1314 : register sets to avoid this.
1315 : The whole thing is rather sick, I'm afraid. */
1316 :
static void
maybe_fix_stack_asms (void)
{
#ifdef STACK_REGS
  const char *constraints[MAX_RECOG_OPERANDS];
  machine_mode operand_mode[MAX_RECOG_OPERANDS];
  class insn_chain *chain;

  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      /* Only asm insns are of interest here; anything else, or an asm
	 without a PARALLEL body (hence without clobbers), is skipped.  */
      if (! INSN_P (chain->insn)
	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
	continue;
      pat = PATTERN (chain->insn);
      if (GET_CODE (pat) != PARALLEL)
	continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx t = XVECEXP (pat, 0, i);
	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
	}

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
			   constraints, operand_mode, NULL);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
	{
	  const char *p = constraints[i];
	  /* For every alternative, we compute the class of registers allowed
	     for reloading in CLS, and merge its contents into the reg set
	     ALLOWED.  */
	  int cls = (int) NO_REGS;

	  for (;;)
	    {
	      char c = *p;

	      if (c == '\0' || c == ',' || c == '#')
		{
		  /* End of one alternative - mark the regs in the current
		     class, and reset the class.  */
		  allowed |= reg_class_contents[cls];
		  cls = NO_REGS;
		  p++;
		  /* '#' disparages the rest of this alternative; skip
		     forward to the next ',' or the end of the string.  */
		  if (c == '#')
		    do {
		      c = *p++;
		    } while (c != '\0' && c != ',');
		  if (c == '\0')
		    break;
		  continue;
		}

	      switch (c)
		{
		case 'g':
		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
		  break;

		default:
		  /* Address constraints admit any base register; other
		     constraints map to their register class (possibly
		     NO_REGS for non-register constraints).  */
		  enum constraint_num cn = lookup_constraint (p);
		  if (insn_extra_address_constraint (cn))
		    cls = (int) reg_class_subunion[cls]
		      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					     ADDRESS, SCRATCH, chain->insn)];
		  else
		    cls = (int) reg_class_subunion[cls]
		      [reg_class_for_constraint (cn)];
		  break;
		}
	      /* Constraints may be multi-character; advance by the
		 full length of this one.  */
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
      /* Those of the registers which are clobbered, but allowed by the
	 constraints, must be usable as reload registers.  So clear them
	 out of the life information.  */
      allowed &= clobbered;
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allowed, i))
	  {
	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
	  }
    }

#endif
}
1416 :
1417 : /* Copy the global variables n_reloads and rld into the corresponding elts
1418 : of CHAIN. */
1419 : static void
1420 0 : copy_reloads (class insn_chain *chain)
1421 : {
1422 0 : chain->n_reloads = n_reloads;
1423 0 : chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1424 0 : memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1425 0 : reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1426 0 : }
1427 :
1428 : /* Walk the chain of insns, and determine for each whether it needs reloads
1429 : and/or eliminations. Build the corresponding insns_need_reload list, and
1430 : set something_needs_elimination as appropriate. */
static void
calculate_needs_all_insns (int global)
{
  class insn_chain **pprev_reload = &insns_need_reload;
  class insn_chain *chain, *next = 0;

  something_needs_elimination = 0;

  /* Mark the obstack position so per-insn allocations can be freed
     again below (see the did_elimination case and copy_reloads).  */
  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
  for (chain = reload_insn_chain; chain != 0; chain = next)
    {
      rtx_insn *insn = chain->insn;

      /* CHAIN may be unlinked from the list below, so fetch its
	 successor up front.  */
      next = chain->next;

      /* Clear out the shortcuts.  */
      chain->n_reloads = 0;
      chain->need_elim = 0;
      chain->need_reload = 0;
      chain->need_operand_change = 0;

      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	 what effects this has on the known offsets at labels.  */

      if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
	set_label_offsets (insn, insn, 0);

      if (INSN_P (insn))
	{
	  /* Save the insn's original form; elimination rewrites made
	     while analyzing are undone below and redone during reload.  */
	  rtx old_body = PATTERN (insn);
	  int old_code = INSN_CODE (insn);
	  rtx old_notes = REG_NOTES (insn);
	  int did_elimination = 0;
	  int operands_changed = 0;

	  /* Skip insns that only set an equivalence.  */
	  if (will_delete_init_insn_p (insn))
	    continue;

	  /* If needed, eliminate any eliminable registers.  */
	  if (num_eliminable || num_eliminable_invariants)
	    did_elimination = eliminate_regs_in_insn (insn, 0);

	  /* Analyze the instruction.  */
	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
					   global, spill_reg_order);

	  /* If a no-op set needs more than one reload, this is likely
	     to be something that needs input address reloads.  We
	     can't get rid of this cleanly later, and it is of no use
	     anyway, so discard it now.
	     We only do this when expensive_optimizations is enabled,
	     since this complements reload inheritance / output
	     reload deletion, and it can make debugging harder.  */
	  if (flag_expensive_optimizations && n_reloads > 1)
	    {
	      rtx set = single_set (insn);
	      if (set
		  &&
		  ((SET_SRC (set) == SET_DEST (set)
		    && REG_P (SET_SRC (set))
		    && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
		   || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
		       && reg_renumber[REGNO (SET_SRC (set))] < 0
		       && reg_renumber[REGNO (SET_DEST (set))] < 0
		       && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL
		       && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL
		       && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))),
				       reg_equiv_memory_loc (REGNO (SET_DEST (set)))))))
		{
		  if (ira_conflicts_p)
		    /* Inform IRA about the insn deletion.  */
		    ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
						   REGNO (SET_SRC (set)));
		  delete_insn (insn);
		  /* Delete it from the reload chain.  */
		  if (chain->prev)
		    chain->prev->next = next;
		  else
		    reload_insn_chain = next;
		  if (next)
		    next->prev = chain->prev;
		  /* Recycle the unlinked element via the free list.  */
		  chain->next = unused_insn_chains;
		  unused_insn_chains = chain;
		  continue;
		}
	    }
	  if (num_eliminable)
	    update_eliminable_offsets ();

	  /* Remember for later shortcuts which insns had any reloads or
	     register eliminations.  */
	  chain->need_elim = did_elimination;
	  chain->need_reload = n_reloads > 0;
	  chain->need_operand_change = operands_changed;

	  /* Discard any register replacements done.  */
	  if (did_elimination)
	    {
	      obstack_free (&reload_obstack, reload_insn_firstobj);
	      PATTERN (insn) = old_body;
	      INSN_CODE (insn) = old_code;
	      REG_NOTES (insn) = old_notes;
	      something_needs_elimination = 1;
	    }

	  something_needs_operands_changed |= operands_changed;

	  if (n_reloads != 0)
	    {
	      /* Save the reloads just computed into CHAIN and append it
		 to the insns_need_reload list.  */
	      copy_reloads (chain);
	      *pprev_reload = chain;
	      pprev_reload = &chain->next_need_reload;
	    }
	}
    }
  /* Terminate the insns_need_reload list.  */
  *pprev_reload = 0;
}
1551 :
1552 : /* This function is called from the register allocator to set up estimates
1553 : for the cost of eliminating pseudos which have REG_EQUIV equivalences to
1554 : an invariant. The structure is similar to calculate_needs_all_insns. */
1555 :
void
calculate_elim_costs_all_insns (void)
{
  /* Per-pseudo cost (scaled by block frequency) of the insn that
     initializes an equivalencing pseudo; accumulated below and fed
     back to IRA via ira_adjust_equiv_reg_cost.  */
  int *reg_equiv_init_cost;
  basic_block bb;
  int i;

  reg_equiv_init_cost = XCNEWVEC (int, max_regno);
  init_elim_table ();
  init_eliminable_invariants (get_insns (), false);

  set_initial_elim_offsets ();
  set_initial_label_offsets ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      elim_bb = bb;

      FOR_BB_INSNS (bb, insn)
	{
	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	     include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	     what effects this has on the known offsets at labels.  */

	  if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	      || (INSN_P (insn) && REG_NOTES (insn) != 0))
	    set_label_offsets (insn, insn, 0);

	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Skip insns that only set an equivalence.  */
	      if (set && REG_P (SET_DEST (set))
		  && reg_renumber[REGNO (SET_DEST (set))] < 0
		  && (reg_equiv_constant (REGNO (SET_DEST (set)))
		      || reg_equiv_invariant (REGNO (SET_DEST (set)))))
		{
		  unsigned regno = REGNO (SET_DEST (set));
		  rtx_insn_list *init = reg_equiv_init (regno);
		  if (init)
		    {
		      /* Estimate what the initializing set would cost
			 after elimination, weighted by block frequency.  */
		      rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
						false, true);
		      machine_mode mode = GET_MODE (SET_DEST (set));
		      int cost = set_src_cost (t, mode,
					       optimize_bb_for_speed_p (bb));
		      int freq = REG_FREQ_FROM_BB (bb);

		      reg_equiv_init_cost[regno] = cost * freq;
		      continue;
		    }
		}
	      /* If needed, eliminate any eliminable registers.  */
	      if (num_eliminable || num_eliminable_invariants)
		elimination_costs_in_insn (insn);

	      if (num_eliminable)
		update_eliminable_offsets ();
	    }
	}
    }
  /* Report the accumulated estimates back to IRA.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_equiv_invariant (i))
	{
	  if (reg_equiv_init (i))
	    {
	      int cost = reg_equiv_init_cost[i];
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d has equivalence, initial gains %d\n", i, cost);
	      if (cost != 0)
		ira_adjust_equiv_reg_cost (i, cost);
	    }
	  else
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d had equivalence, but can't be eliminated\n",
			 i);
	      ira_adjust_equiv_reg_cost (i, 0);
	    }
	}
    }

  free (reg_equiv_init_cost);
  /* Release the label-offset tables set up above.  */
  free (offsets_known_at);
  free (offsets_at);
  offsets_at = NULL;
  offsets_known_at = NULL;
}
1649 :
1650 : /* Comparison function for qsort to decide which of two reloads
1651 : should be handled first. *P1 and *P2 are the reload numbers. */
1652 :
1653 : static int
1654 0 : reload_reg_class_lower (const void *r1p, const void *r2p)
1655 : {
1656 0 : int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1657 0 : int t;
1658 :
1659 : /* Consider required reloads before optional ones. */
1660 0 : t = rld[r1].optional - rld[r2].optional;
1661 0 : if (t != 0)
1662 : return t;
1663 :
1664 : /* Count all solitary classes before non-solitary ones. */
1665 0 : t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1666 0 : - (reg_class_size[(int) rld[r1].rclass] == 1));
1667 0 : if (t != 0)
1668 : return t;
1669 :
1670 : /* Aside from solitaires, consider all multi-reg groups first. */
1671 0 : t = rld[r2].nregs - rld[r1].nregs;
1672 0 : if (t != 0)
1673 : return t;
1674 :
1675 : /* Consider reloads in order of increasing reg-class number. */
1676 0 : t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1677 0 : if (t != 0)
1678 : return t;
1679 :
1680 : /* If reloads are equally urgent, sort by reload number,
1681 : so that the results of qsort leave nothing to chance. */
1682 0 : return r1 - r2;
1683 : }
1684 :
1685 : /* The cost of spilling each hard reg. */
1686 : static int spill_cost[FIRST_PSEUDO_REGISTER];
1687 :
/* When spilling multiple hard registers, we use SPILL_COST for the first
   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
   is incremented only for the first hard reg of a multi-reg pseudo.  */
1691 : static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1692 :
1693 : /* Map of hard regno to pseudo regno currently occupying the hard
1694 : reg. */
1695 : static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1696 :
1697 : /* Update the spill cost arrays, considering that pseudo REG is live. */
1698 :
1699 : static void
1700 0 : count_pseudo (int reg)
1701 : {
1702 0 : int freq = REG_FREQ (reg);
1703 0 : int r = reg_renumber[reg];
1704 0 : int nregs;
1705 :
1706 : /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1707 0 : if (ira_conflicts_p && r < 0)
1708 : return;
1709 :
1710 0 : if (REGNO_REG_SET_P (&pseudos_counted, reg)
1711 0 : || REGNO_REG_SET_P (&spilled_pseudos, reg))
1712 0 : return;
1713 :
1714 0 : SET_REGNO_REG_SET (&pseudos_counted, reg);
1715 :
1716 0 : gcc_assert (r >= 0);
1717 :
1718 0 : spill_add_cost[r] += freq;
1719 0 : nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg));
1720 0 : while (nregs-- > 0)
1721 : {
1722 0 : hard_regno_to_pseudo_regno[r + nregs] = reg;
1723 0 : spill_cost[r + nregs] += freq;
1724 : }
1725 : }
1726 :
/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
   contents of BAD_SPILL_REGS for the insn described by CHAIN.  */

static void
order_regs_for_reload (class insn_chain *chain)
{
  unsigned i;
  HARD_REG_SET used_by_pseudos;
  HARD_REG_SET used_by_pseudos2;
  reg_set_iterator rsi;

  /* Fixed registers are never candidates for spilling.  */
  bad_spill_regs = fixed_reg_set;

  /* Start from a clean slate for this insn.  */
  memset (spill_cost, 0, sizeof spill_cost);
  memset (spill_add_cost, 0, sizeof spill_add_cost);
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    hard_regno_to_pseudo_regno[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  First exclude hard registers
     that are live in or across this insn.  */

  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
  bad_spill_regs |= used_by_pseudos;
  bad_spill_regs |= used_by_pseudos2;

  /* Now find out which pseudos are allocated to it, and update
     hard_reg_n_uses.  */
  CLEAR_REG_SET (&pseudos_counted);

  /* PSEUDOS_COUNTED keeps a pseudo appearing in both sets from being
     counted twice by count_pseudo.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  /* The set was only needed for de-duplication; leave it clear for the
     next caller.  */
  CLEAR_REG_SET (&pseudos_counted);
}
1770 :
1771 : /* Vector of reload-numbers showing the order in which the reloads should
1772 : be processed. */
1773 : static short reload_order[MAX_RELOADS];
1774 :
1775 : /* This is used to keep track of the spill regs used in one insn. */
1776 : static HARD_REG_SET used_spill_regs_local;
1777 :
1778 : /* We decided to spill hard register SPILLED, which has a size of
1779 : SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1780 : is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1781 : update SPILL_COST/SPILL_ADD_COST. */
1782 :
1783 : static void
1784 0 : count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1785 : {
1786 0 : int freq = REG_FREQ (reg);
1787 0 : int r = reg_renumber[reg];
1788 0 : int nregs;
1789 :
1790 : /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1791 0 : if (ira_conflicts_p && r < 0)
1792 : return;
1793 :
1794 0 : gcc_assert (r >= 0);
1795 :
1796 0 : nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg));
1797 :
1798 0 : if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1799 0 : || spilled + spilled_nregs <= r || r + nregs <= spilled)
1800 : return;
1801 :
1802 0 : SET_REGNO_REG_SET (&spilled_pseudos, reg);
1803 :
1804 0 : spill_add_cost[r] -= freq;
1805 0 : while (nregs-- > 0)
1806 : {
1807 0 : hard_regno_to_pseudo_regno[r + nregs] = -1;
1808 0 : spill_cost[r + nregs] -= freq;
1809 : }
1810 : }
1811 :
/* Find a reload register to use for reload number ORDER within CHAIN's
   insn.  On success, record the choice in rld[reload_order[ORDER]]
   (regno/nregs), mark the hard regs in USED_SPILL_REGS_LOCAL, update the
   spill-cost bookkeeping for any pseudos thereby evicted, and return 1.
   Return 0 if no suitable hard register exists.  */

static int
find_reg (class insn_chain *chain, int order)
{
  int rnum = reload_order[order];
  struct reload *rl = rld + rnum;
  int best_cost = INT_MAX;
  int best_reg = -1;
  unsigned int i, j, n;
  int k;
  HARD_REG_SET not_usable;
  HARD_REG_SET used_by_other_reload;
  reg_set_iterator rsi;
  /* Static scratch buffers for the candidate lists handed to IRA;
     safe because reload is single-threaded.  */
  static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
  static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];

  /* A register is unusable if it is bad for spilling (locally or
     globally) or not in the class this reload requires.  */
  not_usable = (bad_spill_regs
		| bad_spill_regs_global
		| ~reg_class_contents[rl->rclass]);

  /* Collect hard regs already claimed by earlier, conflicting reloads
     of this insn.  */
  CLEAR_HARD_REG_SET (used_by_other_reload);
  for (k = 0; k < order; k++)
    {
      int other = reload_order[k];

      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
	for (j = 0; j < rld[other].nregs; j++)
	  SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
    }

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
#ifdef REG_ALLOC_ORDER
      unsigned int regno = reg_alloc_order[i];
#else
      unsigned int regno = i;
#endif

      if (! TEST_HARD_REG_BIT (not_usable, regno)
	  && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
	  && targetm.hard_regno_mode_ok (regno, rl->mode))
	{
	  int this_cost = spill_cost[regno];
	  int ok = 1;
	  unsigned int this_nregs = hard_regno_nregs (regno, rl->mode);

	  /* A multi-register value must find every covered hard reg
	     usable; accumulate the extra cost of the trailing regs.  */
	  for (j = 1; j < this_nregs; j++)
	    {
	      this_cost += spill_add_cost[regno + j];
	      if ((TEST_HARD_REG_BIT (not_usable, regno + j))
		  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
		ok = 0;
	    }
	  if (! ok)
	    continue;

	  if (ira_conflicts_p)
	    {
	      /* Ask IRA to find a better pseudo-register for
		 spilling.  */
	      for (n = j = 0; j < this_nregs; j++)
		{
		  int r = hard_regno_to_pseudo_regno[regno + j];

		  if (r < 0)
		    continue;
		  /* Skip adjacent duplicates (same pseudo spanning
		     consecutive hard regs).  */
		  if (n == 0 || regno_pseudo_regs[n - 1] != r)
		    regno_pseudo_regs[n++] = r;
		}
	      /* The list is -1 terminated.  */
	      regno_pseudo_regs[n++] = -1;
	      if (best_reg < 0
		  || ira_better_spill_reload_regno_p (regno_pseudo_regs,
						      best_regno_pseudo_regs,
						      rl->in, rl->out,
						      chain->insn))
		{
		  best_reg = regno;
		  for (j = 0;; j++)
		    {
		      best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
		      if (regno_pseudo_regs[j] < 0)
			break;
		    }
		}
	      continue;
	    }

	  /* Prefer a register that already holds the reloaded value:
	     it may save a copy.  */
	  if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
	    this_cost--;
	  if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
	    this_cost--;
	  if (this_cost < best_cost
	      /* Among registers with equal cost, prefer caller-saved ones, or
		 use REG_ALLOC_ORDER if it is defined.  */
	      || (this_cost == best_cost
#ifdef REG_ALLOC_ORDER
		  && (inv_reg_alloc_order[regno]
		      < inv_reg_alloc_order[best_reg])
#else
		  && crtl->abi->clobbers_full_reg_p (regno)
		  && !crtl->abi->clobbers_full_reg_p (best_reg)
#endif
		  ))
	    {
	      best_reg = regno;
	      best_cost = this_cost;
	    }
	}
    }
  if (best_reg == -1)
    return 0;

  if (dump_file)
    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);

  rl->nregs = hard_regno_nregs (best_reg, rl->mode);
  rl->regno = best_reg;

  /* Every pseudo live in or set/dead at this insn that overlaps the
     chosen registers must now be spilled.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  for (i = 0; i < rl->nregs; i++)
    {
      /* After the evictions above, the chosen registers must be free.  */
      gcc_assert (spill_cost[best_reg + i] == 0);
      gcc_assert (spill_add_cost[best_reg + i] == 0);
      gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
    }
  return 1;
}
1952 :
/* Find more reload regs to satisfy the remaining need of an insn, which
   is given by CHAIN.
   Do it by ascending class number, since otherwise a reg
   might be spilled for a big class and might fail to count
   for a smaller class even though it belongs to that class.
   On failure, sets the global FAILURE flag and returns early.  */

static void
find_reload_regs (class insn_chain *chain)
{
  int i;

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.  */
  for (i = 0; i < chain->n_reloads; i++)
    {
      /* Show whether this reload already has a hard reg.  */
      if (chain->rld[i].reg_rtx)
	{
	  chain->rld[i].regno = REGNO (chain->rld[i].reg_rtx);
	  chain->rld[i].nregs = REG_NREGS (chain->rld[i].reg_rtx);
	}
      else
	chain->rld[i].regno = -1;
      reload_order[i] = i;
    }

  /* Work on a copy in the global rld/n_reloads buffers, which the
     helpers below operate on.  */
  n_reloads = chain->n_reloads;
  memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));

  CLEAR_HARD_REG_SET (used_spill_regs_local);

  if (dump_file)
    fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));

  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* Compute the order of preference for hard registers to spill.  */

  order_regs_for_reload (chain);

  for (i = 0; i < n_reloads; i++)
    {
      int r = reload_order[i];

      /* Ignore reloads that got marked inoperative.  */
      if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
	  && ! rld[r].optional
	  && rld[r].regno == -1)
	if (! find_reg (chain, i))
	  {
	    if (dump_file)
	      fprintf (dump_file, "reload failure for reload %d\n", r);
	    spill_failure (chain->insn, rld[r].rclass);
	    failure = 1;
	    return;
	  }
    }

  /* Publish the registers chosen for this insn, both locally on the
     chain and in the pass-wide accumulated set.  */
  chain->used_spill_regs = used_spill_regs_local;
  used_spill_regs |= used_spill_regs_local;

  /* Copy the (possibly updated) reloads back to the chain.  */
  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
}
2018 :
2019 : static void
2020 0 : select_reload_regs (void)
2021 : {
2022 0 : class insn_chain *chain;
2023 :
2024 : /* Try to satisfy the needs for each insn. */
2025 0 : for (chain = insns_need_reload; chain != 0;
2026 0 : chain = chain->next_need_reload)
2027 0 : find_reload_regs (chain);
2028 0 : }
2029 :
/* Delete all insns that were inserted by emit_caller_save_insns during
   this iteration.  Removes each caller-save chain element from
   RELOAD_INSN_CHAIN, deletes its insn, and recycles the element onto
   UNUSED_INSN_CHAINS.  */
static void
delete_caller_save_insns (void)
{
  class insn_chain *c = reload_insn_chain;

  while (c != 0)
    {
      /* Consume a maximal run of caller-save elements.  */
      while (c != 0 && c->is_caller_save_insn)
	{
	  class insn_chain *next = c->next;
	  rtx_insn *insn = c->insn;

	  /* Keep the list head valid if we are deleting the head.  */
	  if (c == reload_insn_chain)
	    reload_insn_chain = next;
	  delete_insn (insn);

	  /* Unlink C from the doubly-linked chain.  */
	  if (next)
	    next->prev = c->prev;
	  if (c->prev)
	    c->prev->next = next;
	  /* Recycle C onto the free list of chain elements.  */
	  c->next = unused_insn_chains;
	  unused_insn_chains = c;
	  c = next;
	}
      if (c != 0)
	c = c->next;
    }
}
2060 :
2061 : /* Handle the failure to find a register to spill.
2062 : INSN should be one of the insns which needed this particular spill reg. */
2063 :
2064 : static void
2065 0 : spill_failure (rtx_insn *insn, enum reg_class rclass)
2066 : {
2067 0 : if (asm_noperands (PATTERN (insn)) >= 0)
2068 0 : error_for_asm (insn, "cannot find a register in class %qs while "
2069 : "reloading %<asm%>",
2070 0 : reg_class_names[rclass]);
2071 : else
2072 : {
2073 0 : error ("unable to find a register to spill in class %qs",
2074 0 : reg_class_names[rclass]);
2075 :
2076 0 : if (dump_file)
2077 : {
2078 0 : fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2079 0 : debug_reload_to_stream (dump_file);
2080 : }
2081 0 : fatal_insn ("this is the insn:", insn);
2082 : }
2083 0 : }
2084 :
2085 : /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2086 : data that is dead in INSN. */
2087 :
2088 : static void
2089 0 : delete_dead_insn (rtx_insn *insn)
2090 : {
2091 0 : rtx_insn *prev = prev_active_insn (insn);
2092 0 : rtx prev_dest;
2093 :
2094 : /* If the previous insn sets a register that dies in our insn make
2095 : a note that we want to run DCE immediately after reload.
2096 :
2097 : We used to delete the previous insn & recurse, but that's wrong for
2098 : block local equivalences. Instead of trying to figure out the exact
2099 : circumstances where we can delete the potentially dead insns, just
2100 : let DCE do the job. */
2101 0 : if (prev && BLOCK_FOR_INSN (prev) == BLOCK_FOR_INSN (insn)
2102 0 : && GET_CODE (PATTERN (prev)) == SET
2103 0 : && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2104 0 : && reg_mentioned_p (prev_dest, PATTERN (insn))
2105 0 : && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2106 0 : && ! side_effects_p (SET_SRC (PATTERN (prev))))
2107 0 : need_dce = 1;
2108 :
2109 0 : SET_INSN_DELETED (insn);
2110 0 : }
2111 :
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.

   DONT_SHARE_P forces a fresh, private stack slot even when IRA could
   supply a shared one.  */

static void
alter_reg (int i, int from_reg, bool dont_share_p)
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (!REG_P (regno_reg_rtx[i]))
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  SET_REGNO (regno_reg_rtx[i],
	     reg_renumber[i] >= 0 ? reg_renumber[i] : i);

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && REG_N_REFS (i) > 0
      && reg_equiv_constant (i) == 0
      && (reg_equiv_invariant (i) == 0
	  || reg_equiv_init (i) == 0)
      && reg_equiv_memory_loc (i) == 0)
    {
      rtx x = NULL_RTX;
      machine_mode mode = GET_MODE (regno_reg_rtx[i]);
      poly_uint64 inherent_size = GET_MODE_SIZE (mode);
      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
      machine_mode wider_mode = wider_subreg_mode (mode, reg_max_ref_mode[i]);
      poly_uint64 total_size = GET_MODE_SIZE (wider_mode);
      /* ??? Seems strange to derive the minimum alignment from the size,
	 but that's the traditional behavior.  For polynomial-size modes,
	 the natural extension is to use the minimum possible size.  */
      unsigned int min_align
	= constant_lower_bound (GET_MODE_BITSIZE (reg_max_ref_mode[i]));
      poly_int64 adjust = 0;

      something_was_spilled = true;

      if (ira_conflicts_p)
	{
	  /* Mark the spill for IRA.  */
	  SET_REGNO_REG_SET (&spilled_pseudos, i);
	  if (!dont_share_p)
	    x = ira_reuse_stack_slot (i, inherent_size, total_size);
	}

      /* If IRA supplied a reusable slot, use it as-is.  */
      if (x)
	;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, than the previous slot.  */
      else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
	{
	  rtx stack_slot;

	  /* The sizes are taken from a subreg operation, which guarantees
	     that they're ordered.  */
	  gcc_checking_assert (ordered_p (total_size, inherent_size));

	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || maybe_gt (total_size, inherent_size)
				  ? -1 : 0);

	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = inherent_size - total_size;
	      if (maybe_ne (adjust, 0))
		{
		  poly_uint64 total_bits = total_size * BITS_PER_UNIT;
		  machine_mode mem_mode
		    = int_mode_for_size (total_bits, 1).else_blk ();
		  stack_slot = adjust_address_nv (x, mem_mode, adjust);
		}
	    }

	  if (! dont_share_p && ira_conflicts_p)
	    /* Inform IRA about allocating a new stack slot.  */
	    ira_mark_new_stack_slot (stack_slot, i, total_size);
	}

      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && known_ge (spill_stack_slot_width[from_reg], total_size)
	       && known_ge (GET_MODE_SIZE
			    (GET_MODE (spill_stack_slot[from_reg])),
			    inherent_size)
	       && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
	x = spill_stack_slot[from_reg];

      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  rtx stack_slot;

	  /* Grow the requirements to also cover whatever the existing
	     shared slot for FROM_REG provided.  */
	  if (spill_stack_slot[from_reg])
	    {
	      if (partial_subreg_p (mode,
				    GET_MODE (spill_stack_slot[from_reg])))
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      total_size = ordered_max (total_size,
					spill_stack_slot_width[from_reg]);
	      if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
		min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
	    }

	  /* The sizes are taken from a subreg operation, which guarantees
	     that they're ordered.  */
	  gcc_checking_assert (ordered_p (total_size, inherent_size));

	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || maybe_gt (total_size, inherent_size)
				  ? -1 : 0);
	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (maybe_ne (adjust, 0))
		{
		  poly_uint64 total_bits = total_size * BITS_PER_UNIT;
		  machine_mode mem_mode
		    = int_mode_for_size (total_bits, 1).else_blk ();
		  stack_slot = adjust_address_nv (x, mem_mode, adjust);
		}
	    }

	  /* Remember the slot so later spills from FROM_REG can share it.  */
	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      adjust += subreg_size_lowpart_offset (inherent_size, total_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);

      /* Set all of the memory attributes as appropriate for a spill.  */
      set_mem_attrs_for_spill (x);

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc (i) = x;
    }
}
2290 :
2291 : /* Mark the slots in regs_ever_live for the hard regs used by
2292 : pseudo-reg number REGNO, accessed in MODE. */
2293 :
2294 : static void
2295 0 : mark_home_live_1 (int regno, machine_mode mode)
2296 : {
2297 0 : int i, lim;
2298 :
2299 0 : i = reg_renumber[regno];
2300 0 : if (i < 0)
2301 : return;
2302 0 : lim = end_hard_regno (mode, i);
2303 0 : while (i < lim)
2304 0 : df_set_regs_ever_live (i++, true);
2305 : }
2306 :
2307 : /* Mark the slots in regs_ever_live for the hard regs
2308 : used by pseudo-reg number REGNO. */
2309 :
2310 : void
2311 0 : mark_home_live (int regno)
2312 : {
2313 0 : if (reg_renumber[regno] >= 0)
2314 0 : mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2315 0 : }
2316 :
2317 : /* This function handles the tracking of elimination offsets around branches.
2318 :
2319 : X is a piece of RTL being scanned.
2320 :
2321 : INSN is the insn that it came from, if any.
2322 :
2323 : INITIAL_P is nonzero if we are to set the offset to be the initial
2324 : offset and zero if we are setting the offset of the label to be the
2325 : current offset. */
2326 :
2327 : static void
2328 0 : set_label_offsets (rtx x, rtx_insn *insn, int initial_p)
2329 : {
2330 0 : enum rtx_code code = GET_CODE (x);
2331 0 : rtx tem;
2332 0 : unsigned int i;
2333 0 : struct elim_table *p;
2334 :
2335 0 : switch (code)
2336 : {
2337 0 : case LABEL_REF:
2338 0 : if (LABEL_REF_NONLOCAL_P (x))
2339 : return;
2340 :
2341 0 : x = label_ref_label (x);
2342 :
2343 : /* fall through */
2344 :
2345 0 : case CODE_LABEL:
2346 : /* If we know nothing about this label, set the desired offsets. Note
2347 : that this sets the offset at a label to be the offset before a label
2348 : if we don't know anything about the label. This is not correct for
2349 : the label after a BARRIER, but is the best guess we can make. If
2350 : we guessed wrong, we will suppress an elimination that might have
2351 : been possible had we been able to guess correctly. */
2352 :
2353 0 : if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2354 : {
2355 0 : for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2356 0 : offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2357 0 : = (initial_p ? reg_eliminate[i].initial_offset
2358 0 : : reg_eliminate[i].offset);
2359 0 : offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2360 : }
2361 :
2362 : /* Otherwise, if this is the definition of a label and it is
2363 : preceded by a BARRIER, set our offsets to the known offset of
2364 : that label. */
2365 :
2366 0 : else if (x == insn
2367 0 : && (tem = prev_nonnote_insn (insn)) != 0
2368 0 : && BARRIER_P (tem))
2369 0 : set_offsets_for_label (insn);
2370 : else
2371 : /* If neither of the above cases is true, compare each offset
2372 : with those previously recorded and suppress any eliminations
2373 : where the offsets disagree. */
2374 :
2375 0 : for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2376 0 : if (maybe_ne (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i],
2377 0 : (initial_p ? reg_eliminate[i].initial_offset
2378 0 : : reg_eliminate[i].offset)))
2379 0 : reg_eliminate[i].can_eliminate = 0;
2380 :
2381 : return;
2382 :
2383 0 : case JUMP_TABLE_DATA:
2384 0 : set_label_offsets (PATTERN (insn), insn, initial_p);
2385 0 : return;
2386 :
2387 0 : case JUMP_INSN:
2388 0 : set_label_offsets (PATTERN (insn), insn, initial_p);
2389 :
2390 : /* fall through */
2391 :
2392 0 : case INSN:
2393 0 : case CALL_INSN:
2394 : /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
2395 : to indirectly and hence must have all eliminations at their
2396 : initial offsets. */
2397 0 : for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2398 0 : if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
2399 0 : set_label_offsets (XEXP (tem, 0), insn, 1);
2400 : return;
2401 :
2402 : case PARALLEL:
2403 : case ADDR_VEC:
2404 : case ADDR_DIFF_VEC:
2405 : /* Each of the labels in the parallel or address vector must be
2406 : at their initial offsets. We want the first field for PARALLEL
2407 : and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2408 :
2409 0 : for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2410 0 : set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2411 : insn, initial_p);
2412 : return;
2413 :
2414 0 : case SET:
2415 : /* We only care about setting PC. If the source is not RETURN,
2416 : IF_THEN_ELSE, or a label, disable any eliminations not at
2417 : their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2418 : isn't one of those possibilities. For branches to a label,
2419 : call ourselves recursively.
2420 :
2421 : Note that this can disable elimination unnecessarily when we have
2422 : a non-local goto since it will look like a non-constant jump to
2423 : someplace in the current function. This isn't a significant
2424 : problem since such jumps will normally be when all elimination
2425 : pairs are back to their initial offsets. */
2426 :
2427 0 : if (SET_DEST (x) != pc_rtx)
2428 : return;
2429 :
2430 0 : switch (GET_CODE (SET_SRC (x)))
2431 : {
2432 : case PC:
2433 : case RETURN:
2434 : return;
2435 :
2436 : case LABEL_REF:
2437 : set_label_offsets (SET_SRC (x), insn, initial_p);
2438 : return;
2439 :
2440 0 : case IF_THEN_ELSE:
2441 0 : tem = XEXP (SET_SRC (x), 1);
2442 0 : if (GET_CODE (tem) == LABEL_REF)
2443 0 : set_label_offsets (label_ref_label (tem), insn, initial_p);
2444 0 : else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2445 : break;
2446 :
2447 0 : tem = XEXP (SET_SRC (x), 2);
2448 0 : if (GET_CODE (tem) == LABEL_REF)
2449 0 : set_label_offsets (label_ref_label (tem), insn, initial_p);
2450 0 : else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2451 : break;
2452 : return;
2453 :
2454 : default:
2455 : break;
2456 : }
2457 :
2458 : /* If we reach here, all eliminations must be at their initial
2459 : offset because we are doing a jump to a variable address. */
2460 0 : for (p = reg_eliminate; p < ®_eliminate[NUM_ELIMINABLE_REGS]; p++)
2461 0 : if (maybe_ne (p->offset, p->initial_offset))
2462 0 : p->can_eliminate = 0;
2463 : break;
2464 :
2465 : default:
2466 : break;
2467 : }
2468 : }
2469 :
/* This function examines every reg that occurs in X and adjusts the
   costs for its elimination which are gathered by IRA.  INSN is the
   insn in which X occurs.  We do not recurse into MEM expressions.  */

static void
note_reg_elim_costly (const_rtx x, rtx insn)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      /* Registers inside a MEM are not references "outside a MEM", so
	 skip the whole subtree.  */
      if (MEM_P (x))
	iter.skip_subrtxes ();
      /* Only pseudos that have both an init insn and an invariant
	 equivalence are candidates for cost adjustment.  */
      else if (REG_P (x)
	       && REGNO (x) >= FIRST_PSEUDO_REGISTER
	       && reg_equiv_init (REGNO (x))
	       && reg_equiv_invariant (REGNO (x)))
	{
	  rtx t = reg_equiv_invariant (REGNO (x));
	  /* Cost the eliminated form of the equivalence as it would
	     appear at this use site.  */
	  rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
	  int cost = set_src_cost (new_rtx, Pmode,
				   optimize_bb_for_speed_p (elim_bb));
	  int freq = REG_FREQ_FROM_BB (elim_bb);

	  /* Charge the (frequency-weighted) cost against keeping the
	     pseudo unallocated.  */
	  if (cost != 0)
	    ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
	}
    }
}
2499 :
2500 : /* Scan X and replace any eliminable registers (such as fp) with a
2501 : replacement (such as sp), plus an offset.
2502 :
2503 : MEM_MODE is the mode of an enclosing MEM. We need this to know how
2504 : much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2505 : MEM, we are allowed to replace a sum of a register and the constant zero
2506 : with the register, which we cannot do outside a MEM. In addition, we need
2507 : to record the fact that a register is referenced outside a MEM.
2508 :
2509 : If INSN is an insn, it is the insn containing X. If we replace a REG
2510 : in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2511 : CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2512 : the REG is being modified.
2513 :
2514 : Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2515 : That's used when we eliminate in expressions stored in notes.
2516 : This means, do not set ref_outside_mem even if the reference
2517 : is outside of MEMs.
2518 :
2519 : If FOR_COSTS is true, we are being called before reload in order to
2520 : estimate the costs of keeping registers with an equivalence unallocated.
2521 :
2522 : REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2523 : replacements done assuming all offsets are at their initial values. If
2524 : they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2525 : encounter, return the actual location so that find_reloads will do
2526 : the proper thing. */
2527 :
2528 : static rtx
2529 0 : eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
2530 : bool may_use_invariant, bool for_costs)
2531 : {
2532 0 : enum rtx_code code = GET_CODE (x);
2533 0 : struct elim_table *ep;
2534 0 : int regno;
2535 0 : rtx new_rtx;
2536 0 : int i, j;
2537 0 : const char *fmt;
2538 0 : int copied = 0;
2539 :
2540 0 : if (! current_function_decl)
2541 : return x;
2542 :
2543 0 : switch (code)
2544 : {
2545 : CASE_CONST_ANY:
2546 : case CONST:
2547 : case SYMBOL_REF:
2548 : case CODE_LABEL:
2549 : case PC:
2550 : case ASM_INPUT:
2551 : case ADDR_VEC:
2552 : case ADDR_DIFF_VEC:
2553 : case RETURN:
2554 : return x;
2555 :
2556 0 : case REG:
2557 0 : regno = REGNO (x);
2558 :
2559 : /* First handle the case where we encounter a bare register that
2560 : is eliminable. Replace it with a PLUS. */
2561 0 : if (regno < FIRST_PSEUDO_REGISTER)
2562 : {
2563 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2564 : ep++)
2565 0 : if (ep->from_rtx == x && ep->can_eliminate)
2566 0 : return plus_constant (Pmode, ep->to_rtx, ep->previous_offset);
2567 :
2568 : }
2569 0 : else if (reg_renumber && reg_renumber[regno] < 0
2570 0 : && reg_equivs
2571 0 : && reg_equiv_invariant (regno))
2572 : {
2573 0 : if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2574 0 : return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
2575 0 : mem_mode, insn, true, for_costs);
2576 : /* There exists at least one use of REGNO that cannot be
2577 : eliminated. Prevent the defining insn from being deleted. */
2578 0 : reg_equiv_init (regno) = NULL;
2579 0 : if (!for_costs)
2580 0 : alter_reg (regno, -1, true);
2581 : }
2582 : return x;
2583 :
2584 : /* You might think handling MINUS in a manner similar to PLUS is a
2585 : good idea. It is not. It has been tried multiple times and every
2586 : time the change has had to have been reverted.
2587 :
2588 : Other parts of reload know a PLUS is special (gen_reload for example)
2589 : and require special code to handle code a reloaded PLUS operand.
2590 :
2591 : Also consider backends where the flags register is clobbered by a
2592 : MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2593 : lea instruction comes to mind). If we try to reload a MINUS, we
2594 : may kill the flags register that was holding a useful value.
2595 :
2596 : So, please before trying to handle MINUS, consider reload as a
2597 : whole instead of this little section as well as the backend issues. */
2598 0 : case PLUS:
2599 : /* If this is the sum of an eliminable register and a constant, rework
2600 : the sum. */
2601 0 : if (REG_P (XEXP (x, 0))
2602 0 : && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2603 0 : && CONSTANT_P (XEXP (x, 1)))
2604 : {
2605 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2606 : ep++)
2607 0 : if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2608 : {
2609 : /* The only time we want to replace a PLUS with a REG (this
2610 : occurs when the constant operand of the PLUS is the negative
2611 : of the offset) is when we are inside a MEM. We won't want
2612 : to do so at other times because that would change the
2613 : structure of the insn in a way that reload can't handle.
2614 : We special-case the commonest situation in
2615 : eliminate_regs_in_insn, so just replace a PLUS with a
2616 : PLUS here, unless inside a MEM. In DEBUG_INSNs, it is
2617 : always ok to replace a PLUS with just a REG. */
2618 0 : if ((mem_mode != 0 || (insn && DEBUG_INSN_P (insn)))
2619 0 : && CONST_INT_P (XEXP (x, 1))
2620 0 : && known_eq (INTVAL (XEXP (x, 1)), -ep->previous_offset))
2621 0 : return ep->to_rtx;
2622 : else
2623 0 : return gen_rtx_PLUS (Pmode, ep->to_rtx,
2624 : plus_constant (Pmode, XEXP (x, 1),
2625 : ep->previous_offset));
2626 : }
2627 :
2628 : /* If the register is not eliminable, we are done since the other
2629 : operand is a constant. */
2630 : return x;
2631 : }
2632 :
2633 : /* If this is part of an address, we want to bring any constant to the
2634 : outermost PLUS. We will do this by doing register replacement in
2635 : our operands and seeing if a constant shows up in one of them.
2636 :
2637 : Note that there is no risk of modifying the structure of the insn,
2638 : since we only get called for its operands, thus we are either
2639 : modifying the address inside a MEM, or something like an address
2640 : operand of a load-address insn. */
2641 :
2642 0 : {
2643 0 : rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2644 : for_costs);
2645 0 : rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2646 : for_costs);
2647 :
2648 0 : if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2649 : {
2650 : /* If one side is a PLUS and the other side is a pseudo that
2651 : didn't get a hard register but has a reg_equiv_constant,
2652 : we must replace the constant here since it may no longer
2653 : be in the position of any operand. */
2654 0 : if (GET_CODE (new0) == PLUS && REG_P (new1)
2655 0 : && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2656 0 : && reg_renumber[REGNO (new1)] < 0
2657 0 : && reg_equivs
2658 0 : && reg_equiv_constant (REGNO (new1)) != 0)
2659 : new1 = reg_equiv_constant (REGNO (new1));
2660 0 : else if (GET_CODE (new1) == PLUS && REG_P (new0)
2661 0 : && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2662 0 : && reg_renumber[REGNO (new0)] < 0
2663 0 : && reg_equiv_constant (REGNO (new0)) != 0)
2664 : new0 = reg_equiv_constant (REGNO (new0));
2665 :
2666 0 : new_rtx = form_sum (GET_MODE (x), new0, new1);
2667 :
2668 : /* As above, if we are not inside a MEM we do not want to
2669 : turn a PLUS into something else. We might try to do so here
2670 : for an addition of 0 if we aren't optimizing. */
2671 0 : if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2672 0 : return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2673 : else
2674 : return new_rtx;
2675 : }
2676 : }
2677 : return x;
2678 :
2679 0 : case MULT:
2680 : /* If this is the product of an eliminable register and a
2681 : constant, apply the distribute law and move the constant out
2682 : so that we have (plus (mult ..) ..). This is needed in order
2683 : to keep load-address insns valid. This case is pathological.
2684 : We ignore the possibility of overflow here. */
2685 0 : if (REG_P (XEXP (x, 0))
2686 0 : && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2687 0 : && CONST_INT_P (XEXP (x, 1)))
2688 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
2689 : ep++)
2690 0 : if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2691 : {
2692 0 : if (! mem_mode
2693 : /* Refs inside notes or in DEBUG_INSNs don't count for
2694 : this purpose. */
2695 0 : && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2696 0 : || GET_CODE (insn) == INSN_LIST
2697 0 : || DEBUG_INSN_P (insn))))
2698 0 : ep->ref_outside_mem = 1;
2699 :
2700 0 : return
2701 0 : plus_constant (Pmode,
2702 0 : gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2703 0 : ep->previous_offset * INTVAL (XEXP (x, 1)));
2704 : }
2705 :
2706 : /* fall through */
2707 :
2708 0 : case CALL:
2709 0 : case COMPARE:
2710 : /* See comments before PLUS about handling MINUS. */
2711 0 : case MINUS:
2712 0 : case DIV: case UDIV:
2713 0 : case MOD: case UMOD:
2714 0 : case AND: case IOR: case XOR:
2715 0 : case ROTATERT: case ROTATE:
2716 0 : case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2717 0 : case NE: case EQ:
2718 0 : case GE: case GT: case GEU: case GTU:
2719 0 : case LE: case LT: case LEU: case LTU:
2720 0 : {
2721 0 : rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2722 : for_costs);
2723 0 : rtx new1 = XEXP (x, 1)
2724 0 : ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
2725 : for_costs) : 0;
2726 :
2727 0 : if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2728 0 : return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2729 : }
2730 : return x;
2731 :
2732 0 : case EXPR_LIST:
2733 : /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2734 0 : if (XEXP (x, 0))
2735 : {
2736 0 : new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2737 : for_costs);
2738 0 : if (new_rtx != XEXP (x, 0))
2739 : {
2740 : /* If this is a REG_DEAD note, it is not valid anymore.
2741 : Using the eliminated version could result in creating a
2742 : REG_DEAD note for the stack or frame pointer. */
2743 0 : if (REG_NOTE_KIND (x) == REG_DEAD)
2744 0 : return (XEXP (x, 1)
2745 0 : ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2746 : for_costs)
2747 : : NULL_RTX);
2748 :
2749 0 : x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2750 : }
2751 : }
2752 :
2753 : /* fall through */
2754 :
2755 0 : case INSN_LIST:
2756 0 : case INT_LIST:
2757 : /* Now do eliminations in the rest of the chain. If this was
2758 : an EXPR_LIST, this might result in allocating more memory than is
2759 : strictly needed, but it simplifies the code. */
2760 0 : if (XEXP (x, 1))
2761 : {
2762 0 : new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2763 : for_costs);
2764 0 : if (new_rtx != XEXP (x, 1))
2765 0 : return
2766 0 : gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2767 : }
2768 : return x;
2769 :
2770 : case PRE_INC:
2771 : case POST_INC:
2772 : case PRE_DEC:
2773 : case POST_DEC:
2774 : /* We do not support elimination of a register that is modified.
2775 : elimination_effects has already make sure that this does not
2776 : happen. */
2777 : return x;
2778 :
2779 0 : case PRE_MODIFY:
2780 0 : case POST_MODIFY:
2781 : /* We do not support elimination of a register that is modified.
2782 : elimination_effects has already make sure that this does not
2783 : happen. The only remaining case we need to consider here is
2784 : that the increment value may be an eliminable register. */
2785 0 : if (GET_CODE (XEXP (x, 1)) == PLUS
2786 0 : && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2787 : {
2788 0 : rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2789 : insn, true, for_costs);
2790 :
2791 0 : if (new_rtx != XEXP (XEXP (x, 1), 1))
2792 0 : return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2793 : gen_rtx_PLUS (GET_MODE (x),
2794 : XEXP (x, 0), new_rtx));
2795 : }
2796 : return x;
2797 :
2798 0 : case STRICT_LOW_PART:
2799 0 : case NEG: case NOT:
2800 0 : case SIGN_EXTEND: case ZERO_EXTEND:
2801 0 : case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2802 0 : case FLOAT: case FIX:
2803 0 : case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2804 0 : case ABS:
2805 0 : case SQRT:
2806 0 : case FFS:
2807 0 : case CLZ:
2808 0 : case CTZ:
2809 0 : case POPCOUNT:
2810 0 : case PARITY:
2811 0 : case BSWAP:
2812 0 : new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2813 : for_costs);
2814 0 : if (new_rtx != XEXP (x, 0))
2815 0 : return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2816 : return x;
2817 :
2818 0 : case SUBREG:
2819 : /* Similar to above processing, but preserve SUBREG_BYTE.
2820 : Convert (subreg (mem)) to (mem) if not paradoxical.
2821 : Also, if we have a non-paradoxical (subreg (pseudo)) and the
2822 : pseudo didn't get a hard reg, we must replace this with the
2823 : eliminated version of the memory location because push_reload
2824 : may do the replacement in certain circumstances. */
2825 0 : if (REG_P (SUBREG_REG (x))
2826 0 : && !paradoxical_subreg_p (x)
2827 0 : && reg_equivs
2828 0 : && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
2829 : {
2830 : new_rtx = SUBREG_REG (x);
2831 : }
2832 : else
2833 0 : new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);
2834 :
2835 0 : if (new_rtx != SUBREG_REG (x))
2836 : {
2837 0 : poly_int64 x_size = GET_MODE_SIZE (GET_MODE (x));
2838 0 : poly_int64 new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2839 :
2840 0 : if (MEM_P (new_rtx)
2841 0 : && ((partial_subreg_p (GET_MODE (x), GET_MODE (new_rtx))
2842 : /* On RISC machines, combine can create rtl of the form
2843 : (set (subreg:m1 (reg:m2 R) 0) ...)
2844 : where m1 < m2, and expects something interesting to
2845 : happen to the entire word. Moreover, it will use the
2846 : (reg:m2 R) later, expecting all bits to be preserved.
2847 : So if the number of words is the same, preserve the
2848 : subreg so that push_reload can see it. */
2849 : && !(WORD_REGISTER_OPERATIONS
2850 : && known_equal_after_align_down (x_size - 1,
2851 : new_size - 1,
2852 : UNITS_PER_WORD)))
2853 0 : || known_eq (x_size, new_size))
2854 : )
2855 0 : return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2856 0 : else if (insn && GET_CODE (insn) == DEBUG_INSN)
2857 0 : return gen_rtx_raw_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2858 : else
2859 0 : return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2860 : }
2861 :
2862 : return x;
2863 :
2864 0 : case MEM:
2865 : /* Our only special processing is to pass the mode of the MEM to our
2866 : recursive call and copy the flags. While we are here, handle this
2867 : case more efficiently. */
2868 :
2869 0 : new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
2870 : for_costs);
2871 0 : if (for_costs
2872 0 : && memory_address_p (GET_MODE (x), XEXP (x, 0))
2873 0 : && !memory_address_p (GET_MODE (x), new_rtx))
2874 0 : note_reg_elim_costly (XEXP (x, 0), insn);
2875 :
2876 0 : return replace_equiv_address_nv (x, new_rtx);
2877 :
2878 0 : case USE:
2879 : /* Handle insn_list USE that a call to a pure function may generate. */
2880 0 : new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
2881 : for_costs);
2882 0 : if (new_rtx != XEXP (x, 0))
2883 0 : return gen_rtx_USE (GET_MODE (x), new_rtx);
2884 : return x;
2885 :
2886 0 : case CLOBBER:
2887 0 : case ASM_OPERANDS:
2888 0 : gcc_assert (insn && DEBUG_INSN_P (insn));
2889 : break;
2890 :
2891 0 : case SET:
2892 0 : gcc_unreachable ();
2893 :
2894 : default:
2895 : break;
2896 : }
2897 :
2898 : /* Process each of our operands recursively. If any have changed, make a
2899 : copy of the rtx. */
2900 0 : fmt = GET_RTX_FORMAT (code);
2901 0 : for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2902 : {
2903 0 : if (*fmt == 'e')
2904 : {
2905 0 : new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
2906 : for_costs);
2907 0 : if (new_rtx != XEXP (x, i) && ! copied)
2908 : {
2909 0 : x = shallow_copy_rtx (x);
2910 0 : copied = 1;
2911 : }
2912 0 : XEXP (x, i) = new_rtx;
2913 : }
2914 0 : else if (*fmt == 'E')
2915 : {
2916 : int copied_vec = 0;
2917 0 : for (j = 0; j < XVECLEN (x, i); j++)
2918 : {
2919 0 : new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
2920 : for_costs);
2921 0 : if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2922 : {
2923 0 : rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2924 0 : XVEC (x, i)->elem);
2925 0 : if (! copied)
2926 : {
2927 0 : x = shallow_copy_rtx (x);
2928 0 : copied = 1;
2929 : }
2930 0 : XVEC (x, i) = new_v;
2931 0 : copied_vec = 1;
2932 : }
2933 0 : XVECEXP (x, i, j) = new_rtx;
2934 : }
2935 : }
2936 : }
2937 :
2938 : return x;
2939 : }
2940 :
2941 : rtx
2942 0 : eliminate_regs (rtx x, machine_mode mem_mode, rtx insn)
2943 : {
2944 0 : if (reg_eliminate == NULL)
2945 : {
2946 0 : gcc_assert (targetm.no_register_allocation);
2947 : return x;
2948 : }
2949 0 : return eliminate_regs_1 (x, mem_mode, insn, false, false);
2950 : }
2951 :
2952 : /* Scan rtx X for modifications of elimination target registers. Update
2953 : the table of eliminables to reflect the changed state. MEM_MODE is
2954 : the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2955 :
static void
elimination_effects (rtx x, machine_mode mem_mode)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  int i, j;
  const char *fmt;

  switch (code)
    {
    /* Constants, labels and the like contain no registers, so they can
       neither modify nor reference an elimination register.  */
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return;

    case REG:
      regno = REGNO (x);

      /* A bare hard register that is the source of a still-live
	 elimination: record whether this reference occurs outside a MEM,
	 since such references constrain when the elimination offset may
	 change.  (No replacement happens here; that is done by
	 eliminate_regs_1.)  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      {
		if (! mem_mode)
		  ep->ref_outside_mem = 1;
		return;
	      }

	}
      /* A spilled pseudo with a non-invariant constant equivalence: the
	 equivalence itself may mention eliminable registers, so scan it.
	 NOTE(review): reg_renumber is dereferenced here without the null
	 check that eliminate_regs_1 performs -- presumably this function
	 only runs once renumbering exists; confirm.  */
      else if (reg_renumber[regno] < 0
	       && reg_equivs
	       && reg_equiv_constant (regno)
	       && ! function_invariant_p (reg_equiv_constant (regno)))
	elimination_effects (reg_equiv_constant (regno), mem_mode);
      return;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      /* If we modify the source of an elimination rule, disable it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      /* If we modify the target of an elimination rule by adding a constant,
	 update its offset.  If we modify the target in any other way, we'll
	 have to disable the rule as well.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  {
	    poly_int64 size = GET_MODE_SIZE (mem_mode);

	    /* If more bytes than MEM_MODE are pushed, account for them.  */
#ifdef PUSH_ROUNDING
	    if (ep->to_rtx == stack_pointer_rtx)
	      size = PUSH_ROUNDING (size);
#endif
	    /* A decrement moves the target away from the elimination
	       source by SIZE, so the compensating offset grows; an
	       increment shrinks it.  */
	    if (code == PRE_DEC || code == POST_DEC)
	      ep->offset += size;
	    else if (code == PRE_INC || code == POST_INC)
	      ep->offset -= size;
	    else if (code == PRE_MODIFY || code == POST_MODIFY)
	      {
		if (GET_CODE (XEXP (x, 1)) == PLUS
		    && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
		    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
		  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
		else
		  ep->can_eliminate = 0;
	      }
	  }

      /* These two aren't unary operators.  */
      if (code == POST_MODIFY || code == PRE_MODIFY)
	break;

      /* Fall through to generic unary operation case.  */
      gcc_fallthrough ();
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SUBREG:
      /* A non-paradoxical subreg of a pseudo with a memory equivalence
	 will be turned into that (already processed) memory location, so
	 there is nothing further to scan here.  */
      if (REG_P (SUBREG_REG (x))
	  && !paradoxical_subreg_p (x)
	  && reg_equivs
	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
	return;

      elimination_effects (SUBREG_REG (x), mem_mode);
      return;

    case USE:
      /* If using a register that is the source of an eliminate we still
	 think can be performed, note it cannot be performed since we don't
	 know how this register is used.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case CLOBBER:
      /* If clobbering a register that is the replacement register for an
	 elimination we still think can be performed, note that it cannot
	 be performed.  Otherwise, we need not be concerned about it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SET:
      /* Check for setting a register that we know about.  */
      if (REG_P (SET_DEST (x)))
	{
	  /* See if this is setting the replacement register for an
	     elimination.

	     If DEST is the hard frame pointer, we do nothing because we
	     assume that all assignments to the frame pointer are for
	     non-local gotos and are being done at a time when they are valid
	     and do not disturb anything else.  Some machines want to
	     eliminate a fake argument pointer (or even a fake frame pointer)
	     with either the real frame or the stack pointer.  Assignments to
	     the hard frame pointer must not prevent this elimination.  */

	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->to_rtx == SET_DEST (x)
		&& SET_DEST (x) != hard_frame_pointer_rtx)
	      {
		/* If it is being incremented, adjust the offset.  Otherwise,
		   this elimination can't be done.  */
		rtx src = SET_SRC (x);

		if (GET_CODE (src) == PLUS
		    && XEXP (src, 0) == SET_DEST (x)
		    && CONST_INT_P (XEXP (src, 1)))
		  ep->offset -= INTVAL (XEXP (src, 1));
		else
		  ep->can_eliminate = 0;
	      }
	}

      /* Neither side of the SET is inside a MEM at top level, so pass
	 VOIDmode as the enclosing mode.  */
      elimination_effects (SET_DEST (x), VOIDmode);
      elimination_effects (SET_SRC (x), VOIDmode);
      return;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call.  */
      elimination_effects (XEXP (x, 0), GET_MODE (x));
      return;

    default:
      break;
    }

  /* Generic case: recurse into every rtx ('e') and rtvec ('E') operand.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	elimination_effects (XEXP (x, i), mem_mode);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  elimination_effects (XVECEXP (x, i, j), mem_mode);
    }
}
3153 :
3154 : /* Descend through rtx X and verify that no references to eliminable registers
3155 : remain. If any do remain, mark the involved register as not
3156 : eliminable. */
3157 :
3158 : static void
3159 0 : check_eliminable_occurrences (rtx x)
3160 : {
3161 0 : const char *fmt;
3162 0 : int i;
3163 0 : enum rtx_code code;
3164 :
3165 0 : if (x == 0)
3166 : return;
3167 :
3168 0 : code = GET_CODE (x);
3169 :
3170 0 : if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3171 : {
3172 0 : struct elim_table *ep;
3173 :
3174 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3175 0 : if (ep->from_rtx == x)
3176 0 : ep->can_eliminate = 0;
3177 : return;
3178 : }
3179 :
3180 0 : fmt = GET_RTX_FORMAT (code);
3181 0 : for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3182 : {
3183 0 : if (*fmt == 'e')
3184 0 : check_eliminable_occurrences (XEXP (x, i));
3185 0 : else if (*fmt == 'E')
3186 : {
3187 : int j;
3188 0 : for (j = 0; j < XVECLEN (x, i); j++)
3189 0 : check_eliminable_occurrences (XVECEXP (x, i, j));
3190 : }
3191 : }
3192 : }
3193 :
3194 : /* Scan INSN and eliminate all eliminable registers in it.
3195 :
3196 : If REPLACE is nonzero, do the replacement destructively. Also
3197 : delete the insn as dead it if it is setting an eliminable register.
3198 :
3199 : If REPLACE is zero, do all our allocations in reload_obstack.
3200 :
3201 : If no eliminations were done and this insn doesn't require any elimination
3202 : processing (these are not identical conditions: it might be updating sp,
3203 : but not referencing fp; this needs to be seen during reload_as_needed so
3204 : that the offset between fp and sp can be taken into consideration), zero
3205 : is returned. Otherwise, 1 is returned. */
3206 :
static int
eliminate_regs_in_insn (rtx_insn *insn, int replace)
{
  int icode = recog_memoized (insn);
  rtx old_body = PATTERN (insn);
  int insn_is_asm = asm_noperands (old_body) >= 0;
  rtx old_set = single_set (insn);
  rtx new_body;
  int val = 0;			/* Nonzero once anything changed.  */
  int i;
  rtx substed_operand[MAX_RECOG_OPERANDS];
  rtx orig_operand[MAX_RECOG_OPERANDS];
  struct elim_table *ep;
  rtx plus_src, plus_cst_src;

  /* Unrecognizable non-asm patterns are only the trivial kinds below;
     for a debug bind we still eliminate inside its location expression.  */
  if (! insn_is_asm && icode < 0)
    {
      gcc_assert (DEBUG_INSN_P (insn)
		  || GET_CODE (PATTERN (insn)) == USE
		  || GET_CODE (PATTERN (insn)) == CLOBBER
		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
      if (DEBUG_BIND_INSN_P (insn))
	INSN_VAR_LOCATION_LOC (insn)
	  = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
      return 0;
    }

  /* We allow one special case which happens to work on all machines we
     currently support: a single set with the source or a REG_EQUAL
     note being a PLUS of an eliminable register and a constant.  */
  plus_src = plus_cst_src = 0;
  if (old_set && REG_P (SET_DEST (old_set)))
    {
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
	plus_src = SET_SRC (old_set);
      /* First see if the source is of the form (plus (...) CST).  */
      if (plus_src
	  && CONST_INT_P (XEXP (plus_src, 1)))
	plus_cst_src = plus_src;
      else if (REG_P (SET_SRC (old_set))
	       || plus_src)
	{
	  /* Otherwise, see if we have a REG_EQUAL note of the form
	     (plus (...) CST).  */
	  rtx links;
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if ((REG_NOTE_KIND (links) == REG_EQUAL
		   || REG_NOTE_KIND (links) == REG_EQUIV)
		  && GET_CODE (XEXP (links, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
		{
		  plus_cst_src = XEXP (links, 0);
		  break;
		}
	    }
	}

      /* Check that the first operand of the PLUS is a hard reg or
	 the lowpart subreg of one.  */
      if (plus_cst_src)
	{
	  rtx reg = XEXP (plus_cst_src, 0);
	  if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
	    reg = SUBREG_REG (reg);

	  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
	    plus_cst_src = 0;
	}
    }
  if (plus_cst_src)
    {
      rtx reg = XEXP (plus_cst_src, 0);
      poly_int64 offset = INTVAL (XEXP (plus_cst_src, 1));

      if (GET_CODE (reg) == SUBREG)
	reg = SUBREG_REG (reg);

      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == reg && ep->can_eliminate)
	  {
	    rtx to_rtx = ep->to_rtx;
	    offset += ep->offset;
	    offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));

	    if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
	      to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
				    to_rtx);
	    /* If we have a nonzero offset, and the source is already
	       a simple REG, the following transformation would
	       increase the cost of the insn by replacing a simple REG
	       with (plus (reg sp) CST).  So try only when we already
	       had a PLUS before.  */
	    if (known_eq (offset, 0) || plus_src)
	      {
		rtx new_src = plus_constant (GET_MODE (to_rtx),
					     to_rtx, offset);

		/* When not replacing permanently, work on a copy so the
		   original body can be restored afterwards.  */
		new_body = old_body;
		if (! replace)
		  {
		    new_body = copy_insn (old_body);
		    if (REG_NOTES (insn))
		      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
		  }
		PATTERN (insn) = new_body;
		old_set = single_set (insn);

		/* First see if this insn remains valid when we make the
		   change.  If not, try to replace the whole pattern with
		   a simple set (this may help if the original insn was a
		   PARALLEL that was only recognized as single_set due to
		   REG_UNUSED notes).  If this isn't valid either, keep
		   the INSN_CODE the same and let reload fix it up.  */
		if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
		  {
		    rtx new_pat = gen_rtx_SET (SET_DEST (old_set), new_src);

		    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
		      SET_SRC (old_set) = new_src;
		  }
	      }
	    else
	      break;

	    val = 1;
	    /* This can't have an effect on elimination offsets, so skip right
	       to the end.  (Note this bypasses elimination_effects and the
	       per-operand substitution below.)  */
	    goto done;
	  }
    }

  /* Determine the effects of this insn on elimination offsets.  */
  elimination_effects (old_body, VOIDmode);

  /* Eliminate all eliminable registers occurring in operands that
     can be handled by reload.  */
  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      orig_operand[i] = recog_data.operand[i];
      substed_operand[i] = recog_data.operand[i];

      /* For an asm statement, every operand is eliminable.  */
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
	{
	  bool is_set_src, in_plus;

	  /* Check for setting a register that we know about.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i]))
	    {
	      /* If we are assigning to a register that can be eliminated, it
		 must be as part of a PARALLEL, since the code above handles
		 single SETs.  We must indicate that we can no longer
		 eliminate this reg.  */
	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
		   ep++)
		if (ep->from_rtx == orig_operand[i])
		  ep->can_eliminate = 0;
	    }

	  /* Companion to the above plus substitution, we can allow
	     invariants as the source of a plain move.  */
	  is_set_src = false;
	  if (old_set
	      && recog_data.operand_loc[i] == &SET_SRC (old_set))
	    is_set_src = true;
	  in_plus = false;
	  if (plus_src
	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
	    in_plus = true;

	  substed_operand[i]
	    = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
				replace ? insn : NULL_RTX,
				is_set_src || in_plus, false);
	  if (substed_operand[i] != orig_operand[i])
	    val = 1;
	  /* Terminate the search in check_eliminable_occurrences at
	     this point.  */
	  *recog_data.operand_loc[i] = 0;

	  /* If an output operand changed from a REG to a MEM and INSN is an
	     insn, write a CLOBBER insn.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i])
	      && MEM_P (substed_operand[i])
	      && replace)
	    emit_insn_after (gen_clobber (orig_operand[i]), insn);
	}
    }

  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i]
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];

  /* If any eliminable remain, they aren't eliminable anymore.  */
  check_eliminable_occurrences (old_body);

  /* Substitute the operands; the new values are in the substed_operand
     array.  */
  for (i = 0; i < recog_data.n_operands; i++)
    *recog_data.operand_loc[i] = substed_operand[i];
  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];

  /* If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.
     If re-recognition fails, the old insn code number will still be used,
     and some register operands may have changed into PLUS expressions.
     These will be handled by find_reloads by loading them into a register
     again.  */

  if (val)
    {
      /* If we aren't replacing things permanently and we changed something,
	 make another copy to ensure that all the RTL is new.  Otherwise
	 things can go wrong if find_reload swaps commutative operands
	 and one is inside RTL that has been copied while the other is not.  */
      new_body = old_body;
      if (! replace)
	{
	  new_body = copy_insn (old_body);
	  if (REG_NOTES (insn))
	    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
	}
      PATTERN (insn) = new_body;

      /* If we had a move insn but now we don't, rerecognize it.  This will
	 cause spurious re-recognition if the old move had a PARALLEL since
	 the new one still will, but we can't call single_set without
	 having put NEW_BODY into the insn and the re-recognition won't
	 hurt in this rare case.  */
      /* ??? Why this huge if statement - why don't we just rerecognize the
	 thing always?  */
      if (! insn_is_asm
	  && old_set != 0
	  && ((REG_P (SET_SRC (old_set))
	       && (GET_CODE (new_body) != SET
		   || !REG_P (SET_SRC (new_body))))
	      /* If this was a load from or store to memory, compare
		 the MEM in recog_data.operand to the one in the insn.
		 If they are not equal, then rerecognize the insn.  */
	      || (old_set != 0
		  && ((MEM_P (SET_SRC (old_set))
		       && SET_SRC (old_set) != recog_data.operand[1])
		      || (MEM_P (SET_DEST (old_set))
			  && SET_DEST (old_set) != recog_data.operand[0])))
	      /* If this was an add insn before, rerecognize.  */
	      || GET_CODE (SET_SRC (old_set)) == PLUS))
	{
	  int new_icode = recog (PATTERN (insn), insn, 0);
	  if (new_icode >= 0)
	    INSN_CODE (insn) = new_icode;
	}
    }

  /* Restore the old body.  If there were any changes to it, we made a copy
     of it while the changes were still in place, so we'll correctly return
     a modified insn below.  */
  if (! replace)
    {
      /* Restore the old body.  */
      for (i = 0; i < recog_data.n_operands; i++)
	/* Restoring a top-level match_parallel would clobber the new_body
	   we installed in the insn.  */
	if (recog_data.operand_loc[i] != &PATTERN (insn))
	  *recog_data.operand_loc[i] = orig_operand[i];
      for (i = 0; i < recog_data.n_dups; i++)
	*recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
    }

  /* Update all elimination pairs to reflect the status after the current
     insn.  The changes we make were determined by the earlier call to
     elimination_effects.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (maybe_ne (ep->previous_offset, ep->offset))
	val = 1;
    }

 done:
  /* If we changed something, perform elimination in REG_NOTES.  This is
     needed even when REPLACE is zero because a REG_DEAD note might refer
     to a register that we eliminate and could cause a different number
     of spill registers to be needed in the final reload pass than in
     the pre-passes.  */
  if (val && REG_NOTES (insn) != 0)
    REG_NOTES (insn)
      = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
			  false);

  return val;
}
3520 :
3521 : /* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3522 : register allocator. INSN is the instruction we need to examine, we perform
3523 : eliminations in its operands and record cases where eliminating a reg with
3524 : an invariant equivalence would add extra cost. */
3525 :
3526 : #pragma GCC diagnostic push
3527 : #pragma GCC diagnostic warning "-Wmaybe-uninitialized"
3528 : static void
3529 0 : elimination_costs_in_insn (rtx_insn *insn)
3530 : {
3531 0 : int icode = recog_memoized (insn);
3532 0 : rtx old_body = PATTERN (insn);
3533 0 : int insn_is_asm = asm_noperands (old_body) >= 0;
3534 0 : rtx old_set = single_set (insn);
3535 0 : int i;
3536 0 : rtx orig_operand[MAX_RECOG_OPERANDS];
3537 0 : rtx orig_dup[MAX_RECOG_OPERANDS];
3538 0 : struct elim_table *ep;
3539 0 : rtx plus_src, plus_cst_src;
3540 0 : bool sets_reg_p;
3541 :
3542 0 : if (! insn_is_asm && icode < 0)
3543 : {
3544 0 : gcc_assert (DEBUG_INSN_P (insn)
3545 : || GET_CODE (PATTERN (insn)) == USE
3546 : || GET_CODE (PATTERN (insn)) == CLOBBER
3547 : || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3548 : return;
3549 : }
3550 :
3551 0 : if (old_set != 0 && REG_P (SET_DEST (old_set))
3552 0 : && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3553 : {
3554 : /* Check for setting an eliminable register. */
3555 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3556 0 : if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3557 : return;
3558 : }
3559 :
3560 : /* We allow one special case which happens to work on all machines we
3561 : currently support: a single set with the source or a REG_EQUAL
3562 : note being a PLUS of an eliminable register and a constant. */
3563 0 : plus_src = plus_cst_src = 0;
3564 0 : sets_reg_p = false;
3565 0 : if (old_set && REG_P (SET_DEST (old_set)))
3566 : {
3567 0 : sets_reg_p = true;
3568 0 : if (GET_CODE (SET_SRC (old_set)) == PLUS)
3569 0 : plus_src = SET_SRC (old_set);
3570 : /* First see if the source is of the form (plus (...) CST). */
3571 0 : if (plus_src
3572 0 : && CONST_INT_P (XEXP (plus_src, 1)))
3573 0 : plus_cst_src = plus_src;
3574 0 : else if (REG_P (SET_SRC (old_set))
3575 : || plus_src)
3576 : {
3577 : /* Otherwise, see if we have a REG_EQUAL note of the form
3578 : (plus (...) CST). */
3579 0 : rtx links;
3580 0 : for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3581 : {
3582 : if ((REG_NOTE_KIND (links) == REG_EQUAL
3583 : || REG_NOTE_KIND (links) == REG_EQUIV)
3584 : && GET_CODE (XEXP (links, 0)) == PLUS
3585 : && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3586 : {
3587 : plus_cst_src = XEXP (links, 0);
3588 : break;
3589 : }
3590 : }
3591 : }
3592 : }
3593 :
3594 : /* Determine the effects of this insn on elimination offsets. */
3595 0 : elimination_effects (old_body, VOIDmode);
3596 :
3597 : /* Eliminate all eliminable registers occurring in operands that
3598 : can be handled by reload. */
3599 0 : extract_insn (insn);
3600 0 : int n_dups = recog_data.n_dups;
3601 0 : for (i = 0; i < n_dups; i++)
3602 0 : orig_dup[i] = *recog_data.dup_loc[i];
3603 :
3604 0 : int n_operands = recog_data.n_operands;
3605 0 : for (i = 0; i < n_operands; i++)
3606 : {
3607 0 : orig_operand[i] = recog_data.operand[i];
3608 :
3609 : /* For an asm statement, every operand is eliminable. */
3610 0 : if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3611 : {
3612 0 : bool is_set_src, in_plus;
3613 :
3614 : /* Check for setting a register that we know about. */
3615 0 : if (recog_data.operand_type[i] != OP_IN
3616 0 : && REG_P (orig_operand[i]))
3617 : {
3618 : /* If we are assigning to a register that can be eliminated, it
3619 : must be as part of a PARALLEL, since the code above handles
3620 : single SETs. We must indicate that we can no longer
3621 : eliminate this reg. */
3622 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3623 : ep++)
3624 0 : if (ep->from_rtx == orig_operand[i])
3625 0 : ep->can_eliminate = 0;
3626 : }
3627 :
3628 : /* Companion to the above plus substitution, we can allow
3629 : invariants as the source of a plain move. */
3630 0 : is_set_src = false;
3631 0 : if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3632 0 : is_set_src = true;
3633 0 : if (is_set_src && !sets_reg_p)
3634 0 : note_reg_elim_costly (SET_SRC (old_set), insn);
3635 0 : in_plus = false;
3636 0 : if (plus_src && sets_reg_p
3637 0 : && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3638 0 : || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3639 0 : in_plus = true;
3640 :
3641 0 : eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3642 : NULL_RTX,
3643 0 : is_set_src || in_plus, true);
3644 : /* Terminate the search in check_eliminable_occurrences at
3645 : this point. */
3646 0 : *recog_data.operand_loc[i] = 0;
3647 : }
3648 : }
3649 :
3650 0 : for (i = 0; i < n_dups; i++)
3651 0 : *recog_data.dup_loc[i]
3652 0 : = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3653 :
3654 : /* If any eliminable remain, they aren't eliminable anymore. */
3655 0 : check_eliminable_occurrences (old_body);
3656 :
3657 : /* Restore the old body. */
3658 0 : for (i = 0; i < n_operands; i++)
3659 0 : *recog_data.operand_loc[i] = orig_operand[i];
3660 0 : for (i = 0; i < n_dups; i++)
3661 0 : *recog_data.dup_loc[i] = orig_dup[i];
3662 :
3663 : /* Update all elimination pairs to reflect the status after the current
3664 : insn. The changes we make were determined by the earlier call to
3665 : elimination_effects. */
3666 :
3667 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3668 : {
3669 0 : if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem)
3670 0 : ep->can_eliminate = 0;
3671 :
3672 0 : ep->ref_outside_mem = 0;
3673 : }
3674 :
3675 : return;
3676 : }
3677 : #pragma GCC diagnostic pop
3678 :
3679 : /* Loop through all elimination pairs.
3680 : Recalculate the number not at initial offset.
3681 :
3682 : Compute the maximum offset (minimum offset if the stack does not
3683 : grow downward) for each elimination pair. */
3684 :
3685 : static void
3686 0 : update_eliminable_offsets (void)
3687 : {
3688 0 : struct elim_table *ep;
3689 :
3690 0 : num_not_at_initial_offset = 0;
3691 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3692 : {
3693 0 : ep->previous_offset = ep->offset;
3694 0 : if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset))
3695 0 : num_not_at_initial_offset++;
3696 : }
3697 0 : }
3698 :
3699 : /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3700 : replacement we currently believe is valid, mark it as not eliminable if X
3701 : modifies DEST in any way other than by adding a constant integer to it.
3702 :
3703 : If DEST is the frame pointer, we do nothing because we assume that
3704 : all assignments to the hard frame pointer are nonlocal gotos and are being
3705 : done at a time when they are valid and do not disturb anything else.
3706 : Some machines want to eliminate a fake argument pointer with either the
3707 : frame or stack pointer. Assignments to the hard frame pointer must not
3708 : prevent this elimination.
3709 :
3710 : Called via note_stores from reload before starting its passes to scan
3711 : the insns of the function. */
3712 :
3713 : static void
3714 0 : mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3715 : {
3716 0 : unsigned int i;
3717 :
3718 : /* A SUBREG of a hard register here is just changing its mode. We should
3719 : not see a SUBREG of an eliminable hard register, but check just in
3720 : case. */
3721 0 : if (GET_CODE (dest) == SUBREG)
3722 0 : dest = SUBREG_REG (dest);
3723 :
3724 0 : if (dest == hard_frame_pointer_rtx)
3725 : return;
3726 :
3727 0 : for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3728 0 : if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3729 0 : && (GET_CODE (x) != SET
3730 0 : || GET_CODE (SET_SRC (x)) != PLUS
3731 0 : || XEXP (SET_SRC (x), 0) != dest
3732 0 : || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3733 : {
3734 0 : reg_eliminate[i].can_eliminate_previous
3735 0 : = reg_eliminate[i].can_eliminate = 0;
3736 0 : num_eliminable--;
3737 : }
3738 : }
3739 :
3740 : /* Verify that the initial elimination offsets did not change since the
3741 : last call to set_initial_elim_offsets. This is used to catch cases
3742 : where something illegal happened during reload_as_needed that could
3743 : cause incorrect code to be generated if we did not check for it. */
3744 :
3745 : static bool
3746 0 : verify_initial_elim_offsets (void)
3747 : {
3748 0 : poly_int64 t;
3749 0 : struct elim_table *ep;
3750 :
3751 0 : if (!num_eliminable)
3752 : return true;
3753 :
3754 0 : targetm.compute_frame_layout ();
3755 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3756 : {
3757 0 : INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3758 0 : if (maybe_ne (t, ep->initial_offset))
3759 : return false;
3760 : }
3761 :
3762 : return true;
3763 : }
3764 :
3765 : /* Reset all offsets on eliminable registers to their initial values. */
3766 :
3767 : static void
3768 0 : set_initial_elim_offsets (void)
3769 : {
3770 0 : struct elim_table *ep = reg_eliminate;
3771 :
3772 0 : targetm.compute_frame_layout ();
3773 0 : for (; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3774 : {
3775 0 : INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3776 0 : ep->previous_offset = ep->offset = ep->initial_offset;
3777 : }
3778 :
3779 0 : num_not_at_initial_offset = 0;
3780 0 : }
3781 :
3782 : /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3783 :
3784 : static void
3785 0 : set_initial_eh_label_offset (rtx label)
3786 : {
3787 0 : set_label_offsets (label, NULL, 1);
3788 0 : }
3789 :
3790 : /* Initialize the known label offsets.
3791 : Set a known offset for each forced label to be at the initial offset
3792 : of each elimination. We do this because we assume that all
3793 : computed jumps occur from a location where each elimination is
3794 : at its initial offset.
3795 : For all other labels, show that we don't know the offsets. */
3796 :
3797 : static void
3798 0 : set_initial_label_offsets (void)
3799 : {
3800 0 : memset (offsets_known_at, 0, num_labels);
3801 :
3802 0 : unsigned int i;
3803 0 : rtx_insn *insn;
3804 0 : FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
3805 0 : set_label_offsets (insn, NULL, 1);
3806 :
3807 0 : for (rtx_insn_list *x = nonlocal_goto_handler_labels; x; x = x->next ())
3808 0 : if (x->insn ())
3809 0 : set_label_offsets (x->insn (), NULL, 1);
3810 :
3811 0 : for_each_eh_label (set_initial_eh_label_offset);
3812 0 : }
3813 :
3814 : /* Set all elimination offsets to the known values for the code label given
3815 : by INSN. */
3816 :
3817 : static void
3818 0 : set_offsets_for_label (rtx_insn *insn)
3819 : {
3820 0 : unsigned int i;
3821 0 : int label_nr = CODE_LABEL_NUMBER (insn);
3822 0 : struct elim_table *ep;
3823 :
3824 0 : num_not_at_initial_offset = 0;
3825 0 : for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3826 : {
3827 0 : ep->offset = ep->previous_offset
3828 0 : = offsets_at[label_nr - first_label_num][i];
3829 0 : if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset))
3830 0 : num_not_at_initial_offset++;
3831 : }
3832 0 : }
3833 :
3834 : /* See if anything that happened changes which eliminations are valid.
3835 : For example, on the SPARC, whether or not the frame pointer can
3836 : be eliminated can depend on what registers have been used. We need
3837 : not check some conditions again (such as flag_omit_frame_pointer)
3838 : since they can't have changed. */
3839 :
3840 : static void
3841 0 : update_eliminables (HARD_REG_SET *pset)
3842 : {
3843 0 : int previous_frame_pointer_needed = frame_pointer_needed;
3844 0 : struct elim_table *ep;
3845 :
3846 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3847 0 : if ((ep->from == HARD_FRAME_POINTER_REGNUM
3848 0 : && targetm.frame_pointer_required ())
3849 0 : || ! targetm.can_eliminate (ep->from, ep->to)
3850 : )
3851 0 : ep->can_eliminate = 0;
3852 :
3853 : /* Look for the case where we have discovered that we can't replace
3854 : register A with register B and that means that we will now be
3855 : trying to replace register A with register C. This means we can
3856 : no longer replace register C with register B and we need to disable
3857 : such an elimination, if it exists. This occurs often with A == ap,
3858 : B == sp, and C == fp. */
3859 :
3860 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3861 : {
3862 0 : struct elim_table *op;
3863 0 : int new_to = -1;
3864 :
3865 0 : if (! ep->can_eliminate && ep->can_eliminate_previous)
3866 : {
3867 : /* Find the current elimination for ep->from, if there is a
3868 : new one. */
3869 0 : for (op = reg_eliminate;
3870 0 : op < ®_eliminate[NUM_ELIMINABLE_REGS]; op++)
3871 0 : if (op->from == ep->from && op->can_eliminate)
3872 : {
3873 0 : new_to = op->to;
3874 0 : break;
3875 : }
3876 :
3877 : /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3878 : disable it. */
3879 0 : for (op = reg_eliminate;
3880 0 : op < ®_eliminate[NUM_ELIMINABLE_REGS]; op++)
3881 0 : if (op->from == new_to && op->to == ep->to)
3882 0 : op->can_eliminate = 0;
3883 : }
3884 : }
3885 :
3886 : /* See if any registers that we thought we could eliminate the previous
3887 : time are no longer eliminable. If so, something has changed and we
3888 : must spill the register. Also, recompute the number of eliminable
3889 : registers and see if the frame pointer is needed; it is if there is
3890 : no elimination of the frame pointer that we can perform. */
3891 :
3892 0 : frame_pointer_needed = 1;
3893 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3894 : {
3895 0 : if (ep->can_eliminate
3896 0 : && ep->from == FRAME_POINTER_REGNUM
3897 0 : && ep->to != HARD_FRAME_POINTER_REGNUM
3898 0 : && (! SUPPORTS_STACK_ALIGNMENT
3899 0 : || ! crtl->stack_realign_needed))
3900 0 : frame_pointer_needed = 0;
3901 :
3902 0 : if (! ep->can_eliminate && ep->can_eliminate_previous)
3903 : {
3904 0 : ep->can_eliminate_previous = 0;
3905 0 : SET_HARD_REG_BIT (*pset, ep->from);
3906 0 : num_eliminable--;
3907 : }
3908 : }
3909 :
3910 : /* If we didn't need a frame pointer last time, but we do now, spill
3911 : the hard frame pointer. */
3912 0 : if (frame_pointer_needed && ! previous_frame_pointer_needed)
3913 0 : SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3914 0 : }
3915 :
3916 : /* Call update_eliminables and spill any registers we can't eliminate anymore.
3917 : Return true iff a register was spilled. */
3918 :
3919 : static bool
3920 0 : update_eliminables_and_spill (void)
3921 : {
3922 0 : int i;
3923 0 : bool did_spill = false;
3924 0 : HARD_REG_SET to_spill;
3925 0 : CLEAR_HARD_REG_SET (to_spill);
3926 0 : update_eliminables (&to_spill);
3927 0 : used_spill_regs &= ~to_spill;
3928 :
3929 0 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3930 0 : if (TEST_HARD_REG_BIT (to_spill, i))
3931 : {
3932 0 : spill_hard_reg (i, 1);
3933 0 : did_spill = true;
3934 :
3935 : /* Regardless of the state of spills, if we previously had
3936 : a register that we thought we could eliminate, but now
3937 : cannot eliminate, we must run another pass.
3938 :
3939 : Consider pseudos which have an entry in reg_equiv_* which
3940 : reference an eliminable register. We must make another pass
3941 : to update reg_equiv_* so that we do not substitute in the
3942 : old value from when we thought the elimination could be
3943 : performed. */
3944 : }
3945 0 : return did_spill;
3946 : }
3947 :
3948 : /* Return true if X is used as the target register of an elimination. */
3949 :
3950 : bool
3951 0 : elimination_target_reg_p (rtx x)
3952 : {
3953 0 : struct elim_table *ep;
3954 :
3955 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3956 0 : if (ep->to_rtx == x && ep->can_eliminate)
3957 : return true;
3958 :
3959 : return false;
3960 : }
3961 :
3962 : /* Initialize the table of registers to eliminate.
3963 : Pre-condition: global flag frame_pointer_needed has been set before
3964 : calling this function. */
3965 :
3966 : static void
3967 0 : init_elim_table (void)
3968 : {
3969 0 : struct elim_table *ep;
3970 0 : const struct elim_table_1 *ep1;
3971 :
3972 0 : if (!reg_eliminate)
3973 0 : reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
3974 :
3975 0 : num_eliminable = 0;
3976 :
3977 0 : for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3978 0 : ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3979 : {
3980 0 : ep->from = ep1->from;
3981 0 : ep->to = ep1->to;
3982 0 : ep->can_eliminate = ep->can_eliminate_previous
3983 0 : = (targetm.can_eliminate (ep->from, ep->to)
3984 0 : && ! (ep->to == STACK_POINTER_REGNUM
3985 0 : && frame_pointer_needed
3986 0 : && (! SUPPORTS_STACK_ALIGNMENT
3987 0 : || ! stack_realign_fp)));
3988 : }
3989 :
3990 : /* Count the number of eliminable registers and build the FROM and TO
3991 : REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
3992 : gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3993 : We depend on this. */
3994 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3995 : {
3996 0 : num_eliminable += ep->can_eliminate;
3997 0 : ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3998 0 : ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3999 : }
4000 0 : }
4001 :
4002 : /* Find all the pseudo registers that didn't get hard regs
4003 : but do have known equivalent constants or memory slots.
4004 : These include parameters (known equivalent to parameter slots)
4005 : and cse'd or loop-moved constant memory addresses.
4006 :
4007 : Record constant equivalents in reg_equiv_constant
4008 : so they will be substituted by find_reloads.
4009 : Record memory equivalents in reg_mem_equiv so they can
4010 : be substituted eventually by altering the REG-rtx's. */
4011 :
4012 : static void
4013 0 : init_eliminable_invariants (rtx_insn *first, bool do_subregs)
4014 : {
4015 0 : int i;
4016 0 : rtx_insn *insn;
4017 :
4018 0 : grow_reg_equivs ();
4019 0 : if (do_subregs)
4020 0 : reg_max_ref_mode = XCNEWVEC (machine_mode, max_regno);
4021 : else
4022 0 : reg_max_ref_mode = NULL;
4023 :
4024 0 : num_eliminable_invariants = 0;
4025 :
4026 0 : first_label_num = get_first_label_num ();
4027 0 : num_labels = max_label_num () - first_label_num;
4028 :
4029 : /* Allocate the tables used to store offset information at labels. */
4030 0 : offsets_known_at = XNEWVEC (char, num_labels);
4031 0 : offsets_at = (poly_int64 (*)[NUM_ELIMINABLE_REGS])
4032 0 : xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (poly_int64));
4033 :
4034 : /* Look for REG_EQUIV notes; record what each pseudo is equivalent
4035 : to. If DO_SUBREGS is true, also find all paradoxical subregs and
4036 : find largest such for each pseudo. FIRST is the head of the insn
4037 : list. */
4038 :
4039 0 : for (insn = first; insn; insn = NEXT_INSN (insn))
4040 : {
4041 0 : rtx set = single_set (insn);
4042 :
4043 : /* We may introduce USEs that we want to remove at the end, so
4044 : we'll mark them with QImode. Make sure there are no
4045 : previously-marked insns left by say regmove. */
4046 0 : if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
4047 0 : && GET_MODE (insn) != VOIDmode)
4048 0 : PUT_MODE (insn, VOIDmode);
4049 :
4050 0 : if (do_subregs && NONDEBUG_INSN_P (insn))
4051 0 : scan_paradoxical_subregs (PATTERN (insn));
4052 :
4053 0 : if (set != 0 && REG_P (SET_DEST (set)))
4054 : {
4055 0 : rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
4056 0 : rtx x;
4057 :
4058 0 : if (! note)
4059 0 : continue;
4060 :
4061 0 : i = REGNO (SET_DEST (set));
4062 0 : x = XEXP (note, 0);
4063 :
4064 0 : if (i <= LAST_VIRTUAL_REGISTER)
4065 0 : continue;
4066 :
4067 : /* If flag_pic and we have constant, verify it's legitimate. */
4068 0 : if (!CONSTANT_P (x)
4069 0 : || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
4070 : {
4071 : /* It can happen that a REG_EQUIV note contains a MEM
4072 : that is not a legitimate memory operand. As later
4073 : stages of reload assume that all addresses found
4074 : in the reg_equiv_* arrays were originally legitimate,
4075 : we ignore such REG_EQUIV notes. */
4076 0 : if (memory_operand (x, VOIDmode))
4077 : {
4078 : /* Always unshare the equivalence, so we can
4079 : substitute into this insn without touching the
4080 : equivalence. */
4081 0 : reg_equiv_memory_loc (i) = copy_rtx (x);
4082 : }
4083 0 : else if (function_invariant_p (x))
4084 : {
4085 0 : machine_mode mode;
4086 :
4087 0 : mode = GET_MODE (SET_DEST (set));
4088 0 : if (GET_CODE (x) == PLUS)
4089 : {
4090 : /* This is PLUS of frame pointer and a constant,
4091 : and might be shared. Unshare it. */
4092 0 : reg_equiv_invariant (i) = copy_rtx (x);
4093 0 : num_eliminable_invariants++;
4094 : }
4095 0 : else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
4096 : {
4097 0 : reg_equiv_invariant (i) = x;
4098 0 : num_eliminable_invariants++;
4099 : }
4100 0 : else if (targetm.legitimate_constant_p (mode, x))
4101 0 : reg_equiv_constant (i) = x;
4102 : else
4103 : {
4104 0 : reg_equiv_memory_loc (i) = force_const_mem (mode, x);
4105 0 : if (! reg_equiv_memory_loc (i))
4106 0 : reg_equiv_init (i) = NULL;
4107 : }
4108 : }
4109 : else
4110 : {
4111 0 : reg_equiv_init (i) = NULL;
4112 0 : continue;
4113 : }
4114 : }
4115 : else
4116 0 : reg_equiv_init (i) = NULL;
4117 : }
4118 : }
4119 :
4120 0 : if (dump_file)
4121 0 : for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4122 0 : if (reg_equiv_init (i))
4123 : {
4124 0 : fprintf (dump_file, "init_insns for %u: ", i);
4125 0 : print_inline_rtx (dump_file, reg_equiv_init (i), 20);
4126 0 : fprintf (dump_file, "\n");
4127 : }
4128 0 : }
4129 :
4130 : /* Indicate that we no longer have known memory locations or constants.
4131 : Free all data involved in tracking these. */
4132 :
4133 : static void
4134 0 : free_reg_equiv (void)
4135 : {
4136 0 : int i;
4137 :
4138 0 : free (offsets_known_at);
4139 0 : free (offsets_at);
4140 0 : offsets_at = 0;
4141 0 : offsets_known_at = 0;
4142 :
4143 0 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4144 0 : if (reg_equiv_alt_mem_list (i))
4145 0 : free_EXPR_LIST_list (®_equiv_alt_mem_list (i));
4146 0 : vec_free (reg_equivs);
4147 0 : }
4148 :
4149 : /* Kick all pseudos out of hard register REGNO.
4150 :
4151 : If CANT_ELIMINATE is nonzero, it means that we are doing this spill
4152 : because we found we can't eliminate some register. In the case, no pseudos
4153 : are allowed to be in the register, even if they are only in a block that
4154 : doesn't require spill registers, unlike the case when we are spilling this
4155 : hard reg to produce another spill register.
4156 :
4157 : Any pseudos kicked out are recorded in the spilled_pseudos set. */
4158 :
4159 : static void
4160 0 : spill_hard_reg (unsigned int regno, int cant_eliminate)
4161 : {
4162 0 : int i;
4163 :
4164 0 : if (cant_eliminate)
4165 : {
4166 0 : SET_HARD_REG_BIT (bad_spill_regs_global, regno);
4167 0 : df_set_regs_ever_live (regno, true);
4168 : }
4169 :
4170 : /* Spill every pseudo reg that was allocated to this reg
4171 : or to something that overlaps this reg. */
4172 :
4173 0 : for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4174 0 : if (reg_renumber[i] >= 0
4175 0 : && (unsigned int) reg_renumber[i] <= regno
4176 0 : && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
4177 0 : SET_REGNO_REG_SET (&spilled_pseudos, i);
4178 0 : }
4179 :
4180 : /* After spill_hard_reg was called and/or find_reload_regs was run for all
4181 : insns that need reloads, this function is used to actually spill pseudo
4182 : registers and try to reallocate them. It also sets up the spill_regs
4183 : array for use by choose_reload_regs.
4184 :
4185 : GLOBAL nonzero means we should attempt to reallocate any pseudo registers
4186 : that we displace from hard registers. */
4187 :
4188 : static int
4189 0 : finish_spills (int global)
4190 : {
4191 0 : class insn_chain *chain;
4192 0 : int something_changed = 0;
4193 0 : unsigned i;
4194 0 : reg_set_iterator rsi;
4195 :
4196 : /* Build the spill_regs array for the function. */
4197 : /* If there are some registers still to eliminate and one of the spill regs
4198 : wasn't ever used before, additional stack space may have to be
4199 : allocated to store this register. Thus, we may have changed the offset
4200 : between the stack and frame pointers, so mark that something has changed.
4201 :
4202 : One might think that we need only set VAL to 1 if this is a call-used
4203 : register. However, the set of registers that must be saved by the
4204 : prologue is not identical to the call-used set. For example, the
4205 : register used by the call insn for the return PC is a call-used register,
4206 : but must be saved by the prologue. */
4207 :
4208 0 : n_spills = 0;
4209 0 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4210 0 : if (TEST_HARD_REG_BIT (used_spill_regs, i))
4211 : {
4212 0 : spill_reg_order[i] = n_spills;
4213 0 : spill_regs[n_spills++] = i;
4214 0 : if (num_eliminable && ! df_regs_ever_live_p (i))
4215 : something_changed = 1;
4216 0 : df_set_regs_ever_live (i, true);
4217 : }
4218 : else
4219 0 : spill_reg_order[i] = -1;
4220 :
4221 0 : EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
4222 0 : if (reg_renumber[i] >= 0)
4223 : {
4224 0 : SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
4225 : /* Mark it as no longer having a hard register home. */
4226 0 : reg_renumber[i] = -1;
4227 0 : if (ira_conflicts_p)
4228 : /* Inform IRA about the change. */
4229 0 : ira_mark_allocation_change (i);
4230 : /* We will need to scan everything again. */
4231 : something_changed = 1;
4232 : }
4233 :
4234 : /* Retry global register allocation if possible. */
4235 0 : if (global && ira_conflicts_p)
4236 : {
4237 0 : unsigned int n;
4238 :
4239 0 : memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
4240 : /* For every insn that needs reloads, set the registers used as spill
4241 : regs in pseudo_forbidden_regs for every pseudo live across the
4242 : insn. */
4243 0 : for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
4244 : {
4245 0 : EXECUTE_IF_SET_IN_REG_SET
4246 : (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
4247 : {
4248 0 : pseudo_forbidden_regs[i] |= chain->used_spill_regs;
4249 : }
4250 0 : EXECUTE_IF_SET_IN_REG_SET
4251 : (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
4252 : {
4253 0 : pseudo_forbidden_regs[i] |= chain->used_spill_regs;
4254 : }
4255 : }
4256 :
4257 : /* Retry allocating the pseudos spilled in IRA and the
4258 : reload. For each reg, merge the various reg sets that
4259 : indicate which hard regs can't be used, and call
4260 : ira_reassign_pseudos. */
4261 0 : for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
4262 0 : if (reg_old_renumber[i] != reg_renumber[i])
4263 : {
4264 0 : if (reg_renumber[i] < 0)
4265 0 : temp_pseudo_reg_arr[n++] = i;
4266 : else
4267 0 : CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
4268 : }
4269 0 : if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
4270 : bad_spill_regs_global,
4271 : pseudo_forbidden_regs, pseudo_previous_regs,
4272 : &spilled_pseudos))
4273 0 : something_changed = 1;
4274 : }
4275 : /* Fix up the register information in the insn chain.
4276 : This involves deleting those of the spilled pseudos which did not get
4277 : a new hard register home from the live_{before,after} sets. */
4278 0 : for (chain = reload_insn_chain; chain; chain = chain->next)
4279 : {
4280 0 : HARD_REG_SET used_by_pseudos;
4281 0 : HARD_REG_SET used_by_pseudos2;
4282 :
4283 0 : if (! ira_conflicts_p)
4284 : {
4285 : /* Don't do it for IRA because IRA and the reload still can
4286 : assign hard registers to the spilled pseudos on next
4287 : reload iterations. */
4288 0 : AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
4289 0 : AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
4290 : }
4291 : /* Mark any unallocated hard regs as available for spills. That
4292 : makes inheritance work somewhat better. */
4293 0 : if (chain->need_reload)
4294 : {
4295 0 : REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
4296 0 : REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
4297 0 : used_by_pseudos |= used_by_pseudos2;
4298 :
4299 0 : compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
4300 0 : compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
4301 : /* Value of chain->used_spill_regs from previous iteration
4302 : may be not included in the value calculated here because
4303 : of possible removing caller-saves insns (see function
4304 : delete_caller_save_insns. */
4305 0 : chain->used_spill_regs = ~used_by_pseudos & used_spill_regs;
4306 : }
4307 : }
4308 :
4309 0 : CLEAR_REG_SET (&changed_allocation_pseudos);
4310 : /* Let alter_reg modify the reg rtx's for the modified pseudos. */
4311 0 : for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
4312 : {
4313 0 : int regno = reg_renumber[i];
4314 0 : if (reg_old_renumber[i] == regno)
4315 0 : continue;
4316 :
4317 0 : SET_REGNO_REG_SET (&changed_allocation_pseudos, i);
4318 :
4319 0 : alter_reg (i, reg_old_renumber[i], false);
4320 0 : reg_old_renumber[i] = regno;
4321 0 : if (dump_file)
4322 : {
4323 0 : if (regno == -1)
4324 0 : fprintf (dump_file, " Register %d now on stack.\n\n", i);
4325 : else
4326 0 : fprintf (dump_file, " Register %d now in %d.\n\n",
4327 0 : i, reg_renumber[i]);
4328 : }
4329 : }
4330 :
4331 0 : return something_changed;
4332 : }
4333 :
4334 : /* Find all paradoxical subregs within X and update reg_max_ref_mode. */
4335 :
4336 : static void
4337 0 : scan_paradoxical_subregs (rtx x)
4338 : {
4339 0 : int i;
4340 0 : const char *fmt;
4341 0 : enum rtx_code code = GET_CODE (x);
4342 :
4343 0 : switch (code)
4344 : {
4345 : case REG:
4346 : case CONST:
4347 : case SYMBOL_REF:
4348 : case LABEL_REF:
4349 : CASE_CONST_ANY:
4350 : case PC:
4351 : case USE:
4352 : case CLOBBER:
4353 : return;
4354 :
4355 0 : case SUBREG:
4356 0 : if (REG_P (SUBREG_REG (x)))
4357 : {
4358 0 : unsigned int regno = REGNO (SUBREG_REG (x));
4359 0 : if (partial_subreg_p (reg_max_ref_mode[regno], GET_MODE (x)))
4360 : {
4361 0 : reg_max_ref_mode[regno] = GET_MODE (x);
4362 0 : mark_home_live_1 (regno, GET_MODE (x));
4363 : }
4364 : }
4365 : return;
4366 :
4367 0 : default:
4368 0 : break;
4369 : }
4370 :
4371 0 : fmt = GET_RTX_FORMAT (code);
4372 0 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4373 : {
4374 0 : if (fmt[i] == 'e')
4375 0 : scan_paradoxical_subregs (XEXP (x, i));
4376 0 : else if (fmt[i] == 'E')
4377 : {
4378 0 : int j;
4379 0 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4380 0 : scan_paradoxical_subregs (XVECEXP (x, i, j));
4381 : }
4382 : }
4383 : }
4384 :
4385 : /* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4386 : If *OP_PTR is a paradoxical subreg, try to remove that subreg
4387 : and apply the corresponding narrowing subreg to *OTHER_PTR.
4388 : Return true if the operands were changed, false otherwise. */
4389 :
4390 : static bool
4391 0 : strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr)
4392 : {
4393 0 : rtx op, inner, other, tem;
4394 :
4395 0 : op = *op_ptr;
4396 0 : if (!paradoxical_subreg_p (op))
4397 : return false;
4398 0 : inner = SUBREG_REG (op);
4399 :
4400 0 : other = *other_ptr;
4401 0 : tem = gen_lowpart_common (GET_MODE (inner), other);
4402 0 : if (!tem)
4403 : return false;
4404 :
4405 : /* If the lowpart operation turned a hard register into a subreg,
4406 : rather than simplifying it to another hard register, then the
4407 : mode change cannot be properly represented. For example, OTHER
4408 : might be valid in its current mode, but not in the new one. */
4409 0 : if (GET_CODE (tem) == SUBREG
4410 0 : && REG_P (other)
4411 0 : && HARD_REGISTER_P (other))
4412 : return false;
4413 :
4414 0 : *op_ptr = inner;
4415 0 : *other_ptr = tem;
4416 0 : return true;
4417 : }
4418 :
4419 : /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4420 : examine all of the reload insns between PREV and NEXT exclusive, and
4421 : annotate all that may trap. */
4422 :
4423 : static void
4424 0 : fixup_eh_region_note (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4425 : {
4426 0 : rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4427 0 : if (note == NULL)
4428 : return;
4429 0 : if (!insn_could_throw_p (insn))
4430 0 : remove_note (insn, note);
4431 0 : copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4432 : }
4433 :
/* Reload pseudo-registers into hard regs around each insn as needed.
   Additional register load insns are output before the insn that needs it
   and perhaps store insns after insns that modify the reloaded pseudo reg.

   LIVE_KNOWN is forwarded unchanged to find_reloads.

   reg_last_reload_reg and reg_reloaded_contents keep track of
   which registers are already available in reload registers.
   We update these for the reloads that we perform,
   as the insns are scanned.  */

static void
reload_as_needed (int live_known)
{
  class insn_chain *chain;
#if AUTO_INC_DEC
  int i;
#endif
  rtx_note *marker;

  /* Start with no reload registers known to hold anything.  */
  memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
  memset (spill_reg_store, 0, sizeof spill_reg_store);
  reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
  INIT_REG_SET (&reg_has_output_reload);
  CLEAR_HARD_REG_SET (reg_reloaded_valid);

  set_initial_elim_offsets ();

  /* Generate a marker insn that we will move around.  */
  marker = emit_note (NOTE_INSN_DELETED);
  unlink_insn_chain (marker, marker);

  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      rtx_insn *prev = 0;
      rtx_insn *insn = chain->insn;
      rtx_insn *old_next = NEXT_INSN (insn);
#if AUTO_INC_DEC
      rtx_insn *old_prev = PREV_INSN (insn);
#endif

      /* Skip insns that are going to be removed.  */
      if (will_delete_init_insn_p (insn))
	continue;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (LABEL_P (insn))
	set_offsets_for_label (insn);

      else if (INSN_P (insn))
	{
	  regset_head regs_to_forget;
	  INIT_REG_SET (&regs_to_forget);
	  note_stores (insn, forget_old_reloads_1, &regs_to_forget);

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && MEM_P (XEXP (PATTERN (insn), 0)))
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)),
				NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.  */
	  if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      /* eliminate_regs_in_insn turns a deleted insn into a NOTE.  */
	      if (NOTE_P (insn))
		{
		  update_eliminable_offsets ();
		  CLEAR_REG_SET (&regs_to_forget);
		  continue;
		}
	    }

	  /* If need_elim is nonzero but need_reload is zero, one might think
	     that we could simply set n_reloads to 0.  However, find_reloads
	     could have done some manipulation of the insn (such as swapping
	     commutative operands), and these manipulations are lost during
	     the first pass for every insn that needs register elimination.
	     So the actions of find_reloads must be redone here.  */

	  if (! chain->need_elim && ! chain->need_reload
	      && ! chain->need_operand_change)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      CLEAR_REG_SET (&reg_has_output_reload);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      rtx_insn *next = NEXT_INSN (insn);

	      /* ??? PREV can get deleted by reload inheritance.
		 Work around this by emitting a marker note.  */
	      prev = PREV_INSN (insn);
	      reorder_insns_nobb (marker, marker, prev);

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (chain);

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (chain);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads (insn);

	      /* Recover the insn before the reload sequence via the
		 marker, since the original PREV may have been deleted.  */
	      prev = PREV_INSN (marker);
	      unlink_insn_chain (marker, marker);

	      /* Adjust the exception region notes for loads and stores.  */
	      if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
		fixup_eh_region_note (insn, prev, next);

	      /* Adjust the location of REG_ARGS_SIZE.  */
	      rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
	      if (p)
		{
		  remove_note (insn, p);
		  fixup_args_size_notes (prev, PREV_INSN (next),
					 get_args_size (p));
		}

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */
	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (rtx_insn *p = NEXT_INSN (prev);
		     p != next;
		     p = NEXT_INSN (p))
		  if (p != insn && INSN_P (p)
		      && GET_CODE (PATTERN (p)) != USE
		      && (recog_memoized (p) < 0
			  || (extract_insn (p),
			      !(constrain_operands (1,
				  get_enabled_alternatives (p))))))
		    {
		      error_for_asm (insn,
				     "%<asm%> operand requires "
				     "impossible reload");
		      delete_insn (p);
		    }
	    }

	  if (num_eliminable && chain->need_elim)
	    update_eliminable_offsets ();

	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  forget_marked_reloads (&regs_to_forget);
	  CLEAR_REG_SET (&regs_to_forget);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
	    if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (x, forget_old_reloads_1, NULL);

#if AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     REG_INC notes have been changed by reloading:
	     find_reloads_address_1 records substitutions for them,
	     which have been performed by subst_reloads above.  */
	  for (i = n_reloads - 1; i >= 0; i--)
	    {
	      rtx in_reg = rld[i].in_reg;
	      if (in_reg)
		{
		  enum rtx_code code = GET_CODE (in_reg);
		  /* PRE_INC / PRE_DEC will have the reload register ending up
		     with the same value as the stack slot, but that doesn't
		     hold true for POST_INC / POST_DEC.  Either we have to
		     convert the memory access to a true POST_INC / POST_DEC,
		     or we can't use the reload register for inheritance.  */
		  if ((code == POST_INC || code == POST_DEC)
		      && TEST_HARD_REG_BIT (reg_reloaded_valid,
					    REGNO (rld[i].reg_rtx))
		      /* Make sure it is the inc/dec pseudo, and not
			 some other (e.g. output operand) pseudo.  */
		      && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
			  == REGNO (XEXP (in_reg, 0))))

		    {
		      rtx reload_reg = rld[i].reg_rtx;
		      machine_mode mode = GET_MODE (reload_reg);
		      int n = 0;
		      rtx_insn *p;

		      /* Look for a single use of the reload register in
			 the reload insns emitted after INSN; if found,
			 try to rewrite that use as a true POST_INC /
			 POST_DEC address.  */
		      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
			{
			  /* We really want to ignore REG_INC notes here, so
			     use PATTERN (p) as argument to reg_set_p .  */
			  if (reg_set_p (reload_reg, PATTERN (p)))
			    break;
			  n = count_occurrences (PATTERN (p), reload_reg, 0);
			  if (! n)
			    continue;
			  if (n == 1)
			    {
			      rtx replace_reg
				= gen_rtx_fmt_e (code, mode, reload_reg);

			      validate_replace_rtx_group (reload_reg,
							  replace_reg, p);
			      n = verify_changes (0);

			      /* We must also verify that the constraints
				 are met after the replacement.  Make sure
				 extract_insn is only called for an insn
				 where the replacements were found to be
				 valid so far. */
			      if (n)
				{
				  extract_insn (p);
				  n = constrain_operands (1,
				    get_enabled_alternatives (p));
				}

			      /* If the constraints were not met, then
				 undo the replacement, else confirm it.  */
			      if (!n)
				cancel_changes (0);
			      else
				confirm_change_group ();
			    }
			  break;
			}
		      if (n == 1)
			{
			  add_reg_note (p, REG_INC, reload_reg);
			  /* Mark this as having an output reload so that the
			     REG_INC processing code below won't invalidate
			     the reload for inheritance.  */
			  SET_HARD_REG_BIT (reg_is_output_reload,
					    REGNO (reload_reg));
			  SET_REGNO_REG_SET (&reg_has_output_reload,
					     REGNO (XEXP (in_reg, 0)));
			}
		      else
			forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
					      NULL);
		    }
		  else if ((code == PRE_INC || code == PRE_DEC)
			   && TEST_HARD_REG_BIT (reg_reloaded_valid,
						 REGNO (rld[i].reg_rtx))
			   /* Make sure it is the inc/dec pseudo, and not
			      some other (e.g. output operand) pseudo.  */
			   && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
			       == REGNO (XEXP (in_reg, 0))))
		    {
		      SET_HARD_REG_BIT (reg_is_output_reload,
					REGNO (rld[i].reg_rtx));
		      SET_REGNO_REG_SET (&reg_has_output_reload,
					 REGNO (XEXP (in_reg, 0)));
		    }
		  else if (code == PRE_INC || code == PRE_DEC
			   || code == POST_INC || code == POST_DEC)
		    {
		      int in_regno = REGNO (XEXP (in_reg, 0));

		      if (reg_last_reload_reg[in_regno] != NULL_RTX)
			{
			  int in_hard_regno;
			  bool forget_p = true;

			  in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
			  if (TEST_HARD_REG_BIT (reg_reloaded_valid,
						 in_hard_regno))
			    {
			      for (rtx_insn *x = (old_prev ?
						  NEXT_INSN (old_prev) : insn);
				   x != old_next;
				   x = NEXT_INSN (x))
				if (x == reg_reloaded_insn[in_hard_regno])
				  {
				    forget_p = false;
				    break;
				  }
			    }
			  /* If for some reasons, we didn't set up
			     reg_last_reload_reg in this insn,
			     invalidate inheritance from previous
			     insns for the incremented/decremented
			     register.  Such registers will be not in
			     reg_has_output_reload.  Invalidate it
			     also if the corresponding element in
			     reg_reloaded_insn is also
			     invalidated.  */
			  if (forget_p)
			    forget_old_reloads_1 (XEXP (in_reg, 0),
						  NULL_RTX, NULL);
			}
		    }
		}
	    }
	  /* If a pseudo that got a hard register is auto-incremented,
	     we must purge records of copying it into pseudos without
	     hard registers.  */
	  for (rtx x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (rld[i].out == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (LABEL_P (insn))
	CLEAR_HARD_REG_SET (reg_reloaded_valid);

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg, or if it contains a value that will
	 be partially clobbered by the call.  */
      else if (CALL_P (insn))
	{
	  reg_reloaded_valid
	    &= ~insn_callee_abi (insn).full_and_partial_reg_clobbers ();

	  /* If this is a call to a setjmp-type function, we must not
	     reuse any reload reg contents across the call; that will
	     just be clobbered by other uses of the register in later
	     code, before the longjmp.  */
	  if (find_reg_note (insn, REG_SETJMP, NULL_RTX))
	    CLEAR_HARD_REG_SET (reg_reloaded_valid);
	}
    }

  /* Clean up.  */
  free (reg_last_reload_reg);
  CLEAR_REG_SET (&reg_has_output_reload);
}
4794 :
4795 : /* Discard all record of any value reloaded from X,
4796 : or reloaded in X from someplace else;
4797 : unless X is an output reload reg of the current insn.
4798 :
4799 : X may be a hard reg (the reload reg)
4800 : or it may be a pseudo reg that was reloaded from.
4801 :
4802 : When DATA is non-NULL just mark the registers in regset
4803 : to be forgotten later. */
4804 :
4805 : static void
4806 0 : forget_old_reloads_1 (rtx x, const_rtx, void *data)
4807 : {
4808 0 : unsigned int regno;
4809 0 : unsigned int nr;
4810 0 : regset regs = (regset) data;
4811 :
4812 : /* note_stores does give us subregs of hard regs,
4813 : subreg_regno_offset requires a hard reg. */
4814 0 : while (GET_CODE (x) == SUBREG)
4815 : {
4816 : /* We ignore the subreg offset when calculating the regno,
4817 : because we are using the entire underlying hard register
4818 : below. */
4819 0 : x = SUBREG_REG (x);
4820 : }
4821 :
4822 0 : if (!REG_P (x))
4823 : return;
4824 :
4825 0 : regno = REGNO (x);
4826 :
4827 0 : if (regno >= FIRST_PSEUDO_REGISTER)
4828 : nr = 1;
4829 : else
4830 : {
4831 0 : unsigned int i;
4832 :
4833 0 : nr = REG_NREGS (x);
4834 : /* Storing into a spilled-reg invalidates its contents.
4835 : This can happen if a block-local pseudo is allocated to that reg
4836 : and it wasn't spilled because this block's total need is 0.
4837 : Then some insn might have an optional reload and use this reg. */
4838 0 : if (!regs)
4839 0 : for (i = 0; i < nr; i++)
4840 : /* But don't do this if the reg actually serves as an output
4841 : reload reg in the current instruction. */
4842 0 : if (n_reloads == 0
4843 0 : || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4844 : {
4845 0 : CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4846 0 : spill_reg_store[regno + i] = 0;
4847 : }
4848 : }
4849 :
4850 0 : if (regs)
4851 0 : while (nr-- > 0)
4852 0 : SET_REGNO_REG_SET (regs, regno + nr);
4853 : else
4854 : {
4855 : /* Since value of X has changed,
4856 : forget any value previously copied from it. */
4857 :
4858 0 : while (nr-- > 0)
4859 : /* But don't forget a copy if this is the output reload
4860 : that establishes the copy's validity. */
4861 0 : if (n_reloads == 0
4862 0 : || !REGNO_REG_SET_P (®_has_output_reload, regno + nr))
4863 0 : reg_last_reload_reg[regno + nr] = 0;
4864 : }
4865 : }
4866 :
4867 : /* Forget the reloads marked in regset by previous function. */
4868 : static void
4869 0 : forget_marked_reloads (regset regs)
4870 : {
4871 0 : unsigned int reg;
4872 0 : reg_set_iterator rsi;
4873 0 : EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4874 : {
4875 0 : if (reg < FIRST_PSEUDO_REGISTER
4876 : /* But don't do this if the reg actually serves as an output
4877 : reload reg in the current instruction. */
4878 0 : && (n_reloads == 0
4879 0 : || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4880 : {
4881 0 : CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4882 0 : spill_reg_store[reg] = 0;
4883 : }
4884 0 : if (n_reloads == 0
4885 0 : || !REGNO_REG_SET_P (®_has_output_reload, reg))
4886 0 : reg_last_reload_reg[reg] = 0;
4887 : }
4888 0 : }
4889 :
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  They are
   maintained by mark_reload_reg_in_use / clear_reload_reg_in_use and
   queried by reload_reg_free_p.  */

/* If reg is unavailable for all reloads.  */
static HARD_REG_SET reload_reg_unavailable;
/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;

/* Records which hard regs are used in any way, either as explicit use or
   by being allocated to a pseudo during any point of the current insn.  */
static HARD_REG_SET reg_used_in_insn;
4928 :
4929 : /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4930 : TYPE. MODE is used to indicate how many consecutive regs are
4931 : actually used. */
4932 :
4933 : static void
4934 0 : mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4935 : machine_mode mode)
4936 : {
4937 0 : switch (type)
4938 : {
4939 0 : case RELOAD_OTHER:
4940 0 : add_to_hard_reg_set (&reload_reg_used, mode, regno);
4941 0 : break;
4942 :
4943 0 : case RELOAD_FOR_INPUT_ADDRESS:
4944 0 : add_to_hard_reg_set (&reload_reg_used_in_input_addr[opnum], mode, regno);
4945 0 : break;
4946 :
4947 0 : case RELOAD_FOR_INPADDR_ADDRESS:
4948 0 : add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr[opnum], mode, regno);
4949 0 : break;
4950 :
4951 0 : case RELOAD_FOR_OUTPUT_ADDRESS:
4952 0 : add_to_hard_reg_set (&reload_reg_used_in_output_addr[opnum], mode, regno);
4953 0 : break;
4954 :
4955 0 : case RELOAD_FOR_OUTADDR_ADDRESS:
4956 0 : add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr[opnum], mode, regno);
4957 0 : break;
4958 :
4959 0 : case RELOAD_FOR_OPERAND_ADDRESS:
4960 0 : add_to_hard_reg_set (&reload_reg_used_in_op_addr, mode, regno);
4961 0 : break;
4962 :
4963 0 : case RELOAD_FOR_OPADDR_ADDR:
4964 0 : add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload, mode, regno);
4965 0 : break;
4966 :
4967 0 : case RELOAD_FOR_OTHER_ADDRESS:
4968 0 : add_to_hard_reg_set (&reload_reg_used_in_other_addr, mode, regno);
4969 0 : break;
4970 :
4971 0 : case RELOAD_FOR_INPUT:
4972 0 : add_to_hard_reg_set (&reload_reg_used_in_input[opnum], mode, regno);
4973 0 : break;
4974 :
4975 0 : case RELOAD_FOR_OUTPUT:
4976 0 : add_to_hard_reg_set (&reload_reg_used_in_output[opnum], mode, regno);
4977 0 : break;
4978 :
4979 0 : case RELOAD_FOR_INSN:
4980 0 : add_to_hard_reg_set (&reload_reg_used_in_insn, mode, regno);
4981 0 : break;
4982 : }
4983 :
4984 0 : add_to_hard_reg_set (&reload_reg_used_at_all, mode, regno);
4985 0 : }
4986 :
/* Similarly, but show REGNO is no longer in use for a reload.
   REGNO and MODE describe the register span being freed; OPNUM and
   TYPE identify the reload it served.  */

static void
clear_reload_reg_in_use (unsigned int regno, int opnum,
			 enum reload_type type, machine_mode mode)
{
  unsigned int nregs = hard_regno_nregs (regno, mode);
  unsigned int start_regno, end_regno, r;
  int i;
  /* A complication is that for some reload types, inheritance might
     allow multiple reloads of the same types to share a reload register.
     We set check_opnum if we have to check only reloads with the same
     operand number, and check_any if we have to check all reloads.  */
  int check_opnum = 0;
  int check_any = 0;
  HARD_REG_SET *used_in_set;

  /* Select the set that mark_reload_reg_in_use updated for this TYPE,
     and decide how aggressively we must check for sharing.  */
  switch (type)
    {
    case RELOAD_OTHER:
      used_in_set = &reload_reg_used;
      break;

    case RELOAD_FOR_INPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_input_addr[opnum];
      break;

    case RELOAD_FOR_INPADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
      break;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_output_addr[opnum];
      break;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
      break;

    case RELOAD_FOR_OPERAND_ADDRESS:
      used_in_set = &reload_reg_used_in_op_addr;
      break;

    case RELOAD_FOR_OPADDR_ADDR:
      check_any = 1;
      used_in_set = &reload_reg_used_in_op_addr_reload;
      break;

    case RELOAD_FOR_OTHER_ADDRESS:
      used_in_set = &reload_reg_used_in_other_addr;
      check_any = 1;
      break;

    case RELOAD_FOR_INPUT:
      used_in_set = &reload_reg_used_in_input[opnum];
      break;

    case RELOAD_FOR_OUTPUT:
      used_in_set = &reload_reg_used_in_output[opnum];
      break;

    case RELOAD_FOR_INSN:
      used_in_set = &reload_reg_used_in_insn;
      break;
    default:
      gcc_unreachable ();
    }
  /* We resolve conflicts with remaining reloads of the same type by
     excluding the intervals of reload registers by them from the
     interval of freed reload registers.  Since we only keep track of
     one set of interval bounds, we might have to exclude somewhat
     more than what would be necessary if we used a HARD_REG_SET here.
     But this should only happen very infrequently, so there should
     be no reason to worry about it.  */

  start_regno = regno;
  end_regno = regno + nregs;
  if (check_opnum || check_any)
    {
      for (i = n_reloads - 1; i >= 0; i--)
	{
	  if (rld[i].when_needed == type
	      && (check_any || rld[i].opnum == opnum)
	      && rld[i].reg_rtx)
	    {
	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
	      unsigned int conflict_end
		= end_hard_regno (rld[i].mode, conflict_start);

	      /* If there is an overlap with the first to-be-freed register,
		 adjust the interval start.  */
	      if (conflict_start <= start_regno && conflict_end > start_regno)
		start_regno = conflict_end;
	      /* Otherwise, if there is a conflict with one of the other
		 to-be-freed registers, adjust the interval end.  */
	      if (conflict_start > start_regno && conflict_start < end_regno)
		end_regno = conflict_start;
	    }
	}
    }

  /* Free what is left of [START_REGNO, END_REGNO) after exclusions.  */
  for (r = start_regno; r < end_regno; r++)
    CLEAR_HARD_REG_BIT (*used_in_set, r);
}
5093 :
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.  Consults the reload_reg_used_* sets
   maintained by mark_reload_reg_in_use; each case encodes which other
   reload types a reload of TYPE may share a register with.  */

static int
reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Can't use a register if it is used for an input address
	 for this operand or used as an input in an earlier
	 one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  Note
	 that multiple output operands are emitted in reverse order, so
	 the conflicting ones are those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Can't use a register if it is used for an output address
	 for this operand or used as an output in this or a
	 later operand.  Note that multiple output operands are
	 emitted in reverse order, so the conflicting ones are
	 those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with higher indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    default:
      gcc_unreachable ();
    }
}
5254 :
/* Return 1 if the value in reload reg REGNO, as used by the reload with
   the number RELOADNUM, is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.

   The checks below consult the reload_reg_used_in_* HARD_REG_SETs, which
   record which hard registers are claimed by reloads of each type for
   each operand of the current insn.  */

static int
reload_reg_reaches_end_p (unsigned int regno, int reloadnum)
{
  int opnum = rld[reloadnum].opnum;
  enum reload_type type = rld[reloadnum].when_needed;
  int i;

  /* See if there is a reload with the same type for this operand, using
     the same register. This case is not handled by the code below.  */
  for (i = reloadnum + 1; i < n_reloads; i++)
    {
      rtx reg;

      if (rld[i].opnum != opnum || rld[i].when_needed != type)
	continue;
      reg = rld[i].reg_rtx;
      if (reg == NULL_RTX)
	continue;
      /* A later same-type/same-operand reload reuses REGNO, so our value
	 is clobbered before the end of the insn.  */
      if (regno >= REGNO (reg) && regno < END_REGNO (reg))
	return 0;
    }

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_INPADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_INPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_INPADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      /* Addresses of strictly later inputs clobber REGNO after we need
	 it, so they kill the value too.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      /* All output-side uses happen after every input address use.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Same output-side checks as above, but also require that no
	 RELOAD_FOR_OPERAND_ADDRESS or whole-insn reload claims REGNO.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      opnum = reload_n_operands;

      /* fall through */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with lower indices.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_OUTADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_OUTPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_OUTADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      return 1;

    default:
      gcc_unreachable ();
    }
}
5419 :
5420 : /* Like reload_reg_reaches_end_p, but check that the condition holds for
5421 : every register in REG. */
5422 :
5423 : static bool
5424 0 : reload_reg_rtx_reaches_end_p (rtx reg, int reloadnum)
5425 : {
5426 0 : unsigned int i;
5427 :
5428 0 : for (i = REGNO (reg); i < END_REGNO (reg); i++)
5429 0 : if (!reload_reg_reaches_end_p (i, reloadnum))
5430 : return false;
5431 : return true;
5432 : }
5433 :
5434 :
5435 : /* Returns whether R1 and R2 are uniquely chained: the value of one
5436 : is used by the other, and that value is not used by any other
5437 : reload for this insn. This is used to partially undo the decision
5438 : made in find_reloads when in the case of multiple
5439 : RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5440 : RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5441 : reloads. This code tries to avoid the conflict created by that
5442 : change. It might be cleaner to explicitly keep track of which
5443 : RELOAD_FOR_OPADDR_ADDR reload is associated with which
5444 : RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5445 : this after the fact. */
5446 : static bool
5447 0 : reloads_unique_chain_p (int r1, int r2)
5448 : {
5449 0 : int i;
5450 :
5451 : /* We only check input reloads. */
5452 0 : if (! rld[r1].in || ! rld[r2].in)
5453 : return false;
5454 :
5455 : /* Avoid anything with output reloads. */
5456 0 : if (rld[r1].out || rld[r2].out)
5457 : return false;
5458 :
5459 : /* "chained" means one reload is a component of the other reload,
5460 : not the same as the other reload. */
5461 0 : if (rld[r1].opnum != rld[r2].opnum
5462 0 : || rtx_equal_p (rld[r1].in, rld[r2].in)
5463 0 : || rld[r1].optional || rld[r2].optional
5464 0 : || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5465 0 : || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5466 0 : return false;
5467 :
5468 : /* The following loop assumes that r1 is the reload that feeds r2. */
5469 0 : if (r1 > r2)
5470 0 : std::swap (r1, r2);
5471 :
5472 0 : for (i = 0; i < n_reloads; i ++)
5473 : /* Look for input reloads that aren't our two */
5474 0 : if (i != r1 && i != r2 && rld[i].in)
5475 : {
5476 : /* If our reload is mentioned at all, it isn't a simple chain. */
5477 0 : if (reg_mentioned_p (rld[r1].in, rld[i].in))
5478 : return false;
5479 : }
5480 : return true;
5481 : }
5482 :
5483 : /* The recursive function change all occurrences of WHAT in *WHERE
5484 : to REPL. */
5485 : static void
5486 0 : substitute (rtx *where, const_rtx what, rtx repl)
5487 : {
5488 0 : const char *fmt;
5489 0 : int i;
5490 0 : enum rtx_code code;
5491 :
5492 0 : if (*where == 0)
5493 : return;
5494 :
5495 0 : if (*where == what || rtx_equal_p (*where, what))
5496 : {
5497 : /* Record the location of the changed rtx. */
5498 0 : substitute_stack.safe_push (where);
5499 0 : *where = repl;
5500 0 : return;
5501 : }
5502 :
5503 0 : code = GET_CODE (*where);
5504 0 : fmt = GET_RTX_FORMAT (code);
5505 0 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5506 : {
5507 0 : if (fmt[i] == 'E')
5508 : {
5509 0 : int j;
5510 :
5511 0 : for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5512 0 : substitute (&XVECEXP (*where, i, j), what, repl);
5513 : }
5514 0 : else if (fmt[i] == 'e')
5515 0 : substitute (&XEXP (*where, i), what, repl);
5516 : }
5517 : }
5518 :
/* The function returns TRUE if chain of reload R1 and R2 (in any
   order) can be evaluated without usage of intermediate register for
   the reload containing another reload.  It is important to see
   gen_reload to understand what the function is trying to do.  As an
   example, let us have reload chain

      r2: const
      r1: <something> + const

   and reload R2 got reload reg HR.  The function returns true if
   there is a correct insn HR = HR + <something>.  Otherwise,
   gen_reload will use intermediate register (and this is the reload
   reg for R1) to reload <something>.

   We need this function to find a conflict for chain reloads.  In our
   example, if HR = HR + <something> is incorrect insn, then we cannot
   use HR as a reload register for R2.  If we do use it then we get a
   wrong code:

      HR = const
      HR = <something>
      HR = HR + HR

*/
static bool
gen_reload_chain_without_interm_reg_p (int r1, int r2)
{
  /* Assume other cases in gen_reload are not possible for
     chain reloads or do need an intermediate hard registers.  */
  bool result = true;
  int regno, code;
  rtx out, in;
  rtx_insn *insn;
  rtx_insn *last = get_last_insn ();

  /* Make r2 a component of r1.  */
  if (reg_mentioned_p (rld[r1].in, rld[r2].in))
    std::swap (r1, r2);

  gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
  /* Use whichever reload already has a hard register assigned.  */
  regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
  gcc_assert (regno >= 0);
  out = gen_rtx_REG (rld[r1].mode, regno);
  in = rld[r1].in;
  /* Replace R2's value inside R1's input with the shared reload reg;
     substitute records the changed locations so we can undo below.  */
  substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  strip_paradoxical_subreg (&in, &out);

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* Emit a trial insn HR = <in> and ask the target whether it
	 recognizes it; the insn is deleted again below either way.  */
      insn = emit_insn (gen_rtx_SET (out, in));
      code = recog_memoized (insn);
      result = false;

      if (code >= 0)
	{
	  extract_insn (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after
	     reload has completed.  */
	  result = constrain_operands (1, get_enabled_alternatives (insn));
	}

      delete_insns_since (last);
    }

  /* Restore the original value at each changed address within R1.  */
  while (!substitute_stack.is_empty ())
    {
      rtx *where = substitute_stack.pop ();
      *where = rld[r2].in;
    }

  return result;
}
5603 :
/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
   Return 0 otherwise.

   This function uses the same algorithm as reload_reg_free_p above:
   each case below encodes, for R1's reload type, which of R2's types
   (and operand numbers) have overlapping lifetimes.  */

static int
reloads_conflict (int r1, int r2)
{
  enum reload_type r1_type = rld[r1].when_needed;
  enum reload_type r2_type = rld[r2].when_needed;
  int r1_opnum = rld[r1].opnum;
  int r2_opnum = rld[r2].opnum;

  /* RELOAD_OTHER conflicts with everything.  */
  if (r2_type == RELOAD_OTHER)
    return 1;

  /* Otherwise, check conflicts differently for each type.  */

  switch (r1_type)
    {
    case RELOAD_FOR_INPUT:
      /* Inputs conflict with whole-insn and operand-address reloads,
	 with other inputs, and with addresses of later inputs.  */
      return (r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
	      || r2_type == RELOAD_FOR_OPADDR_ADDR
	      || r2_type == RELOAD_FOR_INPUT
	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
		  && r2_opnum > r1_opnum));

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Conflicts with same-operand input addresses and with inputs of
	 earlier operands.  */
      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_INPADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Conflicts with same-operand output addresses and with outputs
	 of this or earlier operands (outputs are emitted in reverse
	 order).  */
      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));

    case RELOAD_FOR_OUTADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Two operand-address reloads may share a register only if they
	 form a unique chain that gen_reload can emit without an
	 intermediate register.  */
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
	      || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
		  && (!reloads_unique_chain_p (r1, r2)
		      || !gen_reload_chain_without_interm_reg_p (r1, r2))));

    case RELOAD_FOR_OPADDR_ADDR:
      return (r2_type == RELOAD_FOR_INPUT
	      || r2_type == RELOAD_FOR_OPADDR_ADDR);

    case RELOAD_FOR_OUTPUT:
      /* Outputs conflict with whole-insn reloads, other outputs, and
	 output addresses of this or later operands.  */
      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
		  && r2_opnum >= r1_opnum));

    case RELOAD_FOR_INSN:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
	      || r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OTHER_ADDRESS:
      return r2_type == RELOAD_FOR_OTHER_ADDRESS;

    case RELOAD_OTHER:
      return 1;

    default:
      gcc_unreachable ();
    }
}
5681 :
5682 : /* Indexed by reload number, 1 if incoming value
5683 : inherited from previous insns. */
5684 : static char reload_inherited[MAX_RELOADS];
5685 :
5686 : /* For an inherited reload, this is the insn the reload was inherited from,
5687 : if we know it. Otherwise, this is 0. */
5688 : static rtx_insn *reload_inheritance_insn[MAX_RELOADS];
5689 :
5690 : /* If nonzero, this is a place to get the value of the reload,
5691 : rather than using reload_in. */
5692 : static rtx reload_override_in[MAX_RELOADS];
5693 :
5694 : /* For each reload, the hard register number of the register used,
5695 : or -1 if we did not need a register for this reload. */
5696 : static int reload_spill_index[MAX_RELOADS];
5697 :
5698 : /* Index X is the value of rld[X].reg_rtx, adjusted for the input mode. */
5699 : static rtx reload_reg_rtx_for_input[MAX_RELOADS];
5700 :
5701 : /* Index X is the value of rld[X].reg_rtx, adjusted for the output mode. */
5702 : static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5703 :
/* Subroutine of free_for_value_p, used to check a single register.
   START_REGNO is the starting regno of the full reload register
   (possibly comprising multiple hard registers) that we are considering.

   Returns nonzero if hard register REGNO may hold VALUE for a reload of
   kind TYPE/OPNUM (reload number RELOADNUM) without clobbering, or being
   clobbered by, any other reload of this insn.  See free_for_value_p
   for the meaning of OUT and IGNORE_ADDRESS_RELOADS.  */

static int
reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
			     enum reload_type type, rtx value, rtx out,
			     int reloadnum, int ignore_address_reloads)
{
  int time1;
  /* Set if we see an input reload that must not share its reload register
     with any new earlyclobber, but might otherwise share the reload
     register with an output or input-output reload.  */
  int check_earlyclobber = 0;
  int i;
  int copy = 0;

  if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  /* OUT == const0_rtx is a sentinel meaning: test copying FROM register
     REGNO into the reload register, not using REGNO as the reload
     register itself.  */
  if (out == const0_rtx)
    {
      copy = 1;
      out = NULL_RTX;
    }

  /* We use some pseudo 'time' value to check if the lifetimes of the
     new register use would overlap with the one of a previous reload
     that is not read-only or uses a different value.
     The 'time' used doesn't have to be linear in any shape or form, just
     monotonic.
     Some reload types use different 'buckets' for each operand.
     So there are MAX_RECOG_OPERANDS different time values for each
     such reload type.
     We compute TIME1 as the time when the register for the prospective
     new reload ceases to be live, and TIME2 for each existing
     reload as the time when that the reload register of that reload
     becomes live.
     Where there is little to be gained by exact lifetime calculations,
     we just make conservative assumptions, i.e. a longer lifetime;
     this is done in the 'default:' cases.  */
  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
      time1 = copy ? 0 : 1;
      break;
    case RELOAD_OTHER:
      time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
      break;
      /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
	 respectively, to the time values for these, we get distinct time
	 values.  To get distinct time values for each operand, we have to
	 multiply opnum by at least three.  We round that up to four because
	 multiply by four is often cheaper.  */
    case RELOAD_FOR_INPADDR_ADDRESS:
      time1 = opnum * 4 + 2;
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      time1 = opnum * 4 + 3;
      break;
    case RELOAD_FOR_INPUT:
      /* All RELOAD_FOR_INPUT reloads remain live till the instruction
	 executes (inclusive).  */
      time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      /* opnum * 4 + 4
	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
      time1 = MAX_RECOG_OPERANDS * 4 + 1;
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
	 is executed.  */
      time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
      break;
    default:
      time1 = MAX_RECOG_OPERANDS * 5 + 5;
    }

  for (i = 0; i < n_reloads; i++)
    {
      rtx reg = rld[i].reg_rtx;
      /* The unsigned subtraction below is a wraparound trick: it is true
	 exactly when REGNO lies within [true_regnum (reg),
	 true_regnum (reg) + REG_NREGS (reg)).  */
      if (reg && REG_P (reg)
	  && (unsigned) regno - true_regnum (reg) < REG_NREGS (reg)
	  && i != reloadnum)
	{
	  rtx other_input = rld[i].in;

	  /* If the other reload loads the same input value, that
	     will not cause a conflict only if it's loading it into
	     the same register.  */
	  if (true_regnum (reg) != start_regno)
	    other_input = NULL_RTX;
	  if (! other_input || ! rtx_equal_p (other_input, value)
	      || rld[i].out || out)
	    {
	      int time2;
	      switch (rld[i].when_needed)
		{
		case RELOAD_FOR_OTHER_ADDRESS:
		  time2 = 0;
		  break;
		case RELOAD_FOR_INPADDR_ADDRESS:
		  /* find_reloads makes sure that a
		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
		     by at most one - the first -
		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
		     address reload is inherited, the address address reload
		     goes away, so we can ignore this conflict.  */
		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
			 Then the address address is still needed to store
			 back the new address.  */
		      && ! rld[reloadnum].out)
		    continue;
		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
		     reloads go away.  */
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      /* Unless we are reloading an auto_inc expression.  */
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 2;
		  break;
		case RELOAD_FOR_INPUT_ADDRESS:
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 3;
		  break;
		case RELOAD_FOR_INPUT:
		  time2 = rld[i].opnum * 4 + 4;
		  check_earlyclobber = 1;
		  break;
		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
		     == MAX_RECOG_OPERAND * 4  */
		case RELOAD_FOR_OPADDR_ADDR:
		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
		  break;
		case RELOAD_FOR_OPERAND_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
		  check_earlyclobber = 1;
		  break;
		case RELOAD_FOR_INSN:
		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
		  break;
		case RELOAD_FOR_OUTPUT:
		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
		     instruction is executed.  */
		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
		  break;
		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
		     value.  */
		case RELOAD_FOR_OUTADDR_ADDRESS:
		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
		  break;
		case RELOAD_FOR_OUTPUT_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
		  break;
		case RELOAD_OTHER:
		  /* If there is no conflict in the input part, handle this
		     like an output reload.  */
		  if (! rld[i].in || rtx_equal_p (other_input, value))
		    {
		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
		      /* Earlyclobbered outputs must conflict with inputs.  */
		      if (earlyclobber_operand_p (rld[i].out))
			time2 = MAX_RECOG_OPERANDS * 4 + 3;

		      break;
		    }
		  time2 = 1;
		  /* RELOAD_OTHER might be live beyond instruction execution,
		     but this is not obvious when we set time2 = 1.  So check
		     here if there might be a problem with the new reload
		     clobbering the register used by the RELOAD_OTHER.  */
		  if (out)
		    return 0;
		  break;
		default:
		  return 0;
		}
	      /* Conflict if the new use outlives the point where reload I's
		 register becomes live (unless both are read-only uses of the
		 same value), or if our output reload would overlap reload I's
		 lifetime at or beyond insn execution.  */
	      if ((time1 >= time2
		   && (! rld[i].in || rld[i].out
		       || ! rtx_equal_p (other_input, value)))
		  || (out && rld[reloadnum].out_reg
		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
		return 0;
	    }
	}
    }

  /* Earlyclobbered outputs must conflict with inputs.  */
  if (check_earlyclobber && out && earlyclobber_operand_p (out))
    return 0;

  return 1;
}
5922 :
5923 : /* Return 1 if the value in reload reg REGNO, as used by a reload
5924 : needed for the part of the insn specified by OPNUM and TYPE,
5925 : may be used to load VALUE into it.
5926 :
5927 : MODE is the mode in which the register is used, this is needed to
5928 : determine how many hard regs to test.
5929 :
5930 : Other read-only reloads with the same value do not conflict
5931 : unless OUT is nonzero and these other reloads have to live while
5932 : output reloads live.
5933 : If OUT is CONST0_RTX, this is a special case: it means that the
5934 : test should not be for using register REGNO as reload register, but
5935 : for copying from register REGNO into the reload register.
5936 :
5937 : RELOADNUM is the number of the reload we want to load this value for;
5938 : a reload does not conflict with itself.
5939 :
5940 : When IGNORE_ADDRESS_RELOADS is set, we cannot have conflicts with
5941 : reloads that load an address for the very reload we are considering.
5942 :
5943 : The caller has to make sure that there is no conflict with the return
5944 : register. */
5945 :
5946 : static int
5947 0 : free_for_value_p (int regno, machine_mode mode, int opnum,
5948 : enum reload_type type, rtx value, rtx out, int reloadnum,
5949 : int ignore_address_reloads)
5950 : {
5951 0 : int nregs = hard_regno_nregs (regno, mode);
5952 0 : while (nregs-- > 0)
5953 0 : if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5954 : value, out, reloadnum,
5955 : ignore_address_reloads))
5956 : return 0;
5957 : return 1;
5958 : }
5959 :
5960 : /* Return true if the rtx X is invariant over the current function. */
5961 : /* ??? Actually, the places where we use this expect exactly what is
5962 : tested here, and not everything that is function invariant. In
5963 : particular, the frame pointer and arg pointer are special cased;
5964 : pic_offset_table_rtx is not, and we must not spill these things to
5965 : memory. */
5966 :
5967 : bool
5968 18609211 : function_invariant_p (const_rtx x)
5969 : {
5970 18609211 : if (CONSTANT_P (x))
5971 : return 1;
5972 14250948 : if (x == frame_pointer_rtx || x == arg_pointer_rtx)
5973 : return 1;
5974 14243954 : if (GET_CODE (x) == PLUS
5975 3572010 : && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
5976 1638748 : && GET_CODE (XEXP (x, 1)) == CONST_INT)
5977 1638664 : return 1;
5978 : return 0;
5979 : }
5980 :
5981 : /* Determine whether the reload reg X overlaps any rtx'es used for
5982 : overriding inheritance. Return nonzero if so. */
5983 :
5984 : static int
5985 0 : conflicts_with_override (rtx x)
5986 : {
5987 0 : int i;
5988 0 : for (i = 0; i < n_reloads; i++)
5989 0 : if (reload_override_in[i]
5990 0 : && reg_overlap_mentioned_p (x, reload_override_in[i]))
5991 : return 1;
5992 : return 0;
5993 : }
5994 :
5995 : /* Give an error message saying we failed to find a reload for INSN,
5996 : and clear out reload R. */
5997 : static void
5998 0 : failed_reload (rtx_insn *insn, int r)
5999 : {
6000 0 : if (asm_noperands (PATTERN (insn)) < 0)
6001 : /* It's the compiler's fault. */
6002 0 : fatal_insn ("could not find a spill register", insn);
6003 :
6004 : /* It's the user's fault; the operand's mode and constraint
6005 : don't match. Disable this reload so we don't crash in final. */
6006 0 : error_for_asm (insn,
6007 : "%<asm%> operand constraint incompatible with operand size");
6008 0 : rld[r].in = 0;
6009 0 : rld[r].out = 0;
6010 0 : rld[r].reg_rtx = 0;
6011 0 : rld[r].optional = 1;
6012 0 : rld[r].secondary_p = 1;
6013 0 : }
6014 :
/* I is the index in SPILL_REG_RTX of the reload register we are to allocate
   for reload R.  If it's valid, get an rtx for it.  Return nonzero if
   successful.

   On success, rld[r].reg_rtx and reload_spill_index[r] are set, the
   register is marked in use for this insn, and last_spill_reg is updated
   to I so the round-robin allocation in allocate_reload_reg resumes
   after it.  */
static int
set_reload_reg (int i, int r)
{
  int regno;
  rtx reg = spill_reg_rtx[i];

  /* Cache a REG rtx of the right mode for this spill register.  */
  if (reg == 0 || GET_MODE (reg) != rld[r].mode)
    spill_reg_rtx[i] = reg
      = gen_rtx_REG (rld[r].mode, spill_regs[i]);

  regno = true_regnum (reg);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (targetm.hard_regno_mode_ok (regno, rld[r].mode))
    {
      machine_mode test_mode = VOIDmode;
      if (rld[r].in)
	test_mode = GET_MODE (rld[r].in);
      /* If rld[r].in has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, rld[r].mode.
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (rld[r].in != 0 && test_mode != VOIDmode
	     && !targetm.hard_regno_mode_ok (regno, test_mode)))
	if (! (rld[r].out != 0
	       && !targetm.hard_regno_mode_ok (regno, GET_MODE (rld[r].out))))
	  {
	    /* The reg is OK.  */
	    last_spill_reg = i;

	    /* Mark as in use for this insn the reload regs we use
	       for this.  */
	    mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
				    rld[r].when_needed, rld[r].mode);

	    rld[r].reg_rtx = reg;
	    reload_spill_index[r] = spill_regs[i];
	    return 1;
	  }
    }
  return 0;
}
6063 :
6064 : /* Find a spill register to use as a reload register for reload R.
6065 : LAST_RELOAD is nonzero if this is the last reload for the insn being
6066 : processed.
6067 :
6068 : Set rld[R].reg_rtx to the register allocated.
6069 :
6070 : We return 1 if successful, or 0 if we couldn't find a spill reg and
6071 : we didn't change anything. */
6072 :
6073 : static int
6074 0 : allocate_reload_reg (class insn_chain *chain ATTRIBUTE_UNUSED, int r,
6075 : int last_reload)
6076 : {
6077 0 : int i, pass, count;
6078 :
6079 : /* If we put this reload ahead, thinking it is a group,
6080 : then insist on finding a group. Otherwise we can grab a
6081 : reg that some other reload needs.
6082 : (That can happen when we have a 68000 DATA_OR_FP_REG
6083 : which is a group of data regs or one fp reg.)
6084 : We need not be so restrictive if there are no more reloads
6085 : for this insn.
6086 :
6087 : ??? Really it would be nicer to have smarter handling
6088 : for that kind of reg class, where a problem like this is normal.
6089 : Perhaps those classes should be avoided for reloading
6090 : by use of more alternatives. */
6091 :
6092 0 : int force_group = rld[r].nregs > 1 && ! last_reload;
6093 :
6094 : /* If we want a single register and haven't yet found one,
6095 : take any reg in the right class and not in use.
6096 : If we want a consecutive group, here is where we look for it.
6097 :
6098 : We use three passes so we can first look for reload regs to
6099 : reuse, which are already in use for other reloads in this insn,
6100 : and only then use additional registers which are not "bad", then
6101 : finally any register.
6102 :
6103 : I think that maximizing reuse is needed to make sure we don't
6104 : run out of reload regs. Suppose we have three reloads, and
6105 : reloads A and B can share regs. These need two regs.
6106 : Suppose A and B are given different regs.
6107 : That leaves none for C. */
6108 0 : for (pass = 0; pass < 3; pass++)
6109 : {
6110 : /* I is the index in spill_regs.
6111 : We advance it round-robin between insns to use all spill regs
6112 : equally, so that inherited reloads have a chance
6113 : of leapfrogging each other. */
6114 :
6115 0 : i = last_spill_reg;
6116 :
6117 0 : for (count = 0; count < n_spills; count++)
6118 : {
6119 0 : int rclass = (int) rld[r].rclass;
6120 0 : int regnum;
6121 :
6122 0 : i++;
6123 0 : if (i >= n_spills)
6124 0 : i -= n_spills;
6125 0 : regnum = spill_regs[i];
6126 :
6127 0 : if ((reload_reg_free_p (regnum, rld[r].opnum,
6128 : rld[r].when_needed)
6129 0 : || (rld[r].in
6130 : /* We check reload_reg_used to make sure we
6131 : don't clobber the return register. */
6132 0 : && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
6133 0 : && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
6134 : rld[r].when_needed, rld[r].in,
6135 : rld[r].out, r, 1)))
6136 0 : && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
6137 0 : && targetm.hard_regno_mode_ok (regnum, rld[r].mode)
6138 : /* Look first for regs to share, then for unshared. But
6139 : don't share regs used for inherited reloads; they are
6140 : the ones we want to preserve. */
6141 0 : && (pass
6142 0 : || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
6143 : regnum)
6144 0 : && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
6145 : regnum))))
6146 : {
6147 0 : int nr = hard_regno_nregs (regnum, rld[r].mode);
6148 :
6149 : /* During the second pass we want to avoid reload registers
6150 : which are "bad" for this reload. */
6151 0 : if (pass == 1
6152 0 : && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
6153 0 : continue;
6154 :
6155 : /* Avoid the problem where spilling a GENERAL_OR_FP_REG
6156 : (on 68000) got us two FP regs. If NR is 1,
6157 : we would reject both of them. */
6158 0 : if (force_group)
6159 0 : nr = rld[r].nregs;
6160 : /* If we need only one reg, we have already won. */
6161 0 : if (nr == 1)
6162 : {
6163 : /* But reject a single reg if we demand a group. */
6164 0 : if (force_group)
6165 0 : continue;
6166 : break;
6167 : }
6168 : /* Otherwise check that as many consecutive regs as we need
6169 : are available here. */
6170 0 : while (nr > 1)
6171 : {
6172 0 : int regno = regnum + nr - 1;
6173 0 : if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
6174 0 : && spill_reg_order[regno] >= 0
6175 0 : && reload_reg_free_p (regno, rld[r].opnum,
6176 : rld[r].when_needed)))
6177 : break;
6178 0 : nr--;
6179 : }
6180 0 : if (nr == 1)
6181 : break;
6182 : }
6183 : }
6184 :
6185 : /* If we found something on the current pass, omit later passes. */
6186 0 : if (count < n_spills)
6187 : break;
6188 : }
6189 :
6190 : /* We should have found a spill register by now. */
6191 0 : if (count >= n_spills)
6192 : return 0;
6193 :
6194 : /* I is the index in SPILL_REG_RTX of the reload register we are to
6195 : allocate. Get an rtx for it and find its register number. */
6196 :
6197 0 : return set_reload_reg (i, r);
6198 : }
6199 :
6200 : /* Initialize all the tables needed to allocate reload registers.
6201 : CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
6202 : is the array we use to restore the reg_rtx field for every reload. */
6203 :
6204 : static void
6205 0 : choose_reload_regs_init (class insn_chain *chain, rtx *save_reload_reg_rtx)
6206 : {
6207 0 : int i;
6208 :
6209 0 : for (i = 0; i < n_reloads; i++)
6210 0 : rld[i].reg_rtx = save_reload_reg_rtx[i];
6211 :
6212 0 : memset (reload_inherited, 0, MAX_RELOADS);
6213 0 : memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
6214 0 : memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
6215 :
6216 0 : CLEAR_HARD_REG_SET (reload_reg_used);
6217 0 : CLEAR_HARD_REG_SET (reload_reg_used_at_all);
6218 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
6219 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
6220 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
6221 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
6222 :
6223 0 : CLEAR_HARD_REG_SET (reg_used_in_insn);
6224 : {
6225 : HARD_REG_SET tmp;
6226 0 : REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
6227 0 : reg_used_in_insn |= tmp;
6228 0 : REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
6229 0 : reg_used_in_insn |= tmp;
6230 0 : compute_use_by_pseudos (®_used_in_insn, &chain->live_throughout);
6231 0 : compute_use_by_pseudos (®_used_in_insn, &chain->dead_or_set);
6232 : }
6233 :
6234 0 : for (i = 0; i < reload_n_operands; i++)
6235 : {
6236 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
6237 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
6238 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
6239 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
6240 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
6241 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
6242 : }
6243 :
6244 0 : reload_reg_unavailable = ~chain->used_spill_regs;
6245 :
6246 0 : CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
6247 :
6248 0 : for (i = 0; i < n_reloads; i++)
6249 : /* If we have already decided to use a certain register,
6250 : don't use it in another way. */
6251 0 : if (rld[i].reg_rtx)
6252 0 : mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
6253 : rld[i].when_needed, rld[i].mode);
6254 0 : }
6255 :
6256 : /* If X is not a subreg, return it unmodified. If it is a subreg,
6257 : look up whether we made a replacement for the SUBREG_REG. Return
6258 : either the replacement or the SUBREG_REG. */
6259 :
6260 : static rtx
6261 0 : replaced_subreg (rtx x)
6262 : {
6263 0 : if (GET_CODE (x) == SUBREG)
6264 0 : return find_replacement (&SUBREG_REG (x));
6265 : return x;
6266 : }
6267 :
6268 : /* Compute the offset to pass to subreg_regno_offset, for a pseudo of
6269 : mode OUTERMODE that is available in a hard reg of mode INNERMODE.
6270 : SUBREG is non-NULL if the pseudo is a subreg whose reg is a pseudo,
6271 : otherwise it is NULL. */
6272 :
6273 : static poly_int64
6274 0 : compute_reload_subreg_offset (machine_mode outermode,
6275 : rtx subreg,
6276 : machine_mode innermode)
6277 : {
6278 0 : poly_int64 outer_offset;
6279 0 : machine_mode middlemode;
6280 :
6281 0 : if (!subreg)
6282 0 : return subreg_lowpart_offset (outermode, innermode);
6283 :
6284 0 : outer_offset = SUBREG_BYTE (subreg);
6285 0 : middlemode = GET_MODE (SUBREG_REG (subreg));
6286 :
6287 : /* If SUBREG is paradoxical then return the normal lowpart offset
6288 : for OUTERMODE and INNERMODE. Our caller has already checked
6289 : that OUTERMODE fits in INNERMODE. */
6290 0 : if (paradoxical_subreg_p (outermode, middlemode))
6291 0 : return subreg_lowpart_offset (outermode, innermode);
6292 :
6293 : /* SUBREG is normal, but may not be lowpart; return OUTER_OFFSET
6294 : plus the normal lowpart offset for MIDDLEMODE and INNERMODE. */
6295 0 : return outer_offset + subreg_lowpart_offset (middlemode, innermode);
6296 : }
6297 :
/* Assign hard reg targets for the pseudo-registers we must reload
   into hard regs for this insn (CHAIN->insn).
   Also output the instructions to copy them in and out of the hard regs.

   For machines with register classes, we are responsible for
   finding a reload reg in the proper class.

   This proceeds in phases: sort the reloads by register class, try to
   allocate with inheritance from earlier insns (falling back to a
   no-inheritance attempt if that fails), validate the inheritance and
   override decisions, and finally record which pseudos and spill regs
   receive output reloads.  */

static void
choose_reload_regs (class insn_chain *chain)
{
  rtx_insn *insn = chain->insn;
  int i, j;
  unsigned int max_group_size = 1;
  enum reg_class group_class = NO_REGS;
  int pass, win, inheritance;

  /* Saved copy of each reload's initial reg_rtx, so a failed
     allocation attempt can be undone by choose_reload_regs_init.  */
  rtx save_reload_reg_rtx[MAX_RELOADS];

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.

     Also note whether any of the reloads wants a consecutive group of regs.
     If so, record the maximum size of the group desired and what
     register class contains all the groups needed by this insn.  */

  for (j = 0; j < n_reloads; j++)
    {
      reload_order[j] = j;
      if (rld[j].reg_rtx != NULL_RTX)
	{
	  gcc_assert (REG_P (rld[j].reg_rtx)
		      && HARD_REGISTER_P (rld[j].reg_rtx));
	  reload_spill_index[j] = REGNO (rld[j].reg_rtx);
	}
      else
	reload_spill_index[j] = -1;

      if (rld[j].nregs > 1)
	{
	  max_group_size = MAX (rld[j].nregs, max_group_size);
	  group_class
	    = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
	}

      save_reload_reg_rtx[j] = rld[j].reg_rtx;
    }

  if (n_reloads > 1)
    qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* If -O, try first with inheritance, then turning it off.
     If not -O, don't do inheritance.
     Using inheritance when not optimizing leads to paradoxes
     with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
     because one side of the comparison might be inherited.  */
  win = 0;
  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
    {
      choose_reload_regs_init (chain, save_reload_reg_rtx);

      /* Process the reloads in order of preference just found.
	 Beyond this point, subregs can be found in reload_reg_rtx.

	 This used to look for an existing reloaded home for all of the
	 reloads, and only then perform any new reloads.  But that could lose
	 if the reloads were done out of reg-class order because a later
	 reload with a looser constraint might have an old home in a register
	 needed by an earlier reload with a tighter constraint.

	 To solve this, we make two passes over the reloads, in the order
	 described above.  In the first pass we try to inherit a reload
	 from a previous insn.  If there is a later reload that needs a
	 class that is a proper subset of the class being processed, we must
	 also allocate a spill register during the first pass.

	 Then make a second pass over the reloads to allocate any reloads
	 that haven't been given registers yet.  */

      for (j = 0; j < n_reloads; j++)
	{
	  int r = reload_order[j];
	  rtx search_equiv = NULL_RTX;

	  /* Ignore reloads that got marked inoperative.  */
	  if (rld[r].out == 0 && rld[r].in == 0
	      && ! rld[r].secondary_p)
	    continue;

	  /* If find_reloads chose to use reload_in or reload_out as a reload
	     register, we don't need to chose one.  Otherwise, try even if it
	     found one since we might save an insn if we find the value lying
	     around.
	     Try also when reload_in is a pseudo without a hard reg.  */
	  if (rld[r].in != 0 && rld[r].reg_rtx != 0
	      && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
		  || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
		      && !MEM_P (rld[r].in)
		      && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
	    continue;

#if 0 /* No longer needed for correct operation.
	 It might give better code, or might not; worth an experiment?  */
	  /* If this is an optional reload, we can't inherit from earlier insns
	     until we are sure that any non-optional reloads have been allocated.
	     The following code takes advantage of the fact that optional reloads
	     are at the end of reload_order.  */
	  if (rld[r].optional != 0)
	    for (i = 0; i < j; i++)
	      if ((rld[reload_order[i]].out != 0
		   || rld[reload_order[i]].in != 0
		   || rld[reload_order[i]].secondary_p)
		  && ! rld[reload_order[i]].optional
		  && rld[reload_order[i]].reg_rtx == 0)
		allocate_reload_reg (chain, reload_order[i], 0);
#endif

	  /* First see if this pseudo is already available as reloaded
	     for a previous insn.  We cannot try to inherit for reloads
	     that are smaller than the maximum number of registers needed
	     for groups unless the register we would allocate cannot be used
	     for the groups.

	     We could check here to see if this is a secondary reload for
	     an object that is already in a register of the desired class.
	     This would avoid the need for the secondary reload register.
	     But this is complex because we can't easily determine what
	     objects might want to be loaded via this reload.  So let a
	     register be allocated here.  In `emit_reload_insns' we suppress
	     one of the loads in the case described above.  */

	  if (inheritance)
	    {
	      poly_int64 byte = 0;
	      int regno = -1;
	      machine_mode mode = VOIDmode;
	      rtx subreg = NULL_RTX;

	      /* Work out which register (hard or pseudo) holds the value
		 being reloaded in, and in what mode.  */
	      if (rld[r].in == 0)
		;
	      else if (REG_P (rld[r].in))
		{
		  regno = REGNO (rld[r].in);
		  mode = GET_MODE (rld[r].in);
		}
	      else if (REG_P (rld[r].in_reg))
		{
		  regno = REGNO (rld[r].in_reg);
		  mode = GET_MODE (rld[r].in_reg);
		}
	      else if (GET_CODE (rld[r].in_reg) == SUBREG
		       && REG_P (SUBREG_REG (rld[r].in_reg)))
		{
		  regno = REGNO (SUBREG_REG (rld[r].in_reg));
		  if (regno < FIRST_PSEUDO_REGISTER)
		    regno = subreg_regno (rld[r].in_reg);
		  else
		    {
		      subreg = rld[r].in_reg;
		      byte = SUBREG_BYTE (subreg);
		    }
		  mode = GET_MODE (rld[r].in_reg);
		}
#if AUTO_INC_DEC
	      else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
		       && REG_P (XEXP (rld[r].in_reg, 0)))
		{
		  regno = REGNO (XEXP (rld[r].in_reg, 0));
		  mode = GET_MODE (XEXP (rld[r].in_reg, 0));
		  rld[r].out = rld[r].in;
		}
#endif
#if 0
	      /* This won't work, since REGNO can be a pseudo reg number.
		 Also, it takes much more hair to keep track of all the things
		 that can invalidate an inherited reload of part of a pseudoreg.  */
	      else if (GET_CODE (rld[r].in) == SUBREG
		       && REG_P (SUBREG_REG (rld[r].in)))
		regno = subreg_regno (rld[r].in);
#endif

	      /* See whether REGNO's value is still available in the reg
		 it was last reloaded into.  */
	      if (regno >= 0
		  && reg_last_reload_reg[regno] != 0
		  && (known_ge
		      (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno])),
		       GET_MODE_SIZE (mode) + byte))
		  /* Verify that the register it's in can be used in
		     mode MODE.  */
		  && (REG_CAN_CHANGE_MODE_P
		      (REGNO (reg_last_reload_reg[regno]),
		       GET_MODE (reg_last_reload_reg[regno]),
		       mode)))
		{
		  enum reg_class rclass = rld[r].rclass, last_class;
		  rtx last_reg = reg_last_reload_reg[regno];

		  i = REGNO (last_reg);
		  byte = compute_reload_subreg_offset (mode,
						       subreg,
						       GET_MODE (last_reg));
		  i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
		  last_class = REGNO_REG_CLASS (i);

		  if (reg_reloaded_contents[i] == regno
		      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
		      && targetm.hard_regno_mode_ok (i, rld[r].mode)
		      && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
			  /* Even if we can't use this register as a reload
			     register, we might use it for reload_override_in,
			     if copying it to the desired class is cheap
			     enough.  */
			  || ((register_move_cost (mode, last_class, rclass)
			       < memory_move_cost (mode, rclass, true))
			      && (secondary_reload_class (1, rclass, mode,
							  last_reg)
				  == NO_REGS)
			      && !(targetm.secondary_memory_needed
				   (mode, last_class, rclass))))
		      && (rld[r].nregs == max_group_size
			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
						  i))
		      && free_for_value_p (i, rld[r].mode, rld[r].opnum,
					   rld[r].when_needed, rld[r].in,
					   const0_rtx, r, 1))
		    {
		      /* If a group is needed, verify that all the subsequent
			 registers still have their values intact.  */
		      int nr = hard_regno_nregs (i, rld[r].mode);
		      int k;

		      for (k = 1; k < nr; k++)
			if (reg_reloaded_contents[i + k] != regno
			    || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
			  break;

		      if (k == nr)
			{
			  int i1;
			  int bad_for_class;

			  last_reg = (GET_MODE (last_reg) == mode
				      ? last_reg : gen_rtx_REG (mode, i));

			  bad_for_class = 0;
			  for (k = 0; k < nr; k++)
			    bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
								  i+k);

			  /* We found a register that contains the
			     value we need.  If this register is the
			     same as an `earlyclobber' operand of the
			     current insn, just mark it as a place to
			     reload from since we can't use it as the
			     reload register itself.  */

			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
			    if (reg_overlap_mentioned_for_reload_p
				(reg_last_reload_reg[regno],
				 reload_earlyclobbers[i1]))
			      break;

			  if (i1 != n_earlyclobbers
			      || ! (free_for_value_p (i, rld[r].mode,
						      rld[r].opnum,
						      rld[r].when_needed, rld[r].in,
						      rld[r].out, r, 1))
			      /* Don't use it if we'd clobber a pseudo reg.  */
			      || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
				  && rld[r].out
				  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
			      /* Don't clobber the frame pointer.  */
			      || (i == HARD_FRAME_POINTER_REGNUM
				  && frame_pointer_needed
				  && rld[r].out)
			      /* Don't really use the inherited spill reg
				 if we need it wider than we've got it.  */
			      || paradoxical_subreg_p (rld[r].mode, mode)
			      || bad_for_class

			      /* If find_reloads chose reload_out as reload
				 register, stay with it - that leaves the
				 inherited register for subsequent reloads.  */
			      || (rld[r].out && rld[r].reg_rtx
				  && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
			    {
			      if (! rld[r].optional)
				{
				  reload_override_in[r] = last_reg;
				  reload_inheritance_insn[r]
				    = reg_reloaded_insn[i];
				}
			    }
			  else
			    {
			      int k;
			      /* We can use this as a reload reg.  */
			      /* Mark the register as in use for this part of
				 the insn.  */
			      mark_reload_reg_in_use (i,
						      rld[r].opnum,
						      rld[r].when_needed,
						      rld[r].mode);
			      rld[r].reg_rtx = last_reg;
			      reload_inherited[r] = 1;
			      reload_inheritance_insn[r]
				= reg_reloaded_insn[i];
			      reload_spill_index[r] = i;
			      for (k = 0; k < nr; k++)
				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
						  i + k);
			    }
			}
		    }
		}
	    }

	  /* Here's another way to see if the value is already lying around.  */
	  if (inheritance
	      && rld[r].in != 0
	      && ! reload_inherited[r]
	      && rld[r].out == 0
	      && (CONSTANT_P (rld[r].in)
		  || GET_CODE (rld[r].in) == PLUS
		  || REG_P (rld[r].in)
		  || MEM_P (rld[r].in))
	      && (rld[r].nregs == max_group_size
		  || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
	    search_equiv = rld[r].in;

	  if (search_equiv)
	    {
	      rtx equiv
		= find_equiv_reg (search_equiv, insn, rld[r].rclass,
				  -1, NULL, 0, rld[r].mode);
	      int regno = 0;

	      if (equiv != 0)
		{
		  if (REG_P (equiv))
		    regno = REGNO (equiv);
		  else
		    {
		      /* This must be a SUBREG of a hard register.
			 Make a new REG since this might be used in an
			 address and not all machines support SUBREGs
			 there.  */
		      gcc_assert (GET_CODE (equiv) == SUBREG);
		      regno = subreg_regno (equiv);
		      equiv = gen_rtx_REG (rld[r].mode, regno);
		      /* If we choose EQUIV as the reload register, but the
			 loop below decides to cancel the inheritance, we'll
			 end up reloading EQUIV in rld[r].mode, not the mode
			 it had originally.  That isn't safe when EQUIV isn't
			 available as a spill register since its value might
			 still be live at this point.  */
		      for (i = regno; i < regno + (int) rld[r].nregs; i++)
			if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
			  equiv = 0;
		    }
		}

	      /* If we found a spill reg, reject it unless it is free
		 and of the desired class.  */
	      if (equiv != 0)
		{
		  int regs_used = 0;
		  int bad_for_class = 0;
		  int max_regno = regno + rld[r].nregs;

		  for (i = regno; i < max_regno; i++)
		    {
		      regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
						      i);
		      bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
							    i);
		    }

		  if ((regs_used
		       && ! free_for_value_p (regno, rld[r].mode,
					      rld[r].opnum, rld[r].when_needed,
					      rld[r].in, rld[r].out, r, 1))
		      || bad_for_class)
		    equiv = 0;
		}

	      if (equiv != 0
		  && !targetm.hard_regno_mode_ok (regno, rld[r].mode))
		equiv = 0;

	      /* We found a register that contains the value we need.
		 If this register is the same as an `earlyclobber' operand
		 of the current insn, just mark it as a place to reload from
		 since we can't use it as the reload register itself.  */

	      if (equiv != 0)
		for (i = 0; i < n_earlyclobbers; i++)
		  if (reg_overlap_mentioned_for_reload_p (equiv,
							  reload_earlyclobbers[i]))
		    {
		      if (! rld[r].optional)
			reload_override_in[r] = equiv;
		      equiv = 0;
		      break;
		    }

	      /* If the equiv register we have found is explicitly clobbered
		 in the current insn, it depends on the reload type if we
		 can use it, use it for reload_override_in, or not at all.
		 In particular, we then can't use EQUIV for a
		 RELOAD_FOR_OUTPUT_ADDRESS reload.  */

	      if (equiv != 0)
		{
		  if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
		    switch (rld[r].when_needed)
		      {
		      case RELOAD_FOR_OTHER_ADDRESS:
		      case RELOAD_FOR_INPADDR_ADDRESS:
		      case RELOAD_FOR_INPUT_ADDRESS:
		      case RELOAD_FOR_OPADDR_ADDR:
			break;
		      case RELOAD_OTHER:
		      case RELOAD_FOR_INPUT:
		      case RELOAD_FOR_OPERAND_ADDRESS:
			if (! rld[r].optional)
			  reload_override_in[r] = equiv;
			/* Fall through.  */
		      default:
			equiv = 0;
			break;
		      }
		  else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
		    switch (rld[r].when_needed)
		      {
		      case RELOAD_FOR_OTHER_ADDRESS:
		      case RELOAD_FOR_INPADDR_ADDRESS:
		      case RELOAD_FOR_INPUT_ADDRESS:
		      case RELOAD_FOR_OPADDR_ADDR:
		      case RELOAD_FOR_OPERAND_ADDRESS:
		      case RELOAD_FOR_INPUT:
			break;
		      case RELOAD_OTHER:
			if (! rld[r].optional)
			  reload_override_in[r] = equiv;
			/* Fall through.  */
		      default:
			equiv = 0;
			break;
		      }
		}

	      /* If we found an equivalent reg, say no code need be generated
		 to load it, and use it as our reload reg.  */
	      if (equiv != 0
		  && (regno != HARD_FRAME_POINTER_REGNUM
		      || !frame_pointer_needed))
		{
		  int nr = hard_regno_nregs (regno, rld[r].mode);
		  int k;
		  rld[r].reg_rtx = equiv;
		  reload_spill_index[r] = regno;
		  reload_inherited[r] = 1;

		  /* If reg_reloaded_valid is not set for this register,
		     there might be a stale spill_reg_store lying around.
		     We must clear it, since otherwise emit_reload_insns
		     might delete the store.  */
		  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
		    spill_reg_store[regno] = NULL;
		  /* If any of the hard registers in EQUIV are spill
		     registers, mark them as in use for this insn.  */
		  for (k = 0; k < nr; k++)
		    {
		      i = spill_reg_order[regno + k];
		      if (i >= 0)
			{
			  mark_reload_reg_in_use (regno, rld[r].opnum,
						  rld[r].when_needed,
						  rld[r].mode);
			  SET_HARD_REG_BIT (reload_reg_used_for_inherit,
					    regno + k);
			}
		    }
		}
	    }

	  /* If we found a register to use already, or if this is an optional
	     reload, we are done.  */
	  if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
	    continue;

#if 0
	  /* No longer needed for correct operation.  Might or might
	     not give better code on the average.  Want to experiment?  */

	  /* See if there is a later reload that has a class different from our
	     class that intersects our class or that requires less register
	     than our reload.  If so, we must allocate a register to this
	     reload now, since that reload might inherit a previous reload
	     and take the only available register in our class.  Don't do this
	     for optional reloads since they will force all previous reloads
	     to be allocated.  Also don't do this for reloads that have been
	     turned off.  */

	  for (i = j + 1; i < n_reloads; i++)
	    {
	      int s = reload_order[i];

	      if ((rld[s].in == 0 && rld[s].out == 0
		   && ! rld[s].secondary_p)
		  || rld[s].optional)
		continue;

	      if ((rld[s].rclass != rld[r].rclass
		   && reg_classes_intersect_p (rld[r].rclass,
					       rld[s].rclass))
		  || rld[s].nregs < rld[r].nregs)
		break;
	    }

	  if (i == n_reloads)
	    continue;

	  allocate_reload_reg (chain, r, j == n_reloads - 1);
#endif
	}

      /* Now allocate reload registers for anything non-optional that
	 didn't get one yet.  */
      for (j = 0; j < n_reloads; j++)
	{
	  int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
	    continue;

	  /* Skip reloads that already have a register allocated or are
	     optional.  */
	  if (rld[r].reg_rtx != 0 || rld[r].optional)
	    continue;

	  if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
	    break;
	}

      /* If that loop got all the way, we have won.  */
      if (j == n_reloads)
	{
	  win = 1;
	  break;
	}

      /* Loop around and try without any inheritance.  */
    }

  if (! win)
    {
      /* First undo everything done by the failed attempt
	 to allocate with inheritance.  */
      choose_reload_regs_init (chain, save_reload_reg_rtx);

      /* Some sanity tests to verify that the reloads found in the first
	 pass are identical to the ones we have now.  */
      gcc_assert (chain->n_reloads == n_reloads);

      for (i = 0; i < n_reloads; i++)
	{
	  if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
	    continue;
	  gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
	  for (j = 0; j < n_spills; j++)
	    if (spill_regs[j] == chain->rld[i].regno)
	      if (! set_reload_reg (j, i))
		failed_reload (chain->insn, i);
	}
    }

  /* If we thought we could inherit a reload, because it seemed that
     nothing else wanted the same reload register earlier in the insn,
     verify that assumption, now that all reloads have been assigned.
     Likewise for reloads where reload_override_in has been set.  */

  /* If doing expensive optimizations, do one preliminary pass that doesn't
     cancel any inheritance, but removes reloads that have been needed only
     for reloads that we know can be inherited.  */
  for (pass = flag_expensive_optimizations; pass >= 0; pass--)
    {
      for (j = 0; j < n_reloads; j++)
	{
	  int r = reload_order[j];
	  rtx check_reg;
	  rtx tem;
	  if (reload_inherited[r] && rld[r].reg_rtx)
	    check_reg = rld[r].reg_rtx;
	  else if (reload_override_in[r]
		   && (REG_P (reload_override_in[r])
		       || GET_CODE (reload_override_in[r]) == SUBREG))
	    check_reg = reload_override_in[r];
	  else
	    continue;
	  if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
				  rld[r].opnum, rld[r].when_needed, rld[r].in,
				  (reload_inherited[r]
				   ? rld[r].out : const0_rtx),
				  r, 1))
	    {
	      if (pass)
		continue;
	      reload_inherited[r] = 0;
	      reload_override_in[r] = 0;
	    }
	  /* If we can inherit a RELOAD_FOR_INPUT, or can use a
	     reload_override_in, then we do not need its related
	     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
	     likewise for other reload types.
	     We handle this by removing a reload when its only replacement
	     is mentioned in reload_in of the reload we are going to inherit.
	     A special case are auto_inc expressions; even if the input is
	     inherited, we still need the address for the output.  We can
	     recognize them because they have RELOAD_OUT set to RELOAD_IN.
	     If we succeeded removing some reload and we are doing a preliminary
	     pass just to remove such reloads, make another pass, since the
	     removal of one reload might allow us to inherit another one.  */
	  else if (rld[r].in
		   && rld[r].out != rld[r].in
		   && remove_address_replacements (rld[r].in))
	    {
	      if (pass)
		pass = 2;
	    }
	  /* If we needed a memory location for the reload, we also have to
	     remove its related reloads.  */
	  else if (rld[r].in
		   && rld[r].out != rld[r].in
		   && (tem = replaced_subreg (rld[r].in), REG_P (tem))
		   && REGNO (tem) < FIRST_PSEUDO_REGISTER
		   && (targetm.secondary_memory_needed
		       (rld[r].inmode, REGNO_REG_CLASS (REGNO (tem)),
			rld[r].rclass))
		   && remove_address_replacements
		      (get_secondary_mem (tem, rld[r].inmode, rld[r].opnum,
					  rld[r].when_needed)))
	    {
	      if (pass)
		pass = 2;
	    }
	}
    }

  /* Now that reload_override_in is known valid,
     actually override reload_in.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_override_in[j])
      rld[j].in = reload_override_in[j];

  /* If this reload won't be done because it has been canceled or is
     optional and not inherited, clear reload_reg_rtx so other
     routines (such as subst_reloads) don't get confused.  */
  for (j = 0; j < n_reloads; j++)
    if (rld[j].reg_rtx != 0
	&& ((rld[j].optional && ! reload_inherited[j])
	    || (rld[j].in == 0 && rld[j].out == 0
		&& ! rld[j].secondary_p)))
      {
	int regno = true_regnum (rld[j].reg_rtx);

	if (spill_reg_order[regno] >= 0)
	  clear_reload_reg_in_use (regno, rld[j].opnum,
				   rld[j].when_needed, rld[j].mode);
	rld[j].reg_rtx = 0;
	reload_spill_index[j] = -1;
      }

  /* Record which pseudos and which spill regs have output reloads.  */
  for (j = 0; j < n_reloads; j++)
    {
      int r = reload_order[j];

      i = reload_spill_index[r];

      /* I is nonneg if this reload uses a register.
	 If rld[r].reg_rtx is 0, this is an optional reload
	 that we opted to ignore.  */
      if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
	  && rld[r].reg_rtx != 0)
	{
	  int nregno = REGNO (rld[r].out_reg);
	  int nr = 1;

	  if (nregno < FIRST_PSEUDO_REGISTER)
	    nr = hard_regno_nregs (nregno, rld[r].mode);

	  while (--nr >= 0)
	    SET_REGNO_REG_SET (&reg_has_output_reload,
			       nregno + nr);

	  if (i >= 0)
	    add_to_hard_reg_set (&reg_is_output_reload, rld[r].mode, i);

	  gcc_assert (rld[r].when_needed == RELOAD_OTHER
		      || rld[r].when_needed == RELOAD_FOR_OUTPUT
		      || rld[r].when_needed == RELOAD_FOR_INSN);
	}
    }
}
7005 :
7006 : /* Deallocate the reload register for reload R. This is called from
7007 : remove_address_replacements. Forgets the reload's register
 : (rld[r].reg_rtx), releases its spill-register reservation if it had
 : one, and clears its spill index. */
7008 :
7009 : void
7010 0 : deallocate_reload_reg (int r)
7011 : {
7012 0 : int regno;
7013 :
7014 0 : if (! rld[r].reg_rtx)
7015 : return;
7016 0 : regno = true_regnum (rld[r].reg_rtx);
7017 0 : rld[r].reg_rtx = 0;
 : /* Only registers tracked in spill_reg_order were reserved through
 : the reload-reg-in-use machinery; release those. */
7018 0 : if (spill_reg_order[regno] >= 0)
7019 0 : clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7020 : rld[r].mode);
7021 0 : reload_spill_index[r] = -1;
7022 : }
7023 :
7024 : /* These arrays are filled by emit_reload_insns and its subroutines.
 : Each variable accumulates the insn sequence generated for one reload
 : type (see the RELOAD_FOR_* switch in emit_input_reload_insns); the
 : arrays are indexed by operand number. */
7025 : static rtx_insn *input_reload_insns[MAX_RECOG_OPERANDS];
7026 : static rtx_insn *other_input_address_reload_insns = 0;
7027 : static rtx_insn *other_input_reload_insns = 0;
7028 : static rtx_insn *input_address_reload_insns[MAX_RECOG_OPERANDS];
7029 : static rtx_insn *inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
7030 : static rtx_insn *output_reload_insns[MAX_RECOG_OPERANDS];
7031 : static rtx_insn *output_address_reload_insns[MAX_RECOG_OPERANDS];
7032 : static rtx_insn *outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
7033 : static rtx_insn *operand_reload_insns = 0;
7034 : static rtx_insn *other_operand_reload_insns = 0;
7035 : static rtx_insn *other_output_reload_insns[MAX_RECOG_OPERANDS];
7036 :
7037 : /* Values to be put in spill_reg_store are put here first. Instructions
7038 : must only be placed here if the associated reload register reaches
7039 : the end of the instruction's reload sequence. */
7040 : static rtx_insn *new_spill_reg_store[FIRST_PSEUDO_REGISTER];
 : /* Hard registers whose reload copies died in the current insn. */
7041 : static HARD_REG_SET reg_reloaded_died;
7042 :
7043 : /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7044 : of class NEW_CLASS with mode NEW_MODE. Or alternatively, if alt_reload_reg
7045 : is nonzero, if that is suitable. On success, change *RELOAD_REG to the
7046 : adjusted register, and return true. Otherwise, return false. */
7047 : static bool
7048 0 : reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7049 : enum reg_class new_class,
7050 : machine_mode new_mode)
7051 :
7052 : {
7053 0 : rtx reg;
7054 :
 : /* Try *RELOAD_REG first, then ALT_RELOAD_REG: the loop header shifts
 : REG to the alternative and clears it, so at most two candidates
 : are examined. */
7055 0 : for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7056 : {
7057 0 : unsigned regno = REGNO (reg);
7058 :
7059 0 : if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7060 0 : continue;
7061 0 : if (GET_MODE (reg) != new_mode)
7062 : {
 : /* Reject the candidate if it cannot hold NEW_MODE or if
 : NEW_MODE would need more hard registers than the candidate
 : provides. */
7063 0 : if (!targetm.hard_regno_mode_ok (regno, new_mode))
7064 0 : continue;
7065 0 : if (hard_regno_nregs (regno, new_mode) > REG_NREGS (reg))
7066 0 : continue;
7067 0 : reg = reload_adjust_reg_for_mode (reg, new_mode);
7068 : }
7069 0 : *reload_reg = reg;
7070 0 : return true;
7071 : }
7072 : return false;
7073 : }
7074 :
7075 : /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7076 : pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7077 : nonzero, if that is suitable. On success, change *RELOAD_REG to the
7078 : adjusted register, and return true. Otherwise, return false. */
7079 : static bool
7080 0 : reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7081 : enum insn_code icode)
7082 :
7083 : {
 : /* Operand 2 of a secondary-reload pattern is its scratch operand;
 : its class and mode determine what register is acceptable. */
7084 0 : enum reg_class new_class = scratch_reload_class (icode);
7085 0 : machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7086 :
7087 0 : return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7088 0 : new_class, new_mode);
7089 : }
7090 :
7091 : /* Generate insns to perform reload RL, which is for the insn in CHAIN and
7092 : has the number J. OLD contains the value to be used as input.
 : The generated insns are accumulated, via push_to_sequence, onto the
 : per-reload-type sequence buffer selected from RL->when_needed. */
7093 :
7094 : static void
7095 0 : emit_input_reload_insns (class insn_chain *chain, struct reload *rl,
7096 : rtx old, int j)
7097 : {
7098 0 : rtx_insn *insn = chain->insn;
7099 0 : rtx reloadreg;
7100 0 : rtx oldequiv_reg = 0;
7101 0 : rtx oldequiv = 0;
7102 0 : int special = 0;
7103 0 : machine_mode mode;
7104 0 : rtx_insn **where;
7105 :
7106 : /* delete_output_reload is only invoked properly if old contains
7107 : the original pseudo register. Since this is replaced with a
7108 : hard reg when RELOAD_OVERRIDE_IN is set, see if we can
7109 : find the pseudo in RELOAD_IN_REG. This is also used to
7110 : determine whether a secondary reload is needed. */
7111 0 : if (reload_override_in[j]
7112 0 : && (REG_P (rl->in_reg)
7113 0 : || (GET_CODE (rl->in_reg) == SUBREG
7114 0 : && REG_P (SUBREG_REG (rl->in_reg)))))
7115 : {
7116 0 : oldequiv = old;
7117 0 : old = rl->in_reg;
7118 : }
7119 0 : if (oldequiv == 0)
7120 : oldequiv = old;
7121 0 : else if (REG_P (oldequiv))
7122 : oldequiv_reg = oldequiv;
7123 0 : else if (GET_CODE (oldequiv) == SUBREG)
7124 0 : oldequiv_reg = SUBREG_REG (oldequiv);
 : /* From here on, OLDEQUIV is the rtx we will actually copy from and
 : OLDEQUIV_REG is its underlying register, if any. */
7125 :
7126 0 : reloadreg = reload_reg_rtx_for_input[j];
7127 0 : mode = GET_MODE (reloadreg);
7128 :
7129 : /* If we are reloading from a register that was recently stored in
7130 : with an output-reload, see if we can prove there was
7131 : actually no need to store the old value in it. */
7132 :
7133 0 : if (optimize && REG_P (oldequiv)
7134 0 : && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
7135 0 : && spill_reg_store[REGNO (oldequiv)]
7136 0 : && REG_P (old)
7137 0 : && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
7138 0 : || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
7139 0 : rl->out_reg)))
7140 0 : delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
7141 :
7142 : /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
7143 : OLDEQUIV. */
7144 :
7145 0 : while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
7146 0 : oldequiv = SUBREG_REG (oldequiv);
7147 0 : if (GET_MODE (oldequiv) != VOIDmode
7148 0 : && mode != GET_MODE (oldequiv))
7149 0 : oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
7150 :
7151 : /* Switch to the right place to emit the reload insns. */
7152 0 : switch (rl->when_needed)
7153 : {
7154 : case RELOAD_OTHER:
7155 0 : where = &other_input_reload_insns;
7156 : break;
7157 0 : case RELOAD_FOR_INPUT:
7158 0 : where = &input_reload_insns[rl->opnum];
7159 0 : break;
7160 0 : case RELOAD_FOR_INPUT_ADDRESS:
7161 0 : where = &input_address_reload_insns[rl->opnum];
7162 0 : break;
7163 0 : case RELOAD_FOR_INPADDR_ADDRESS:
7164 0 : where = &inpaddr_address_reload_insns[rl->opnum];
7165 0 : break;
7166 0 : case RELOAD_FOR_OUTPUT_ADDRESS:
7167 0 : where = &output_address_reload_insns[rl->opnum];
7168 0 : break;
7169 0 : case RELOAD_FOR_OUTADDR_ADDRESS:
7170 0 : where = &outaddr_address_reload_insns[rl->opnum];
7171 0 : break;
7172 0 : case RELOAD_FOR_OPERAND_ADDRESS:
7173 0 : where = &operand_reload_insns;
7174 0 : break;
7175 0 : case RELOAD_FOR_OPADDR_ADDR:
7176 0 : where = &other_operand_reload_insns;
7177 0 : break;
7178 0 : case RELOAD_FOR_OTHER_ADDRESS:
7179 0 : where = &other_input_address_reload_insns;
7180 0 : break;
7181 0 : default:
7182 0 : gcc_unreachable ();
7183 : }
7184 :
7185 0 : push_to_sequence (*where);
7186 :
7187 : /* Auto-increment addresses must be reloaded in a special way. */
7188 0 : if (rl->out && ! rl->out_reg)
7189 : {
7190 : /* We are not going to bother supporting the case where a
7191 : incremented register can't be copied directly from
7192 : OLDEQUIV since this seems highly unlikely. */
7193 0 : gcc_assert (rl->secondary_in_reload < 0);
7194 :
7195 0 : if (reload_inherited[j])
7196 0 : oldequiv = reloadreg;
7197 :
7198 0 : old = XEXP (rl->in_reg, 0);
7199 :
7200 : /* Prevent normal processing of this reload. */
7201 0 : special = 1;
7202 : /* Output a special code sequence for this case. */
7203 0 : inc_for_reload (reloadreg, oldequiv, rl->out, rl->inc);
7204 : }
7205 :
7206 : /* If we are reloading a pseudo-register that was set by the previous
7207 : insn, see if we can get rid of that pseudo-register entirely
7208 : by redirecting the previous insn into our reload register. */
7209 :
7210 0 : else if (optimize && REG_P (old)
7211 0 : && REGNO (old) >= FIRST_PSEUDO_REGISTER
7212 0 : && dead_or_set_p (insn, old)
7213 : /* This is unsafe if some other reload
7214 : uses the same reg first. */
7215 0 : && ! conflicts_with_override (reloadreg)
7216 0 : && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
7217 : rl->when_needed, old, rl->out, j, 0))
7218 : {
7219 0 : rtx_insn *temp = PREV_INSN (insn);
7220 0 : while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
7221 0 : temp = PREV_INSN (temp);
7222 0 : if (temp
7223 0 : && NONJUMP_INSN_P (temp)
7224 0 : && GET_CODE (PATTERN (temp)) == SET
7225 0 : && SET_DEST (PATTERN (temp)) == old
7226 : /* Make sure we can access insn_operand_constraint. */
7227 0 : && asm_noperands (PATTERN (temp)) < 0
7228 : /* This is unsafe if operand occurs more than once in current
7229 : insn. Perhaps some occurrences aren't reloaded. */
7230 0 : && count_occurrences (PATTERN (insn), old, 0) == 1)
7231 : {
7232 0 : rtx old = SET_DEST (PATTERN (temp));
7233 : /* Store into the reload register instead of the pseudo. */
7234 0 : SET_DEST (PATTERN (temp)) = reloadreg;
7235 :
7236 : /* Verify that resulting insn is valid.
7237 :
7238 : Note that we have replaced the destination of TEMP with
7239 : RELOADREG. If TEMP references RELOADREG within an
7240 : autoincrement addressing mode, then the resulting insn
7241 : is ill-formed and we must reject this optimization. */
7242 0 : extract_insn (temp);
7243 0 : if (constrain_operands (1, get_enabled_alternatives (temp))
7244 0 : && (!AUTO_INC_DEC || ! find_reg_note (temp, REG_INC, reloadreg)))
7245 : {
7246 : /* If the previous insn is an output reload, the source is
7247 : a reload register, and its spill_reg_store entry will
7248 : contain the previous destination. This is now
7249 : invalid. */
7250 0 : if (REG_P (SET_SRC (PATTERN (temp)))
7251 0 : && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
7252 : {
7253 0 : spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7254 0 : spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7255 : }
7256 :
7257 : /* If these are the only uses of the pseudo reg,
7258 : pretend for GDB it lives in the reload reg we used. */
7259 0 : if (REG_N_DEATHS (REGNO (old)) == 1
7260 0 : && REG_N_SETS (REGNO (old)) == 1)
7261 : {
7262 0 : reg_renumber[REGNO (old)] = REGNO (reloadreg);
7263 0 : if (ira_conflicts_p)
7264 : /* Inform IRA about the change. */
7265 0 : ira_mark_allocation_change (REGNO (old));
7266 0 : alter_reg (REGNO (old), -1, false);
7267 : }
7268 0 : special = 1;
7269 :
7270 : /* Adjust any debug insns between temp and insn. */
7271 0 : while ((temp = NEXT_INSN (temp)) != insn)
7272 0 : if (DEBUG_BIND_INSN_P (temp))
7273 0 : INSN_VAR_LOCATION_LOC (temp)
7274 0 : = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (temp),
7275 : old, reloadreg);
7276 : else
7277 0 : gcc_assert (DEBUG_INSN_P (temp) || NOTE_P (temp));
7278 : }
7279 : else
7280 : {
 : /* The modified insn was not valid; undo the redirection. */
7281 0 : SET_DEST (PATTERN (temp)) = old;
7282 : }
7283 : }
7284 : }
7285 :
7286 : /* We can't do that, so output an insn to load RELOADREG. */
7287 :
7288 : /* If we have a secondary reload, pick up the secondary register
7289 : and icode, if any. If OLDEQUIV and OLD are different or
7290 : if this is an in-out reload, recompute whether or not we
7291 : still need a secondary register and what the icode should
7292 : be. If we still need a secondary register and the class or
7293 : icode is different, go back to reloading from OLD if using
7294 : OLDEQUIV means that we got the wrong type of register. We
7295 : cannot have different class or icode due to an in-out reload
7296 : because we don't make such reloads when both the input and
7297 : output need secondary reload registers. */
7298 :
7299 0 : if (! special && rl->secondary_in_reload >= 0)
7300 : {
7301 0 : rtx second_reload_reg = 0;
7302 0 : rtx third_reload_reg = 0;
7303 0 : int secondary_reload = rl->secondary_in_reload;
7304 0 : rtx real_oldequiv = oldequiv;
7305 0 : rtx real_old = old;
7306 0 : rtx tmp;
7307 0 : enum insn_code icode;
7308 0 : enum insn_code tertiary_icode = CODE_FOR_nothing;
7309 :
7310 : /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7311 : and similarly for OLD.
7312 : See comments in get_secondary_reload in reload.cc. */
7313 : /* If it is a pseudo that cannot be replaced with its
7314 : equivalent MEM, we must fall back to reload_in, which
7315 : will have all the necessary substitutions registered.
7316 : Likewise for a pseudo that can't be replaced with its
7317 : equivalent constant.
7318 :
7319 : Take extra care for subregs of such pseudos. Note that
7320 : we cannot use reg_equiv_mem in this case because it is
7321 : not in the right mode. */
7322 :
7323 0 : tmp = oldequiv;
7324 0 : if (GET_CODE (tmp) == SUBREG)
7325 0 : tmp = SUBREG_REG (tmp);
7326 0 : if (REG_P (tmp)
7327 0 : && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7328 0 : && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7329 0 : || reg_equiv_constant (REGNO (tmp)) != 0))
7330 : {
7331 0 : if (! reg_equiv_mem (REGNO (tmp))
7332 0 : || num_not_at_initial_offset
7333 0 : || GET_CODE (oldequiv) == SUBREG)
7334 0 : real_oldequiv = rl->in;
7335 : else
7336 : real_oldequiv = reg_equiv_mem (REGNO (tmp));
7337 : }
7338 :
7339 0 : tmp = old;
7340 0 : if (GET_CODE (tmp) == SUBREG)
7341 0 : tmp = SUBREG_REG (tmp);
7342 0 : if (REG_P (tmp)
7343 0 : && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7344 0 : && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7345 0 : || reg_equiv_constant (REGNO (tmp)) != 0))
7346 : {
7347 0 : if (! reg_equiv_mem (REGNO (tmp))
7348 0 : || num_not_at_initial_offset
7349 0 : || GET_CODE (old) == SUBREG)
7350 0 : real_old = rl->in;
7351 : else
7352 : real_old = reg_equiv_mem (REGNO (tmp));
7353 : }
7354 :
7355 0 : second_reload_reg = rld[secondary_reload].reg_rtx;
7356 0 : if (rld[secondary_reload].secondary_in_reload >= 0)
7357 : {
7358 0 : int tertiary_reload = rld[secondary_reload].secondary_in_reload;
7359 :
7360 0 : third_reload_reg = rld[tertiary_reload].reg_rtx;
7361 0 : tertiary_icode = rld[secondary_reload].secondary_in_icode;
7362 : /* We'd have to add more code for quartary reloads. */
7363 0 : gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
7364 : }
7365 0 : icode = rl->secondary_in_icode;
7366 :
7367 0 : if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
7368 0 : || (rl->in != 0 && rl->out != 0))
7369 : {
7370 0 : secondary_reload_info sri, sri2;
7371 0 : enum reg_class new_class, new_t_class;
7372 :
7373 0 : sri.icode = CODE_FOR_nothing;
7374 0 : sri.prev_sri = NULL;
7375 0 : new_class
7376 0 : = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7377 0 : rl->rclass, mode,
7378 : &sri);
7379 :
 : /* Four cases follow, keyed on whether the target still wants a
 : secondary class and/or an insn_code for REAL_OLDEQUIV; when
 : the existing registers cannot be adapted, fall back to
 : reloading from OLD. */
7380 0 : if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7381 0 : second_reload_reg = 0;
7382 0 : else if (new_class == NO_REGS)
7383 : {
7384 0 : if (reload_adjust_reg_for_icode (&second_reload_reg,
7385 : third_reload_reg,
7386 : (enum insn_code) sri.icode))
7387 : {
7388 0 : icode = (enum insn_code) sri.icode;
7389 0 : third_reload_reg = 0;
7390 : }
7391 : else
7392 : {
7393 : oldequiv = old;
7394 : real_oldequiv = real_old;
7395 : }
7396 : }
7397 0 : else if (sri.icode != CODE_FOR_nothing)
7398 : /* We currently lack a way to express this in reloads. */
7399 0 : gcc_unreachable ();
7400 : else
7401 : {
7402 0 : sri2.icode = CODE_FOR_nothing;
7403 0 : sri2.prev_sri = &sri;
7404 0 : new_t_class
7405 0 : = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7406 : new_class, mode,
7407 : &sri);
7408 0 : if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7409 : {
7410 0 : if (reload_adjust_reg_for_temp (&second_reload_reg,
7411 : third_reload_reg,
7412 : new_class, mode))
7413 : {
7414 0 : third_reload_reg = 0;
7415 0 : tertiary_icode = (enum insn_code) sri2.icode;
7416 : }
7417 : else
7418 : {
7419 : oldequiv = old;
7420 : real_oldequiv = real_old;
7421 : }
7422 : }
7423 : else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7424 : {
7425 : rtx intermediate = second_reload_reg;
7426 :
7427 : if (reload_adjust_reg_for_temp (&intermediate, NULL,
7428 : new_class, mode)
7429 : && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7430 : ((enum insn_code)
7431 : sri2.icode)))
7432 : {
7433 : second_reload_reg = intermediate;
7434 : tertiary_icode = (enum insn_code) sri2.icode;
7435 : }
7436 : else
7437 : {
7438 : oldequiv = old;
7439 : real_oldequiv = real_old;
7440 : }
7441 : }
7442 : else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7443 : {
7444 0 : rtx intermediate = second_reload_reg;
7445 :
7446 0 : if (reload_adjust_reg_for_temp (&intermediate, NULL,
7447 : new_class, mode)
7448 0 : && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7449 : new_t_class, mode))
7450 : {
7451 0 : second_reload_reg = intermediate;
7452 0 : tertiary_icode = (enum insn_code) sri2.icode;
7453 : }
7454 : else
7455 : {
7456 : oldequiv = old;
7457 : real_oldequiv = real_old;
7458 : }
7459 0 : }
7460 : else
7461 : {
7462 : /* This could be handled more intelligently too. */
7463 : oldequiv = old;
7464 : real_oldequiv = real_old;
7465 : }
7466 : }
7467 : }
7468 :
7469 : /* If we still need a secondary reload register, check
7470 : to see if it is being used as a scratch or intermediate
7471 : register and generate code appropriately. If we need
7472 : a scratch register, use REAL_OLDEQUIV since the form of
7473 : the insn may depend on the actual address if it is
7474 : a MEM. */
7475 :
7476 0 : if (second_reload_reg)
7477 : {
7478 0 : if (icode != CODE_FOR_nothing)
7479 : {
7480 : /* We'd have to add extra code to handle this case. */
7481 0 : gcc_assert (!third_reload_reg);
7482 :
7483 0 : emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7484 : second_reload_reg));
7485 0 : special = 1;
7486 : }
7487 : else
7488 : {
7489 : /* See if we need a scratch register to load the
7490 : intermediate register (a tertiary reload). */
7491 0 : if (tertiary_icode != CODE_FOR_nothing)
7492 : {
7493 0 : emit_insn ((GEN_FCN (tertiary_icode)
7494 0 : (second_reload_reg, real_oldequiv,
7495 : third_reload_reg)));
7496 : }
7497 0 : else if (third_reload_reg)
7498 : {
7499 0 : gen_reload (third_reload_reg, real_oldequiv,
7500 : rl->opnum,
7501 : rl->when_needed);
7502 0 : gen_reload (second_reload_reg, third_reload_reg,
7503 : rl->opnum,
7504 : rl->when_needed);
7505 : }
7506 : else
7507 0 : gen_reload (second_reload_reg, real_oldequiv,
7508 : rl->opnum,
7509 : rl->when_needed);
7510 :
 : /* The main reload below now copies from the intermediate
 : register instead of the original input. */
7511 : oldequiv = second_reload_reg;
7512 : }
7513 : }
7514 : }
7515 :
 : /* If no special sequence above already loaded RELOADREG, emit the
 : ordinary input reload, substituting rl->in when OLDEQUIV's
 : equivalence cannot be used directly. */
7516 0 : if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7517 : {
7518 0 : rtx real_oldequiv = oldequiv;
7519 :
7520 0 : if ((REG_P (oldequiv)
7521 0 : && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7522 0 : && (reg_equiv_memory_loc (REGNO (oldequiv)) != 0
7523 0 : || reg_equiv_constant (REGNO (oldequiv)) != 0))
7524 0 : || (GET_CODE (oldequiv) == SUBREG
7525 0 : && REG_P (SUBREG_REG (oldequiv))
7526 0 : && (REGNO (SUBREG_REG (oldequiv))
7527 : >= FIRST_PSEUDO_REGISTER)
7528 0 : && ((reg_equiv_memory_loc (REGNO (SUBREG_REG (oldequiv))) != 0)
7529 0 : || (reg_equiv_constant (REGNO (SUBREG_REG (oldequiv))) != 0)))
7530 0 : || (CONSTANT_P (oldequiv)
7531 0 : && (targetm.preferred_reload_class (oldequiv,
7532 0 : REGNO_REG_CLASS (REGNO (reloadreg)))
7533 : == NO_REGS)))
7534 0 : real_oldequiv = rl->in;
7535 0 : gen_reload (reloadreg, real_oldequiv, rl->opnum,
7536 : rl->when_needed);
7537 : }
7538 :
7539 0 : if (cfun->can_throw_non_call_exceptions)
7540 0 : copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7541 :
7542 : /* End this sequence. */
7543 0 : *where = end_sequence ();
7544 :
7545 : /* Update reload_override_in so that delete_address_reloads_1
7546 : can see the actual register usage. */
7547 0 : if (oldequiv_reg)
7548 0 : reload_override_in[j] = oldequiv;
7549 0 : }
7550 :
7551 : /* Generate insns to for the output reload RL, which is for the insn described
7552 : by CHAIN and has the number J.
 : The insns are accumulated onto output_reload_insns[opnum], or onto
 : other_output_reload_insns[opnum] for RELOAD_OTHER reloads. */
7553 : static void
7554 0 : emit_output_reload_insns (class insn_chain *chain, struct reload *rl,
7555 : int j)
7556 : {
7557 0 : rtx reloadreg;
7558 0 : rtx_insn *insn = chain->insn;
7559 0 : int special = 0;
7560 0 : rtx old = rl->out;
7561 0 : machine_mode mode;
7562 0 : rtx_insn *p;
7563 0 : rtx rl_reg_rtx;
7564 :
7565 0 : if (rl->when_needed == RELOAD_OTHER)
7566 0 : start_sequence ();
7567 : else
7568 0 : push_to_sequence (output_reload_insns[rl->opnum]);
7569 :
7570 0 : rl_reg_rtx = reload_reg_rtx_for_output[j];
7571 0 : mode = GET_MODE (rl_reg_rtx);
7572 :
7573 0 : reloadreg = rl_reg_rtx;
7574 :
7575 : /* If we need two reload regs, set RELOADREG to the intermediate
7576 : one, since it will be stored into OLD. We might need a secondary
7577 : register only for an input reload, so check again here. */
7578 :
7579 0 : if (rl->secondary_out_reload >= 0)
7580 : {
7581 0 : rtx real_old = old;
7582 0 : int secondary_reload = rl->secondary_out_reload;
7583 0 : int tertiary_reload = rld[secondary_reload].secondary_out_reload;
7584 :
7585 0 : if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
7586 0 : && reg_equiv_mem (REGNO (old)) != 0)
7587 0 : real_old = reg_equiv_mem (REGNO (old));
7588 :
7589 0 : if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
7590 : {
7591 0 : rtx second_reloadreg = reloadreg;
7592 0 : reloadreg = rld[secondary_reload].reg_rtx;
7593 :
7594 : /* See if RELOADREG is to be used as a scratch register
7595 : or as an intermediate register. */
7596 0 : if (rl->secondary_out_icode != CODE_FOR_nothing)
7597 : {
7598 : /* We'd have to add extra code to handle this case. */
7599 0 : gcc_assert (tertiary_reload < 0);
7600 :
7601 0 : emit_insn ((GEN_FCN (rl->secondary_out_icode)
7602 0 : (real_old, second_reloadreg, reloadreg)));
7603 0 : special = 1;
7604 : }
7605 : else
7606 : {
7607 : /* See if we need both a scratch and intermediate reload
7608 : register. */
7609 :
7610 0 : enum insn_code tertiary_icode
7611 : = rld[secondary_reload].secondary_out_icode;
7612 :
7613 : /* We'd have to add more code for quartary reloads. */
7614 0 : gcc_assert (tertiary_reload < 0
7615 : || rld[tertiary_reload].secondary_out_reload < 0);
7616 :
7617 0 : if (GET_MODE (reloadreg) != mode)
7618 0 : reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7619 :
7620 0 : if (tertiary_icode != CODE_FOR_nothing)
7621 : {
7622 0 : rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7623 :
7624 : /* Copy primary reload reg to secondary reload reg.
7625 : (Note that these have been swapped above, then
7626 : secondary reload reg to OLD using our insn.) */
7627 :
7628 : /* If REAL_OLD is a paradoxical SUBREG, remove it
7629 : and try to put the opposite SUBREG on
7630 : RELOADREG. */
7631 0 : strip_paradoxical_subreg (&real_old, &reloadreg);
7632 :
7633 0 : gen_reload (reloadreg, second_reloadreg,
7634 : rl->opnum, rl->when_needed);
7635 0 : emit_insn ((GEN_FCN (tertiary_icode)
7636 0 : (real_old, reloadreg, third_reloadreg)));
7637 0 : special = 1;
7638 : }
7639 :
7640 : else
7641 : {
7642 : /* Copy between the reload regs here and then to
7643 : OUT later. */
7644 :
7645 0 : gen_reload (reloadreg, second_reloadreg,
7646 : rl->opnum, rl->when_needed);
7647 0 : if (tertiary_reload >= 0)
7648 : {
7649 0 : rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7650 :
7651 0 : gen_reload (third_reloadreg, reloadreg,
7652 : rl->opnum, rl->when_needed);
7653 0 : reloadreg = third_reloadreg;
7654 : }
7655 : }
7656 : }
7657 : }
7658 : }
7659 :
7660 : /* Output the last reload insn. */
7661 0 : if (! special)
7662 : {
7663 0 : rtx set;
7664 :
7665 : /* Don't output the last reload if OLD is not the dest of
7666 : INSN and is in the src and is clobbered by INSN. */
7667 0 : if (! flag_expensive_optimizations
7668 0 : || !REG_P (old)
7669 0 : || !(set = single_set (insn))
7670 0 : || rtx_equal_p (old, SET_DEST (set))
7671 0 : || !reg_mentioned_p (old, SET_SRC (set))
7672 0 : || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
7673 0 : && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
7674 0 : gen_reload (old, reloadreg, rl->opnum,
7675 : rl->when_needed);
7676 : }
7677 :
7678 : /* Look at all insns we emitted, just to be safe. */
7679 0 : for (p = get_insns (); p; p = NEXT_INSN (p))
7680 0 : if (INSN_P (p))
7681 : {
7682 0 : rtx pat = PATTERN (p);
7683 :
7684 : /* If this output reload doesn't come from a spill reg,
7685 : clear any memory of reloaded copies of the pseudo reg.
7686 : If this output reload comes from a spill reg,
7687 : reg_has_output_reload will make this do nothing. */
7688 0 : note_stores (p, forget_old_reloads_1, NULL);
7689 :
7690 0 : if (reg_mentioned_p (rl_reg_rtx, pat))
7691 : {
7692 0 : rtx set = single_set (insn);
7693 0 : if (reload_spill_index[j] < 0
7694 0 : && set
7695 0 : && SET_SRC (set) == rl_reg_rtx)
7696 : {
7697 0 : int src = REGNO (SET_SRC (set));
7698 :
7699 0 : reload_spill_index[j] = src;
7700 0 : SET_HARD_REG_BIT (reg_is_output_reload, src);
7701 0 : if (find_regno_note (insn, REG_DEAD, src))
7702 0 : SET_HARD_REG_BIT (reg_reloaded_died, src);
7703 : }
7704 0 : if (HARD_REGISTER_P (rl_reg_rtx))
7705 : {
7706 0 : int s = rl->secondary_out_reload;
7707 0 : set = single_set (p);
7708 : /* If this reload copies only to the secondary reload
7709 : register, the secondary reload does the actual
7710 : store. */
7711 0 : if (s >= 0 && set == NULL_RTX)
7712 : /* We can't tell what function the secondary reload
7713 : has and where the actual store to the pseudo is
7714 : made; leave new_spill_reg_store alone. */
7715 : ;
7716 0 : else if (s >= 0
7717 0 : && SET_SRC (set) == rl_reg_rtx
7718 0 : && SET_DEST (set) == rld[s].reg_rtx)
7719 : {
7720 : /* Usually the next instruction will be the
7721 : secondary reload insn; if we can confirm
7722 : that it is, setting new_spill_reg_store to
7723 : that insn will allow an extra optimization. */
7724 0 : rtx s_reg = rld[s].reg_rtx;
7725 0 : rtx_insn *next = NEXT_INSN (p);
7726 0 : rld[s].out = rl->out;
7727 0 : rld[s].out_reg = rl->out_reg;
7728 0 : set = single_set (next);
7729 0 : if (set && SET_SRC (set) == s_reg
7730 0 : && reload_reg_rtx_reaches_end_p (s_reg, s))
7731 : {
7732 0 : SET_HARD_REG_BIT (reg_is_output_reload,
7733 : REGNO (s_reg));
7734 0 : new_spill_reg_store[REGNO (s_reg)] = next;
7735 : }
7736 : }
7737 0 : else if (reload_reg_rtx_reaches_end_p (rl_reg_rtx, j))
7738 0 : new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
7739 : }
7740 : }
7741 : }
7742 :
7743 0 : if (rl->when_needed == RELOAD_OTHER)
7744 : {
 : /* Chain any previously accumulated RELOAD_OTHER output reloads
 : for this operand after the new insns, then save the combined
 : sequence back. */
7745 0 : emit_insn (other_output_reload_insns[rl->opnum]);
7746 0 : other_output_reload_insns[rl->opnum] = get_insns ();
7747 : }
7748 : else
7749 0 : output_reload_insns[rl->opnum] = get_insns ();
7750 :
7751 0 : if (cfun->can_throw_non_call_exceptions)
7752 0 : copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7753 :
7754 0 : end_sequence ();
7755 0 : }
7756 :
7757 : /* Do input reloading for reload RL, which is for the insn described by CHAIN
7758 : and has the number J.
 : Chooses the mode to reload in, records the reload register in
 : reload_reg_rtx_for_input, emits the reload insns when needed, and
 : tries to delete a now-redundant earlier output reload. */
7759 : static void
7760 0 : do_input_reload (class insn_chain *chain, struct reload *rl, int j)
7761 : {
7762 0 : rtx_insn *insn = chain->insn;
7763 0 : rtx old = (rl->in && MEM_P (rl->in)
7764 0 : ? rl->in_reg : rl->in);
7765 0 : rtx reg_rtx = rl->reg_rtx;
7766 :
7767 0 : if (old && reg_rtx)
7768 : {
7769 0 : machine_mode mode;
7770 :
7771 : /* Determine the mode to reload in.
7772 : This is very tricky because we have three to choose from.
7773 : There is the mode the insn operand wants (rl->inmode).
7774 : There is the mode of the reload register RELOADREG.
7775 : There is the intrinsic mode of the operand, which we could find
7776 : by stripping some SUBREGs.
7777 : It turns out that RELOADREG's mode is irrelevant:
7778 : we can change that arbitrarily.
7779 :
7780 : Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7781 : then the reload reg may not support QImode moves, so use SImode.
7782 : If foo is in memory due to spilling a pseudo reg, this is safe,
7783 : because the QImode value is in the least significant part of a
7784 : slot big enough for a SImode. If foo is some other sort of
7785 : memory reference, then it is impossible to reload this case,
7786 : so previous passes had better make sure this never happens.
7787 :
7788 : Then consider a one-word union which has SImode and one of its
7789 : members is a float, being fetched as (SUBREG:SF union:SI).
7790 : We must fetch that as SFmode because we could be loading into
7791 : a float-only register. In this case OLD's mode is correct.
7792 :
7793 : Consider an immediate integer: it has VOIDmode. Here we need
7794 : to get a mode from something else.
7795 :
7796 : In some cases, there is a fourth mode, the operand's
7797 : containing mode. If the insn specifies a containing mode for
7798 : this operand, it overrides all others.
7799 :
7800 : I am not sure whether the algorithm here is always right,
7801 : but it does the right things in those cases. */
7802 :
7803 0 : mode = GET_MODE (old);
7804 0 : if (mode == VOIDmode)
7805 0 : mode = rl->inmode;
7806 :
7807 : /* We cannot use gen_lowpart_common since it can do the wrong thing
7808 : when REG_RTX has a multi-word mode. Note that REG_RTX must
7809 : always be a REG here. */
7810 0 : if (GET_MODE (reg_rtx) != mode)
7811 0 : reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7812 : }
 : /* Record the (possibly mode-adjusted) reload register where
 : emit_input_reload_insns will look for it. */
7813 0 : reload_reg_rtx_for_input[j] = reg_rtx;
7814 :
7815 0 : if (old != 0
7816 : /* AUTO_INC reloads need to be handled even if inherited. We got an
7817 : AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
7818 0 : && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7819 0 : && ! rtx_equal_p (reg_rtx, old)
7820 0 : && reg_rtx != 0)
7821 0 : emit_input_reload_insns (chain, rld + j, old, j);
7822 :
7823 : /* When inheriting a wider reload, we have a MEM in rl->in,
7824 : e.g. inheriting a SImode output reload for
7825 : (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7826 0 : if (optimize && reload_inherited[j] && rl->in
7827 0 : && MEM_P (rl->in)
7828 0 : && MEM_P (rl->in_reg)
7829 0 : && reload_spill_index[j] >= 0
7830 0 : && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7831 0 : rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7832 :
7833 : /* If we are reloading a register that was recently stored in with an
7834 : output-reload, see if we can prove there was
7835 : actually no need to store the old value in it. */
7836 :
7837 0 : if (optimize
7838 0 : && (reload_inherited[j] || reload_override_in[j])
7839 0 : && reg_rtx
7840 0 : && REG_P (reg_rtx)
7841 0 : && spill_reg_store[REGNO (reg_rtx)] != 0
7842 : #if 0
7843 : /* There doesn't seem to be any reason to restrict this to pseudos
7844 : and doing so loses in the case where we are copying from a
7845 : register of the wrong class. */
7846 : && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
7847 : #endif
7848 : /* The insn might have already some references to stackslots
7849 : replaced by MEMs, while reload_out_reg still names the
7850 : original pseudo. */
7851 0 : && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
7852 0 : || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
7853 0 : delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
7854 0 : }
7855 :
/* Do output reloading for reload RL, which is for the insn described by
   CHAIN and has the number J.

   Determines the mode to reload in, records the reload register in
   reload_reg_rtx_for_output, tries to delete a now-redundant earlier
   output reload, and finally emits the output-reload insns unless the
   destination dies immediately (REG_UNUSED) or is a SCRATCH.

   ??? At some point we need to support handling output reloads of
   JUMP_INSNs.  */
static void
do_output_reload (class insn_chain *chain, struct reload *rl, int j)
{
  rtx note, old;
  rtx_insn *insn = chain->insn;
  /* If this is an output reload that stores something that is
     not loaded in this same reload, see if we can eliminate a previous
     store.  */
  rtx pseudo = rl->out_reg;
  rtx reg_rtx = rl->reg_rtx;

  if (rl->out && reg_rtx)
    {
      machine_mode mode;

      /* Determine the mode to reload in.
	 See comments above (for input reloading).  */
      mode = GET_MODE (rl->out);
      if (mode == VOIDmode)
	{
	  /* VOIDmode should never happen for an output.  */
	  if (asm_noperands (PATTERN (insn)) < 0)
	    /* It's the compiler's fault.  */
	    fatal_insn ("VOIDmode on an output", insn);
	  error_for_asm (insn, "output operand is constant in %<asm%>");
	  /* Prevent crash--use something we know is valid.  */
	  mode = word_mode;
	  rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
	}
      /* Re-express the reload register in the mode chosen above.  */
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  /* Record the (possibly mode-adjusted) reload register for reload J.  */
  reload_reg_rtx_for_output[j] = reg_rtx;

  /* If the pseudo being stored still has a recorded copy in a hard
     register from a previous output reload, that earlier store may now
     be deletable.  */
  if (pseudo
      && optimize
      && REG_P (pseudo)
      && ! rtx_equal_p (rl->in_reg, pseudo)
      && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
      && reg_last_reload_reg[REGNO (pseudo)])
    {
      int pseudo_no = REGNO (pseudo);
      int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);

      /* We don't need to test full validity of last_regno for
	 inherit here; we only want to know if the store actually
	 matches the pseudo.  */
      if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
	  && reg_reloaded_contents[last_regno] == pseudo_no
	  && spill_reg_store[last_regno]
	  && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
	delete_output_reload (insn, j, last_regno, reg_rtx);
    }

  /* Nothing further to do if there is no output value, no reload
     register, or the value already lives in the reload register.  */
  old = rl->out_reg;
  if (old == 0
      || reg_rtx == 0
      || rtx_equal_p (old, reg_rtx))
    return;

  /* An output operand that dies right away does need a reload,
     but need not be copied from it.  Show the new location in the
     REG_UNUSED note.  */
  if ((REG_P (old) || GET_CODE (old) == SCRATCH)
      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
    {
      XEXP (note, 0) = reg_rtx;
      return;
    }
  /* Likewise for a SUBREG of an operand that dies.  */
  else if (GET_CODE (old) == SUBREG
	   && REG_P (SUBREG_REG (old))
	   && (note = find_reg_note (insn, REG_UNUSED,
				     SUBREG_REG (old))) != 0)
    {
      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
      return;
    }
  else if (GET_CODE (old) == SCRATCH)
    /* If we aren't optimizing, there won't be a REG_UNUSED note,
       but we don't want to make an output reload.  */
    return;

  /* If is a JUMP_INSN, we can't support output reloads yet.  */
  gcc_assert (NONJUMP_INSN_P (insn));

  emit_output_reload_insns (chain, rld + j, j);
}
7948 :
7949 : /* A reload copies values of MODE from register SRC to register DEST.
7950 : Return true if it can be treated for inheritance purposes like a
7951 : group of reloads, each one reloading a single hard register. The
7952 : caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
7953 : occupy the same number of hard registers. */
7954 :
7955 : static bool
7956 0 : inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
7957 : int src ATTRIBUTE_UNUSED,
7958 : machine_mode mode ATTRIBUTE_UNUSED)
7959 : {
7960 0 : return (REG_CAN_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
7961 0 : && REG_CAN_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
7962 : }
7963 :
/* Output insns to reload values in and out of the chosen reload regs
   for the insn described by CHAIN.  Emits the input/output reload
   insns in the canonical order around the insn, then updates the
   global inheritance tables (spill_reg_store, spill_reg_stored_to,
   reg_last_reload_reg, reg_reloaded_*) so later insns can inherit
   these reloads.  */

static void
emit_reload_insns (class insn_chain *chain)
{
  rtx_insn *insn = chain->insn;

  int j;

  CLEAR_HARD_REG_SET (reg_reloaded_died);

  /* Reset the per-operand insn chains that do_input_reload /
     do_output_reload will fill in.  */
  for (j = 0; j < reload_n_operands; j++)
    input_reload_insns[j] = input_address_reload_insns[j]
      = inpaddr_address_reload_insns[j]
      = output_reload_insns[j] = output_address_reload_insns[j]
      = outaddr_address_reload_insns[j]
      = other_output_reload_insns[j] = 0;
  other_input_address_reload_insns = 0;
  other_input_reload_insns = 0;
  operand_reload_insns = 0;
  other_operand_reload_insns = 0;

  /* Dump reloads into the dump file.  */
  if (dump_file)
    {
      fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
      debug_reload_to_stream (dump_file);
    }

  /* Clear the store records for every hard register covered by a
     reload register; the emit routines below fill them in.  */
  for (j = 0; j < n_reloads; j++)
    if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
      {
	unsigned int i;

	for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
	  new_spill_reg_store[i] = 0;
      }

  /* Now output the instructions to copy the data into and out of the
     reload registers.  Do these in the order that the reloads were reported,
     since reloads of base and index registers precede reloads of operands
     and the operands may need the base and index registers reloaded.  */

  for (j = 0; j < n_reloads; j++)
    {
      do_input_reload (chain, rld + j, j);
      do_output_reload (chain, rld + j, j);
    }

  /* Now write all the insns we made for reloads in the order expected by
     the allocation functions.  Prior to the insn being reloaded, we write
     the following reloads:

     RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.

     RELOAD_OTHER reloads.

     For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
     by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
     RELOAD_FOR_INPUT reload for the operand.

     RELOAD_FOR_OPADDR_ADDRS reloads.

     RELOAD_FOR_OPERAND_ADDRESS reloads.

     After the insn being reloaded, we write the following:

     For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
     by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
     RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
     reloads for the operand.  The RELOAD_OTHER output reloads are
     output in descending order by reload number.  */

  emit_insn_before (other_input_address_reload_insns, insn);
  emit_insn_before (other_input_reload_insns, insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      emit_insn_before (inpaddr_address_reload_insns[j], insn);
      emit_insn_before (input_address_reload_insns[j], insn);
      emit_insn_before (input_reload_insns[j], insn);
    }

  emit_insn_before (other_operand_reload_insns, insn);
  emit_insn_before (operand_reload_insns, insn);

  for (j = 0; j < reload_n_operands; j++)
    {
      rtx_insn *x = emit_insn_after (outaddr_address_reload_insns[j], insn);
      x = emit_insn_after (output_address_reload_insns[j], x);
      x = emit_insn_after (output_reload_insns[j], x);
      emit_insn_after (other_output_reload_insns[j], x);
    }

  /* For all the spill regs newly reloaded in this instruction,
     record what they were reloaded from, so subsequent instructions
     can inherit the reloads.

     Update spill_reg_store for the reloads of this insn.
     Copy the elements that were updated in the loop above.  */

  for (j = 0; j < n_reloads; j++)
    {
      int r = reload_order[j];
      int i = reload_spill_index[r];

      /* If this is a non-inherited input reload from a pseudo, we must
	 clear any memory of a previous store to the same pseudo.  Only do
	 something if there will not be an output reload for the pseudo
	 being reloaded.  */
      if (rld[r].in_reg != 0
	  && ! (reload_inherited[r] || reload_override_in[r]))
	{
	  rtx reg = rld[r].in_reg;

	  if (GET_CODE (reg) == SUBREG)
	    reg = SUBREG_REG (reg);

	  if (REG_P (reg)
	      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
	      && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
	    {
	      int nregno = REGNO (reg);

	      if (reg_last_reload_reg[nregno])
		{
		  int last_regno = REGNO (reg_last_reload_reg[nregno]);

		  if (reg_reloaded_contents[last_regno] == nregno)
		    spill_reg_store[last_regno] = 0;
		}
	    }
	}

      /* I is nonneg if this reload used a register.
	 If rld[r].reg_rtx is 0, this is an optional reload
	 that we opted to ignore.  */

      if (i >= 0 && rld[r].reg_rtx != 0)
	{
	  int nr = hard_regno_nregs (i, GET_MODE (rld[r].reg_rtx));
	  int k;

	  /* For a multi register reload, we need to check if all or part
	     of the value lives to the end.  */
	  for (k = 0; k < nr; k++)
	    if (reload_reg_reaches_end_p (i + k, r))
	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);

	  /* Maybe the spill reg contains a copy of reload_out.  */
	  if (rld[r].out != 0
	      && (REG_P (rld[r].out)
		  || (rld[r].out_reg
		      ? REG_P (rld[r].out_reg)
		      /* The reload value is an auto-modification of
			 some kind.  For PRE_INC, POST_INC, PRE_DEC
			 and POST_DEC, we record an equivalence
			 between the reload register and the operand
			 on the optimistic assumption that we can make
			 the equivalence hold.  reload_as_needed must
			 then either make it hold or invalidate the
			 equivalence.

			 PRE_MODIFY and POST_MODIFY addresses are reloaded
			 somewhat differently, and allowing them here leads
			 to problems.  */
		      : (GET_CODE (rld[r].out) != POST_MODIFY
			 && GET_CODE (rld[r].out) != PRE_MODIFY))))
	    {
	      rtx reg;

	      reg = reload_reg_rtx_for_output[r];
	      if (reload_reg_rtx_reaches_end_p (reg, r))
		{
		  machine_mode mode = GET_MODE (reg);
		  int regno = REGNO (reg);
		  int nregs = REG_NREGS (reg);
		  rtx out = (REG_P (rld[r].out)
			     ? rld[r].out
			     : rld[r].out_reg
			     ? rld[r].out_reg
			     /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
		  int out_regno = REGNO (out);
		  int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
				   : hard_regno_nregs (out_regno, mode));
		  bool piecemeal;

		  /* Record the store and the fact that REG now holds
		     the value of OUT.  */
		  spill_reg_store[regno] = new_spill_reg_store[regno];
		  spill_reg_stored_to[regno] = out;
		  reg_last_reload_reg[out_regno] = reg;

		  piecemeal = (HARD_REGISTER_NUM_P (out_regno)
			       && nregs == out_nregs
			       && inherit_piecemeal_p (out_regno, regno, mode));

		  /* If OUT_REGNO is a hard register, it may occupy more than
		     one register.  If it does, say what is in the
		     rest of the registers assuming that both registers
		     agree on how many words the object takes.  If not,
		     invalidate the subsequent registers.  */

		  if (HARD_REGISTER_NUM_P (out_regno))
		    for (k = 1; k < out_nregs; k++)
		      reg_last_reload_reg[out_regno + k]
			= (piecemeal ? regno_reg_rtx[regno + k] : 0);

		  /* Now do the inverse operation.  */
		  for (k = 0; k < nregs; k++)
		    {
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
		      reg_reloaded_contents[regno + k]
			= (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
			   ? out_regno
			   : out_regno + k);
		      reg_reloaded_insn[regno + k] = insn;
		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
		    }
		}
	    }
	  /* Maybe the spill reg contains a copy of reload_in.  Only do
	     something if there will not be an output reload for
	     the register being reloaded.  */
	  else if (rld[r].out_reg == 0
		   && rld[r].in != 0
		   && ((REG_P (rld[r].in)
			&& !HARD_REGISTER_P (rld[r].in)
			&& !REGNO_REG_SET_P (&reg_has_output_reload,
					     REGNO (rld[r].in)))
		       || (REG_P (rld[r].in_reg)
			   && !REGNO_REG_SET_P (&reg_has_output_reload,
						REGNO (rld[r].in_reg))))
		   && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
	    {
	      rtx reg;

	      reg = reload_reg_rtx_for_input[r];
	      if (reload_reg_rtx_reaches_end_p (reg, r))
		{
		  machine_mode mode;
		  int regno;
		  int nregs;
		  int in_regno;
		  int in_nregs;
		  rtx in;
		  bool piecemeal;

		  mode = GET_MODE (reg);
		  regno = REGNO (reg);
		  nregs = REG_NREGS (reg);
		  /* Pick the register whose value REG now holds: the
		     reloaded pseudo, or for AUTO_INC the incremented
		     register inside in_reg.  */
		  if (REG_P (rld[r].in)
		      && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
		    in = rld[r].in;
		  else if (REG_P (rld[r].in_reg))
		    in = rld[r].in_reg;
		  else
		    in = XEXP (rld[r].in_reg, 0);
		  in_regno = REGNO (in);

		  in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
			      : hard_regno_nregs (in_regno, mode));

		  reg_last_reload_reg[in_regno] = reg;

		  piecemeal = (HARD_REGISTER_NUM_P (in_regno)
			       && nregs == in_nregs
			       && inherit_piecemeal_p (regno, in_regno, mode));

		  if (HARD_REGISTER_NUM_P (in_regno))
		    for (k = 1; k < in_nregs; k++)
		      reg_last_reload_reg[in_regno + k]
			= (piecemeal ? regno_reg_rtx[regno + k] : 0);

		  /* Unless we inherited this reload, show we haven't
		     recently done a store.
		     Previous stores of inherited auto_inc expressions
		     also have to be discarded.  */
		  if (! reload_inherited[r]
		      || (rld[r].out && ! rld[r].out_reg))
		    spill_reg_store[regno] = 0;

		  for (k = 0; k < nregs; k++)
		    {
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
		      reg_reloaded_contents[regno + k]
			= (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
			   ? in_regno
			   : in_regno + k);
		      reg_reloaded_insn[regno + k] = insn;
		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
		    }
		}
	    }
	}

      /* The following if-statement was #if 0'd in 1.34 (or before...).
	 It's reenabled in 1.35 because supposedly nothing else
	 deals with this problem.  */

      /* If a register gets output-reloaded from a non-spill register,
	 that invalidates any previous reloaded copy of it.
	 But forget_old_reloads_1 won't get to see it, because
	 it thinks only about the original insn.  So invalidate it here.
	 Also do the same thing for RELOAD_OTHER constraints where the
	 output is discarded.  */
      if (i < 0
	  && ((rld[r].out != 0
	       && (REG_P (rld[r].out)
		   || (MEM_P (rld[r].out)
		       && REG_P (rld[r].out_reg))))
	      || (rld[r].out == 0 && rld[r].out_reg
		  && REG_P (rld[r].out_reg))))
	{
	  rtx out = ((rld[r].out && REG_P (rld[r].out))
		     ? rld[r].out : rld[r].out_reg);
	  int out_regno = REGNO (out);
	  machine_mode mode = GET_MODE (out);

	  /* REG_RTX is now set or clobbered by the main instruction.
	     As the comment above explains, forget_old_reloads_1 only
	     sees the original instruction, and there is no guarantee
	     that the original instruction also clobbered REG_RTX.
	     For example, if find_reloads sees that the input side of
	     a matched operand pair dies in this instruction, it may
	     use the input register as the reload register.

	     Calling forget_old_reloads_1 is a waste of effort if
	     REG_RTX is also the output register.

	     If we know that REG_RTX holds the value of a pseudo
	     register, the code after the call will record that fact.  */
	  if (rld[r].reg_rtx && rld[r].reg_rtx != out)
	    forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);

	  if (!HARD_REGISTER_NUM_P (out_regno))
	    {
	      rtx src_reg;
	      rtx_insn *store_insn = NULL;

	      reg_last_reload_reg[out_regno] = 0;

	      /* If we can find a hard register that is stored, record
		 the storing insn so that we may delete this insn with
		 delete_output_reload.  */
	      src_reg = reload_reg_rtx_for_output[r];

	      if (src_reg)
		{
		  if (reload_reg_rtx_reaches_end_p (src_reg, r))
		    store_insn = new_spill_reg_store[REGNO (src_reg)];
		  else
		    src_reg = NULL_RTX;
		}
	      else
		{
		  /* If this is an optional reload, try to find the
		     source reg from an input reload.  */
		  rtx set = single_set (insn);
		  if (set && SET_DEST (set) == rld[r].out)
		    {
		      int k;

		      src_reg = SET_SRC (set);
		      store_insn = insn;
		      for (k = 0; k < n_reloads; k++)
			{
			  if (rld[k].in == src_reg)
			    {
			      src_reg = reload_reg_rtx_for_input[k];
			      break;
			    }
			}
		    }
		}
	      if (src_reg && REG_P (src_reg)
		  && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
		{
		  int src_regno, src_nregs, k;
		  rtx note;

		  gcc_assert (GET_MODE (src_reg) == mode);
		  src_regno = REGNO (src_reg);
		  src_nregs = hard_regno_nregs (src_regno, mode);
		  /* The place where to find a death note varies with
		     PRESERVE_DEATH_INFO_REGNO_P .  The condition is not
		     necessarily checked exactly in the code that moves
		     notes, so just check both locations.  */
		  note = find_regno_note (insn, REG_DEAD, src_regno);
		  if (! note && store_insn)
		    note = find_regno_note (store_insn, REG_DEAD, src_regno);
		  for (k = 0; k < src_nregs; k++)
		    {
		      spill_reg_store[src_regno + k] = store_insn;
		      spill_reg_stored_to[src_regno + k] = out;
		      reg_reloaded_contents[src_regno + k] = out_regno;
		      reg_reloaded_insn[src_regno + k] = store_insn;
		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
		      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
		      SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
		      if (note)
			SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
		      else
			CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
		    }
		  reg_last_reload_reg[out_regno] = src_reg;
		  /* We have to set reg_has_output_reload here, or else
		     forget_old_reloads_1 will clear reg_last_reload_reg
		     right away.  */
		  SET_REGNO_REG_SET (&reg_has_output_reload,
				     out_regno);
		}
	    }
	  else
	    {
	      int k, out_nregs = hard_regno_nregs (out_regno, mode);

	      for (k = 0; k < out_nregs; k++)
		reg_last_reload_reg[out_regno + k] = 0;
	    }
	}
    }
  /* Fold the deaths recorded above into the global dead set.  */
  reg_reloaded_dead |= reg_reloaded_died;
}
8386 :
8387 :
8388 : /* Helper for emit_insn_if_valid_for_reload. */
8389 :
8390 : static rtx_insn *
8391 0 : emit_insn_if_valid_for_reload_1 (rtx pat)
8392 : {
8393 0 : rtx_insn *last = get_last_insn ();
8394 0 : int code;
8395 :
8396 0 : rtx_insn *insn = emit_insn (pat);
8397 0 : code = recog_memoized (insn);
8398 :
8399 0 : if (code >= 0)
8400 : {
8401 0 : extract_insn (insn);
8402 : /* We want constrain operands to treat this insn strictly in its
8403 : validity determination, i.e., the way it would after reload has
8404 : completed. */
8405 0 : if (constrain_operands (1, get_enabled_alternatives (insn)))
8406 : return insn;
8407 : }
8408 :
8409 0 : delete_insns_since (last);
8410 0 : return NULL;
8411 : }
8412 :
8413 : /* Go through the motions to emit INSN and test if it is strictly valid.
8414 : Return the emitted insn if valid, else return NULL. */
8415 :
8416 : static rtx_insn *
8417 0 : emit_insn_if_valid_for_reload (rtx pat)
8418 : {
8419 0 : rtx_insn *insn = emit_insn_if_valid_for_reload_1 (pat);
8420 :
8421 0 : if (insn)
8422 : return insn;
8423 :
8424 : /* If the pattern is a SET, and this target has a single
8425 : flags-register, try again with a PARALLEL that clobbers that
8426 : register. */
8427 0 : if (targetm.flags_regnum == INVALID_REGNUM || GET_CODE (pat) != SET)
8428 : return NULL;
8429 :
8430 0 : rtx flags_clobber = gen_hard_reg_clobber (CCmode, targetm.flags_regnum);
8431 0 : rtx parpat = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, pat, flags_clobber));
8432 :
8433 0 : return emit_insn_if_valid_for_reload (parpat);
8434 : }
8435 :
8436 : /* Emit code to perform a reload from IN (which may be a reload register) to
8437 : OUT (which may also be a reload register). IN or OUT is from operand
8438 : OPNUM with reload type TYPE.
8439 :
8440 : Returns first insn emitted. */
8441 :
8442 : static rtx_insn *
8443 0 : gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
8444 : {
8445 0 : rtx_insn *last = get_last_insn ();
8446 0 : rtx_insn *tem;
8447 0 : rtx tem1, tem2;
8448 :
8449 : /* If IN is a paradoxical SUBREG, remove it and try to put the
8450 : opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
8451 0 : if (!strip_paradoxical_subreg (&in, &out))
8452 0 : strip_paradoxical_subreg (&out, &in);
8453 :
8454 : /* How to do this reload can get quite tricky. Normally, we are being
8455 : asked to reload a simple operand, such as a MEM, a constant, or a pseudo
8456 : register that didn't get a hard register. In that case we can just
8457 : call emit_move_insn.
8458 :
8459 : We can also be asked to reload a PLUS that adds a register or a MEM to
8460 : another register, constant or MEM. This can occur during frame pointer
8461 : elimination and while reloading addresses. This case is handled by
8462 : trying to emit a single insn to perform the add. If it is not valid,
8463 : we use a two insn sequence.
8464 :
8465 : Or we can be asked to reload an unary operand that was a fragment of
8466 : an addressing mode, into a register. If it isn't recognized as-is,
8467 : we try making the unop operand and the reload-register the same:
8468 : (set reg:X (unop:X expr:Y))
8469 : -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
8470 :
8471 : Finally, we could be called to handle an 'o' constraint by putting
8472 : an address into a register. In that case, we first try to do this
8473 : with a named pattern of "reload_load_address". If no such pattern
8474 : exists, we just emit a SET insn and hope for the best (it will normally
8475 : be valid on machines that use 'o').
8476 :
8477 : This entire process is made complex because reload will never
8478 : process the insns we generate here and so we must ensure that
8479 : they will fit their constraints and also by the fact that parts of
8480 : IN might be being reloaded separately and replaced with spill registers.
8481 : Because of this, we are, in some sense, just guessing the right approach
8482 : here. The one listed above seems to work.
8483 :
8484 : ??? At some point, this whole thing needs to be rethought. */
8485 :
8486 0 : if (GET_CODE (in) == PLUS
8487 0 : && (REG_P (XEXP (in, 0))
8488 : || GET_CODE (XEXP (in, 0)) == SUBREG
8489 : || MEM_P (XEXP (in, 0)))
8490 0 : && (REG_P (XEXP (in, 1))
8491 0 : || GET_CODE (XEXP (in, 1)) == SUBREG
8492 0 : || CONSTANT_P (XEXP (in, 1))
8493 0 : || MEM_P (XEXP (in, 1))))
8494 : {
8495 : /* We need to compute the sum of a register or a MEM and another
8496 : register, constant, or MEM, and put it into the reload
8497 : register. The best possible way of doing this is if the machine
8498 : has a three-operand ADD insn that accepts the required operands.
8499 :
8500 : The simplest approach is to try to generate such an insn and see if it
8501 : is recognized and matches its constraints. If so, it can be used.
8502 :
8503 : It might be better not to actually emit the insn unless it is valid,
8504 : but we need to pass the insn as an operand to `recog' and
8505 : `extract_insn' and it is simpler to emit and then delete the insn if
8506 : not valid than to dummy things up. */
8507 :
8508 0 : rtx op0, op1, tem;
8509 0 : rtx_insn *insn;
8510 0 : enum insn_code code;
8511 :
8512 0 : op0 = find_replacement (&XEXP (in, 0));
8513 0 : op1 = find_replacement (&XEXP (in, 1));
8514 :
8515 : /* Since constraint checking is strict, commutativity won't be
8516 : checked, so we need to do that here to avoid spurious failure
8517 : if the add instruction is two-address and the second operand
8518 : of the add is the same as the reload reg, which is frequently
8519 : the case. If the insn would be A = B + A, rearrange it so
8520 : it will be A = A + B as constrain_operands expects. */
8521 :
8522 0 : if (REG_P (XEXP (in, 1))
8523 0 : && REGNO (out) == REGNO (XEXP (in, 1)))
8524 : tem = op0, op0 = op1, op1 = tem;
8525 :
8526 0 : if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
8527 0 : in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
8528 :
8529 0 : insn = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
8530 0 : if (insn)
8531 : return insn;
8532 :
8533 : /* If that failed, we must use a conservative two-insn sequence.
8534 :
8535 : Use a move to copy one operand into the reload register. Prefer
8536 : to reload a constant, MEM or pseudo since the move patterns can
8537 : handle an arbitrary operand. If OP1 is not a constant, MEM or
8538 : pseudo and OP1 is not a valid operand for an add instruction, then
8539 : reload OP1.
8540 :
8541 : After reloading one of the operands into the reload register, add
8542 : the reload register to the output register.
8543 :
8544 : If there is another way to do this for a specific machine, a
8545 : DEFINE_PEEPHOLE should be specified that recognizes the sequence
8546 : we emit below. */
8547 :
8548 0 : code = optab_handler (add_optab, GET_MODE (out));
8549 :
8550 0 : if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
8551 0 : || (REG_P (op1)
8552 0 : && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
8553 0 : || (code != CODE_FOR_nothing
8554 0 : && !insn_operand_matches (code, 2, op1)))
8555 : tem = op0, op0 = op1, op1 = tem;
8556 :
8557 0 : gen_reload (out, op0, opnum, type);
8558 :
8559 : /* If OP0 and OP1 are the same, we can use OUT for OP1.
8560 : This fixes a problem on the 32K where the stack pointer cannot
8561 : be used as an operand of an add insn. */
8562 :
8563 0 : if (rtx_equal_p (op0, op1))
8564 0 : op1 = out;
8565 :
8566 0 : insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
8567 0 : if (insn)
8568 : {
8569 : /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
8570 0 : set_dst_reg_note (insn, REG_EQUIV, in, out);
8571 0 : return insn;
8572 : }
8573 :
8574 : /* If that failed, copy the address register to the reload register.
8575 : Then add the constant to the reload register. */
8576 :
8577 0 : gcc_assert (!reg_overlap_mentioned_p (out, op0));
8578 0 : gen_reload (out, op1, opnum, type);
8579 0 : insn = emit_insn (gen_add2_insn (out, op0));
8580 0 : set_dst_reg_note (insn, REG_EQUIV, in, out);
8581 0 : }
8582 :
8583 : /* If we need a memory location to do the move, do it that way. */
8584 0 : else if ((tem1 = replaced_subreg (in), tem2 = replaced_subreg (out),
8585 0 : (REG_P (tem1) && REG_P (tem2)))
8586 0 : && REGNO (tem1) < FIRST_PSEUDO_REGISTER
8587 0 : && REGNO (tem2) < FIRST_PSEUDO_REGISTER
8588 0 : && targetm.secondary_memory_needed (GET_MODE (out),
8589 0 : REGNO_REG_CLASS (REGNO (tem1)),
8590 0 : REGNO_REG_CLASS (REGNO (tem2))))
8591 : {
8592 : /* Get the memory to use and rewrite both registers to its mode. */
8593 0 : rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
8594 :
8595 0 : if (GET_MODE (loc) != GET_MODE (out))
8596 0 : out = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (out));
8597 :
8598 0 : if (GET_MODE (loc) != GET_MODE (in))
8599 0 : in = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (in));
8600 :
8601 0 : gen_reload (loc, in, opnum, type);
8602 0 : gen_reload (out, loc, opnum, type);
8603 : }
8604 0 : else if (REG_P (out) && UNARY_P (in))
8605 : {
8606 0 : rtx op1;
8607 0 : rtx out_moded;
8608 0 : rtx_insn *set;
8609 :
8610 0 : op1 = find_replacement (&XEXP (in, 0));
8611 0 : if (op1 != XEXP (in, 0))
8612 0 : in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
8613 :
8614 : /* First, try a plain SET. */
8615 0 : set = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
8616 0 : if (set)
8617 : return set;
8618 :
8619 : /* If that failed, move the inner operand to the reload
8620 : register, and try the same unop with the inner expression
8621 : replaced with the reload register. */
8622 :
8623 0 : if (GET_MODE (op1) != GET_MODE (out))
8624 0 : out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
8625 : else
8626 : out_moded = out;
8627 :
8628 0 : gen_reload (out_moded, op1, opnum, type);
8629 :
8630 0 : rtx temp = gen_rtx_SET (out, gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
8631 : out_moded));
8632 0 : rtx_insn *insn = emit_insn_if_valid_for_reload (temp);
8633 0 : if (insn)
8634 : {
8635 0 : set_unique_reg_note (insn, REG_EQUIV, in);
8636 0 : return insn;
8637 : }
8638 :
8639 0 : fatal_insn ("failure trying to reload:", in);
8640 : }
8641 : /* If IN is a simple operand, use gen_move_insn. */
8642 0 : else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
8643 : {
8644 0 : tem = emit_insn (gen_move_insn (out, in));
8645 : /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note. */
8646 0 : mark_jump_label (in, tem, 0);
8647 : }
8648 :
8649 0 : else if (targetm.have_reload_load_address ())
8650 0 : emit_insn (targetm.gen_reload_load_address (out, in));
8651 :
8652 : /* Otherwise, just write (set OUT IN) and hope for the best. */
8653 : else
8654 0 : emit_insn (gen_rtx_SET (out, in));
8655 :
8656 : /* Return the first insn emitted.
8657 : We cannot just return get_last_insn, because there may have
8658 : been multiple instructions emitted. Also note that gen_move_insn may
8659 : emit more than one insn itself, so we cannot assume that there is one
8660 : insn emitted per emit_insn_before call. */
8661 :
8662 0 : return last ? NEXT_INSN (last) : get_insns ();
8663 : }
8664 :
8665 : /* Delete a previously made output-reload whose result we now believe
8666 : is not needed. First we double-check.
8667 :
8668 : INSN is the insn now being processed.
8669 : LAST_RELOAD_REG is the hard register number for which we want to delete
8670 : the last output reload.
8671 : J is the reload-number that originally used REG. The caller has made
8672 : certain that reload J doesn't use REG any longer for input.
8673 : NEW_RELOAD_REG is reload register that reload J is using for REG. */
8674 :
static void
delete_output_reload (rtx_insn *insn, int j, int last_reload_reg,
		      rtx new_reload_reg)
{
  rtx_insn *output_reload_insn = spill_reg_store[last_reload_reg];
  rtx reg = spill_reg_stored_to[last_reload_reg];
  int k;
  int n_occurrences;		/* References to REG found in INSN.  */
  int n_inherited = 0;		/* References explained by inheritance.  */
  rtx substed;			/* Memory equivalence of REG, if any.  */
  unsigned regno;
  int nregs;

  /* It is possible that this reload has been only used to set another reload
     we eliminated earlier and thus deleted this instruction too.  */
  if (output_reload_insn->deleted ())
    return;

  /* Get the raw pseudo-register referred to.  */

  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  substed = reg_equiv_memory_loc (REGNO (reg));

  /* This is unsafe if the operand occurs more often in the current
     insn than it is inherited.  Walk all reloads of INSN and tally, in
     N_INHERITED, the ones whose input is REG and that get their value
     by inheritance (or are reload J itself); any other reload reading
     REG means the stored value is still needed, so bail out.  */
  for (k = n_reloads - 1; k >= 0; k--)
    {
      rtx reg2 = rld[k].in;
      if (! reg2)
	continue;
      /* For memory inputs or overridden inputs, the original register
	 is recorded in in_reg rather than in.  */
      if (MEM_P (reg2) || reload_override_in[k])
	reg2 = rld[k].in_reg;

      /* An output-only auto-inc reload still reads the register inside
	 the {pre,post}_{inc,dec} expression held in in_reg.  */
      if (AUTO_INC_DEC && rld[k].out && ! rld[k].out_reg)
	reg2 = XEXP (rld[k].in_reg, 0);

      while (GET_CODE (reg2) == SUBREG)
	reg2 = SUBREG_REG (reg2);
      if (rtx_equal_p (reg2, reg))
	{
	  if (reload_inherited[k] || reload_override_in[k] || k == j)
	    n_inherited++;
	  else
	    return;
	}
    }
  /* Count how many times REG is actually referenced by INSN: in its
     pattern, in a call's function-usage list, via its eliminated memory
     equivalence, and via any alternate memory forms of that equivalence.  */
  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
					reg, 0);
  if (substed)
    n_occurrences += count_occurrences (PATTERN (insn),
					eliminate_regs (substed, VOIDmode,
							NULL_RTX), 0);
  for (rtx i1 = reg_equiv_alt_mem_list (REGNO (reg)); i1; i1 = XEXP (i1, 1))
    {
      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
    }
  /* If INSN refers to REG more often than inheritance accounts for,
     the output reload is still live; keep it.  */
  if (n_occurrences > n_inherited)
    return;

  regno = REGNO (reg);
  nregs = REG_NREGS (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and we're within the same basic block, then the value can only
     pass through the reload reg and end up here.
     Otherwise, give up--return.  */
  for (rtx_insn *i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      /* Crossing a basic-block boundary: can't prove anything, give up.  */
      if (NOTE_INSN_BASIC_BLOCK_P (i1))
	return;
      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
	  && refers_to_regno_p (regno, regno + nregs, PATTERN (i1), NULL))
	{
	  /* If this is USE in front of INSN, we only have to check that
	     there are no more references than accounted for by inheritance.  */
	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
	    {
	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
	      i1 = NEXT_INSN (i1);
	    }
	  if (n_occurrences <= n_inherited && i1 == insn)
	    break;
	  return;
	}
    }

  /* We will be deleting the insn.  Remove the spill reg information.  */
  for (k = hard_regno_nregs (last_reload_reg, GET_MODE (reg)); k-- > 0; )
    {
      spill_reg_store[last_reload_reg + k] = 0;
      spill_reg_stored_to[last_reload_reg + k] = 0;
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some
     inaccuracies in the debugging information are acceptable.
     So we could just delete output_reload_insn.  But in some cases
     we can improve the debugging information without sacrificing
     optimization - maybe even improving the code: See if the pseudo
     reg has been completely replaced with reload regs.  If so, delete
     the store insn and forget we had a stack slot for the pseudo.  */
  if (rld[j].out != rld[j].in
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_N_SETS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx_insn *i2;

      /* We know that it was used only between here and the beginning of
	 the current basic block.  (We also know that the last use before
	 INSN was the output reload we are thinking of deleting, but never
	 mind that.)  Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    {
	      /* Some other ref remains; just delete the output reload we
		 know to be dead.  */
	      delete_address_reloads (output_reload_insn, insn);
	      delete_insn (output_reload_insn);
	      return;
	    }
	}

      /* Delete the now-dead stores into this pseudo.  Note that this
	 loop also takes care of deleting output_reload_insn.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      delete_address_reloads (i2, insn);
	      delete_insn (i2);
	    }
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	}

      /* For the debugging info, say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
      if (ira_conflicts_p)
	/* Inform IRA about the change.  */
	ira_mark_allocation_change (REGNO (reg));
      alter_reg (REGNO (reg), -1, false);
    }
  else
    {
      /* The pseudo still matters elsewhere; only delete the one dead
	 output reload (and any address reloads feeding it).  */
      delete_address_reloads (output_reload_insn, insn);
      delete_insn (output_reload_insn);
    }
}
8843 :
8844 : /* We are going to delete DEAD_INSN. Recursively delete loads of
8845 : reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8846 : CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8847 : static void
8848 0 : delete_address_reloads (rtx_insn *dead_insn, rtx_insn *current_insn)
8849 : {
8850 0 : rtx set = single_set (dead_insn);
8851 0 : rtx set2, dst;
8852 0 : rtx_insn *prev, *next;
8853 0 : if (set)
8854 : {
8855 0 : rtx dst = SET_DEST (set);
8856 0 : if (MEM_P (dst))
8857 0 : delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8858 : }
8859 : /* If we deleted the store from a reloaded post_{in,de}c expression,
8860 : we can delete the matching adds. */
8861 0 : prev = PREV_INSN (dead_insn);
8862 0 : next = NEXT_INSN (dead_insn);
8863 0 : if (! prev || ! next)
8864 : return;
8865 0 : set = single_set (next);
8866 0 : set2 = single_set (prev);
8867 0 : if (! set || ! set2
8868 0 : || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8869 0 : || !CONST_INT_P (XEXP (SET_SRC (set), 1))
8870 0 : || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
8871 : return;
8872 0 : dst = SET_DEST (set);
8873 0 : if (! rtx_equal_p (dst, SET_DEST (set2))
8874 0 : || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8875 0 : || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8876 0 : || (INTVAL (XEXP (SET_SRC (set), 1))
8877 0 : != -INTVAL (XEXP (SET_SRC (set2), 1))))
8878 0 : return;
8879 0 : delete_related_insns (prev);
8880 0 : delete_related_insns (next);
8881 : }
8882 :
8883 : /* Subfunction of delete_address_reloads: process registers found in X. */
static void
delete_address_reloads_1 (rtx_insn *dead_insn, rtx x, rtx_insn *current_insn)
{
  rtx_insn *prev, *i2;
  rtx set, dst;
  int i, j;
  enum rtx_code code = GET_CODE (x);

  /* If X is not a bare register, recurse into every sub-rtx so each
     register mentioned anywhere in the address gets processed.  */
  if (code != REG)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
					  current_insn);
	    }
	}
      return;
    }

  /* Only spill (reload) registers are candidates for deletion.  */
  if (spill_reg_order[REGNO (x)] < 0)
    return;

  /* Scan backwards for the insn that sets x.  This might be a way back due
     to inheritance.  */
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
    {
      code = GET_CODE (prev);
      /* Stop at control-flow boundaries: past a label or jump we can't
	 reason about X's value.  */
      if (code == CODE_LABEL || code == JUMP_INSN)
	return;
      if (!INSN_P (prev))
	continue;
      if (reg_set_p (x, PATTERN (prev)))
	break;
      /* An intervening use of X means its load is still needed.  */
      if (reg_referenced_p (x, PATTERN (prev)))
	return;
    }
  /* The setter must be an insn emitted by this reload pass; anything
     older than reload_first_uid belongs to the original stream.  */
  if (! prev || INSN_UID (prev) < reload_first_uid)
    return;
  /* Check that PREV only sets the reload register.  */
  set = single_set (prev);
  if (! set)
    return;
  dst = SET_DEST (set);
  if (!REG_P (dst)
      || ! rtx_equal_p (dst, x))
    return;
  if (! reg_set_p (dst, PATTERN (dead_insn)))
    {
      /* Check if DST was used in a later insn -
	 it might have been inherited.  */
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
	{
	  if (LABEL_P (i2))
	    break;
	  if (! INSN_P (i2))
	    continue;
	  if (reg_referenced_p (dst, PATTERN (i2)))
	    {
	      /* If there is a reference to the register in the current insn,
		 it might be loaded in a non-inherited reload.  If no other
		 reload uses it, that means the register is set before
		 referenced.  */
	      if (i2 == current_insn)
		{
		  /* An inherited or override-in reload still needs DST.  */
		  for (j = n_reloads - 1; j >= 0; j--)
		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
			|| reload_override_in[j] == dst)
		      return;
		  /* If some reload will freshly load DST, the reference
		     in CURRENT_INSN is satisfied without PREV.  */
		  for (j = n_reloads - 1; j >= 0; j--)
		    if (rld[j].in && rld[j].reg_rtx == dst)
		      break;
		  if (j >= 0)
		    break;
		}
	      return;
	    }
	  if (JUMP_P (i2))
	    break;
	  /* If DST is still live at CURRENT_INSN, check if it is used for
	     any reload.  Note that even if CURRENT_INSN sets DST, we still
	     have to check the reloads.  */
	  if (i2 == current_insn)
	    {
	      for (j = n_reloads - 1; j >= 0; j--)
		if ((rld[j].reg_rtx == dst && reload_inherited[j])
		    || reload_override_in[j] == dst)
		  return;
	      /* ??? We can't finish the loop here, because dst might be
		 allocated to a pseudo in this block if no reload in this
		 block needs any of the classes containing DST - see
		 spill_hard_reg.  There is no easy way to tell this, so we
		 have to scan till the end of the basic block.  */
	    }
	  if (reg_set_p (dst, PATTERN (i2)))
	    break;
	}
    }
  /* PREV is dead: recursively clean up the loads feeding ITS address,
     invalidate the cached contents of DST, and delete PREV itself.  */
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
  reg_reloaded_contents[REGNO (dst)] = -1;
  delete_insn (prev);
}
8991 :
8992 : /* Output reload-insns to reload VALUE into RELOADREG.
8993 : VALUE is an autoincrement or autodecrement RTX whose operand
8994 : is a register or memory location;
8995 : so reloading involves incrementing that location.
8996 : IN is either identical to VALUE, or some cheaper place to reload from.
8997 :
8998 : INC_AMOUNT is the number to increment or decrement by (always positive).
8999 : This cannot be deduced from VALUE. */
9000 :
static void
inc_for_reload (rtx reloadreg, rtx in, rtx value, poly_int64 inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = find_replacement (&XEXP (value, 0));
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx_insn *last;
  rtx inc;			/* The increment as an rtx constant.  */
  rtx_insn *add_insn;
  int code;
  /* The cheapest place to read the value from: INCLOC itself when IN
     is just VALUE, otherwise the alternative location IN.  */
  rtx real_in = in == value ? incloc : in;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (REG_P (incloc))
    reg_last_reload_reg[REGNO (incloc)] = 0;

  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      /* {PRE,POST}_MODIFY carry the increment explicitly as the second
	 operand of a PLUS; INC_AMOUNT is not used in this case.  */
      gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
      inc = find_replacement (&XEXP (XEXP (value, 1), 1));
    }
  else
    {
      /* Decrements are expressed as negative increments.  */
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = gen_int_mode (inc_amount, Pmode);
    }

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post && real_in != reloadreg)
    emit_insn (gen_move_insn (reloadreg, real_in));

  if (in == value)
    {
      /* See if we can directly increment INCLOC.  Use a method similar to
	 that in gen_reload.  */

      last = get_last_insn ();
      add_insn = emit_insn (gen_rtx_SET (incloc,
					 gen_rtx_PLUS (GET_MODE (incloc),
						       incloc, inc)));

      /* Keep the add only if the target recognizes it and its
	 constraints are satisfiable; otherwise back it out below.  */
      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  extract_insn (add_insn);
	  if (constrain_operands (1, get_enabled_alternatives (add_insn)))
	    {
	      /* If this is a pre-increment and we have incremented the value
		 where it lives, copy the incremented value to RELOADREG to
		 be used as an address.  */

	      if (! post)
		emit_insn (gen_move_insn (reloadreg, incloc));
	      return;
	    }
	}
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      if (in != reloadreg)
	emit_insn (gen_move_insn (reloadreg, real_in));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied IN to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      /* Undo the increment in RELOADREG: subtract a constant directly
	 where possible, otherwise emit an explicit subtract (INC may be
	 a register for {PRE,POST}_MODIFY).  */
      if (CONST_INT_P (inc))
	emit_insn (gen_add2_insn (reloadreg,
				  gen_int_mode (-INTVAL (inc),
						GET_MODE (reloadreg))));
      else
	emit_insn (gen_sub2_insn (reloadreg, inc));
    }
}
|