Branch data Line data Source code
1 : : /* Reload pseudo regs into hard regs for insns that require hard regs.
2 : : Copyright (C) 1987-2025 Free Software Foundation, Inc.
3 : :
4 : : This file is part of GCC.
5 : :
6 : : GCC is free software; you can redistribute it and/or modify it under
7 : : the terms of the GNU General Public License as published by the Free
8 : : Software Foundation; either version 3, or (at your option) any later
9 : : version.
10 : :
11 : : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 : : WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 : : FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 : : for more details.
15 : :
16 : : You should have received a copy of the GNU General Public License
17 : : along with GCC; see the file COPYING3. If not see
18 : : <http://www.gnu.org/licenses/>. */
19 : :
20 : : #include "config.h"
21 : : #include "system.h"
22 : : #include "coretypes.h"
23 : : #include "backend.h"
24 : : #include "target.h"
25 : : #include "rtl.h"
26 : : #include "tree.h"
27 : : #include "predict.h"
28 : : #include "df.h"
29 : : #include "memmodel.h"
30 : : #include "tm_p.h"
31 : : #include "optabs.h"
32 : : #include "regs.h"
33 : : #include "ira.h"
34 : : #include "recog.h"
35 : :
36 : : #include "rtl-error.h"
37 : : #include "expr.h"
38 : : #include "addresses.h"
39 : : #include "cfgrtl.h"
40 : : #include "cfgbuild.h"
41 : : #include "reload.h"
42 : : #include "except.h"
43 : : #include "dumpfile.h"
44 : : #include "rtl-iter.h"
45 : : #include "function-abi.h"
46 : :
47 : : /* This file contains the reload pass of the compiler, which is
48 : : run after register allocation has been done. It checks that
49 : : each insn is valid (operands required to be in registers really
50 : : are in registers of the proper class) and fixes up invalid ones
51 : : by copying values temporarily into registers for the insns
52 : : that need them.
53 : :
54 : : The results of register allocation are described by the vector
55 : : reg_renumber; the insns still contain pseudo regs, but reg_renumber
56 : : can be used to find which hard reg, if any, a pseudo reg is in.
57 : :
58 : : The technique we always use is to free up a few hard regs that are
59 : : called ``reload regs'', and for each place where a pseudo reg
60 : : must be in a hard reg, copy it temporarily into one of the reload regs.
61 : :
62 : : Reload regs are allocated locally for every instruction that needs
63 : : reloads. When there are pseudos which are allocated to a register that
64 : : has been chosen as a reload reg, such pseudos must be ``spilled''.
65 : : This means that they go to other hard regs, or to stack slots if no other
66 : : available hard regs can be found. Spilling can invalidate more
67 : : insns, requiring additional need for reloads, so we must keep checking
68 : : until the process stabilizes.
69 : :
70 : : For machines with different classes of registers, we must keep track
71 : : of the register class needed for each reload, and make sure that
72 : : we allocate enough reload registers of each class.
73 : :
74 : : The file reload.cc contains the code that checks one insn for
75 : : validity and reports the reloads that it needs. This file
76 : : is in charge of scanning the entire rtl code, accumulating the
77 : : reload needs, spilling, assigning reload registers to use for
78 : : fixing up each insn, and generating the new insns to copy values
79 : : into the reload registers. */
80 : :
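/* Editorial sketch (not part of the original source): the idea above in
   miniature, using hypothetical register numbers and a hypothetical stack
   slot.  Suppose pseudo 100 got no hard reg and lives in a stack slot,
   but an insn needs its value in a register:

       before:  (set (reg 200) (plus (reg 100) (const_int 1)))

   Reload picks a reload reg, say hard reg r1, loads the value into it,
   and rewrites the insn to use it:

       after:   (set (reg r1) (mem (plus (frame_pointer) (const_int -8))))
                (set (reg 200) (plus (reg r1) (const_int 1)))

   If r1 was already occupied by some pseudo, that pseudo is spilled as
   described above.  All register numbers and the offset are illustrative.  */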
81 : : struct target_reload default_target_reload;
82 : : #if SWITCHABLE_TARGET
83 : : struct target_reload *this_target_reload = &default_target_reload;
84 : : #endif
85 : :
86 : : #define spill_indirect_levels \
87 : : (this_target_reload->x_spill_indirect_levels)
88 : :
89 : : /* During reload_as_needed, element N contains a REG rtx for the hard reg
90 : : into which reg N has been reloaded (perhaps for a previous insn). */
91 : : static rtx *reg_last_reload_reg;
92 : :
93 : : /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
94 : : for an output reload that stores into reg N. */
95 : : static regset_head reg_has_output_reload;
96 : :
97 : : /* Indicates which hard regs are reload-registers for an output reload
98 : : in the current insn. */
99 : : static HARD_REG_SET reg_is_output_reload;
100 : :
101 : : /* Widest mode in which each pseudo reg is referred to (via subreg). */
102 : : static machine_mode *reg_max_ref_mode;
103 : :
104 : : /* Vector to remember old contents of reg_renumber before spilling. */
105 : : static short *reg_old_renumber;
106 : :
107 : : /* During reload_as_needed, element N contains the last pseudo regno reloaded
108 : : into hard register N. If that pseudo reg occupied more than one register,
109 : : reg_reloaded_contents points to that pseudo for each spill register in
110 : : use; all of these must remain set for an inheritance to occur. */
111 : : static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
112 : :
113 : : /* During reload_as_needed, element N contains the insn for which
114 : : hard register N was last used. Its contents are significant only
115 : : when reg_reloaded_valid is set for this register. */
116 : : static rtx_insn *reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
117 : :
118 : : /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
119 : : static HARD_REG_SET reg_reloaded_valid;
120 : : /* Indicate if the register was dead at the end of the reload.
121 : : This is only valid if reg_reloaded_contents is set and valid. */
122 : : static HARD_REG_SET reg_reloaded_dead;
123 : :
124 : : /* Number of spill-regs so far; number of valid elements of spill_regs. */
125 : : static int n_spills;
126 : :
127 : : /* In parallel with spill_regs, contains REG rtx's for those regs.
128 : : Holds the last rtx used for any given reg, or 0 if it has never
129 : : been used for spilling yet. This rtx is reused, provided it has
130 : : the proper mode. */
131 : : static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
132 : :
133 : : /* In parallel with spill_regs, contains nonzero for a spill reg
134 : : that was stored after the last time it was used.
135 : : The precise value is the insn generated to do the store. */
136 : : static rtx_insn *spill_reg_store[FIRST_PSEUDO_REGISTER];
137 : :
138 : : /* This is the register that was stored with spill_reg_store. This is a
139 : : copy of reload_out / reload_out_reg when the value was stored; if
140 : : reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
141 : : static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
142 : :
143 : : /* This table is the inverse mapping of spill_regs:
144 : : indexed by hard reg number,
145 : : it contains the position of that reg in spill_regs,
146 : : or -1 for something that is not in spill_regs.
147 : :
148 : : ?!? This is no longer accurate. */
149 : : static short spill_reg_order[FIRST_PSEUDO_REGISTER];
150 : :
151 : : /* This reg set indicates registers that can't be used as spill registers for
152 : : the currently processed insn. These are the hard registers which are live
153 : : during the insn, but not allocated to pseudos, as well as fixed
154 : : registers. */
155 : : static HARD_REG_SET bad_spill_regs;
156 : :
157 : : /* These are the hard registers that can't be used as spill register for any
158 : : insn. This includes registers used for user variables and registers that
159 : : we can't eliminate. A register that appears in this set also can't be used
160 : : to retry register allocation. */
161 : : static HARD_REG_SET bad_spill_regs_global;
162 : :
163 : : /* Describes order of use of registers for reloading
164 : : of spilled pseudo-registers. `n_spills' is the number of
165 : : elements that are actually valid; new ones are added at the end.
166 : :
167 : : Both spill_regs and spill_reg_order are used on two occasions:
168 : : once during find_reload_regs, where they keep track of the spill registers
169 : : for a single insn, but also during reload_as_needed where they show all
170 : : the registers ever used by reload. For the latter case, the information
171 : : is calculated during finish_spills. */
172 : : static short spill_regs[FIRST_PSEUDO_REGISTER];
173 : :
174 : : /* This vector of reg sets indicates, for each pseudo, which hard registers
175 : : may not be used for retrying global allocation because the register was
176 : : formerly spilled from one of them. If we allowed reallocating a pseudo to
177 : : a register that it was already allocated to, reload might not
178 : : terminate. */
179 : : static HARD_REG_SET *pseudo_previous_regs;
180 : :
181 : : /* This vector of reg sets indicates, for each pseudo, which hard
182 : : registers may not be used for retrying global allocation because they
183 : : are used as spill registers during one of the insns in which the
184 : : pseudo is live. */
185 : : static HARD_REG_SET *pseudo_forbidden_regs;
186 : :
187 : : /* All hard regs that have been used as spill registers for any insn are
188 : : marked in this set. */
189 : : static HARD_REG_SET used_spill_regs;
190 : :
191 : : /* Index of last register assigned as a spill register. We allocate in
192 : : a round-robin fashion. */
193 : : static int last_spill_reg;
194 : :
195 : : /* Record the stack slot for each spilled hard register. */
196 : : static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
197 : :
198 : : /* Width allocated so far for that stack slot. */
199 : : static poly_uint64 spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
200 : :
201 : : /* Record which pseudos needed to be spilled. */
202 : : static regset_head spilled_pseudos;
203 : :
204 : : /* Record which pseudos changed their allocation in finish_spills. */
205 : : static regset_head changed_allocation_pseudos;
206 : :
207 : : /* Used for communication between order_regs_for_reload and count_pseudo.
208 : : Used to avoid counting one pseudo twice. */
209 : : static regset_head pseudos_counted;
210 : :
211 : : /* First uid used by insns created by reload in this function.
212 : : Used in find_equiv_reg. */
213 : : int reload_first_uid;
214 : :
215 : : /* Flag set by local-alloc or global-alloc if anything is live in
216 : : a call-clobbered reg across calls. */
217 : : int caller_save_needed;
218 : :
219 : : /* Set to 1 while reload_as_needed is operating.
220 : : Required by some machines to handle any generated moves differently. */
221 : : int reload_in_progress = 0;
222 : :
223 : : /* This obstack is used for allocation of rtl during register elimination.
224 : : The allocated storage can be freed once find_reloads has processed the
225 : : insn. */
226 : : static struct obstack reload_obstack;
227 : :
228 : : /* Points to the beginning of the reload_obstack. All insn_chain structures
229 : : are allocated first. */
230 : : static char *reload_startobj;
231 : :
232 : : /* The point after all insn_chain structures. Used to quickly deallocate
233 : : memory allocated in copy_reloads during calculate_needs_all_insns. */
234 : : static char *reload_firstobj;
235 : :
236 : : /* This points before all local rtl generated by register elimination.
237 : : Used to quickly free all memory after processing one insn. */
238 : : static char *reload_insn_firstobj;
239 : :
240 : : /* List of insn_chain instructions, one for every insn that reload needs to
241 : : examine. */
242 : : class insn_chain *reload_insn_chain;
243 : :
244 : : /* TRUE if we potentially left dead insns in the insn stream and want to
245 : : run DCE immediately after reload, FALSE otherwise. */
246 : : static bool need_dce;
247 : :
248 : : /* List of all insns needing reloads. */
249 : : static class insn_chain *insns_need_reload;
250 : :
251 : : /* This structure is used to record information about register eliminations.
252 : : Each array entry describes one possible way of eliminating a register
253 : : in favor of another. If there is more than one way of eliminating a
254 : : particular register, the most preferred should be specified first. */
255 : :
256 : : struct elim_table
257 : : {
258 : : int from; /* Register number to be eliminated. */
259 : : int to; /* Register number used as replacement. */
260 : : poly_int64 initial_offset; /* Initial difference between values. */
261 : : int can_eliminate; /* Nonzero if this elimination can be done. */
262 : : int can_eliminate_previous; /* Value returned by TARGET_CAN_ELIMINATE
263 : : target hook in previous scan over insns
264 : : made by reload. */
265 : : poly_int64 offset; /* Current offset between the two regs. */
266 : : poly_int64 previous_offset; /* Offset at end of previous insn. */
267 : : int ref_outside_mem; /* "to" has been referenced outside a MEM. */
268 : : rtx from_rtx; /* REG rtx for the register to be eliminated.
269 : : We cannot simply compare the number since
270 : : we might then spuriously replace a hard
271 : : register corresponding to a pseudo
272 : : assigned to the reg to be eliminated. */
273 : : rtx to_rtx; /* REG rtx for the replacement. */
274 : : };
275 : :
276 : : static struct elim_table *reg_eliminate = 0;
277 : :
278 : : /* This is an intermediate structure to initialize the table. It has
279 : : exactly the members provided by ELIMINABLE_REGS. */
280 : : static const struct elim_table_1
281 : : {
282 : : const int from;
283 : : const int to;
284 : : } reg_eliminate_1[] =
285 : :
286 : : /* Reload and LRA don't agree on how a multi-register frame pointer
287 : : is represented for elimination. See avr.h for a use case. */
288 : : #ifdef RELOAD_ELIMINABLE_REGS
289 : : RELOAD_ELIMINABLE_REGS;
290 : : #else
291 : : ELIMINABLE_REGS;
292 : : #endif
293 : :
294 : : #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
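/* Editorial note (illustrative, not taken from this file): ELIMINABLE_REGS
   is defined by each target as a brace-initializer of {from, to} pairs,
   typically something like

       #define ELIMINABLE_REGS                                  \
         {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },       \
          { ARG_POINTER_REGNUM,   HARD_FRAME_POINTER_REGNUM },  \
          { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM },       \
          { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM }}

   The exact pairs are target-specific; reg_eliminate_1 above simply takes
   whichever definition the target (or RELOAD_ELIMINABLE_REGS) provides.  */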
295 : :
296 : : /* Record the number of pending eliminations that have an offset not equal
297 : : to their initial offset. If nonzero, we use a new copy of each
298 : : replacement result in any insns encountered. */
299 : : int num_not_at_initial_offset;
300 : :
301 : : /* Count the number of registers that we may be able to eliminate. */
302 : : static int num_eliminable;
303 : : /* And the number of registers that are equivalent to a constant that
304 : : can be eliminated to frame_pointer / arg_pointer + constant. */
305 : : static int num_eliminable_invariants;
306 : :
307 : : /* For each label, we record the offset of each elimination. If we reach
308 : : a label by more than one path and an offset differs, we cannot do the
309 : : elimination. This information is indexed by the difference of the
310 : : number of the label and the first label number. We can't offset the
311 : : pointer itself as this can cause problems on machines with segmented
312 : : memory. The first table is an array of flags that records whether we
313 : : have yet encountered a label and the second table is an array of arrays,
314 : : one entry in the latter array for each elimination. */
315 : :
316 : : static int first_label_num;
317 : : static char *offsets_known_at;
318 : : static poly_int64 (*offsets_at)[NUM_ELIMINABLE_REGS];
319 : :
320 : : vec<reg_equivs_t, va_gc> *reg_equivs;
321 : :
322 : : /* Stack of addresses where an rtx has been changed. We can undo the
323 : : changes by popping items off the stack and restoring the original
324 : : value at each location.
325 : :
326 : : We use this simplistic undo capability rather than copy_rtx as copy_rtx
327 : : will not make a deep copy of a normally sharable rtx, such as
328 : : (const (plus (symbol_ref) (const_int))). If such an expression appears
329 : : as R1 in gen_reload_chain_without_interm_reg_p, then a shared
330 : : rtx expression would be changed. See PR 42431. */
331 : :
332 : : typedef rtx *rtx_p;
333 : : static vec<rtx_p> substitute_stack;
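/* Editorial sketch (illustrative only) of the undo protocol described
   above.  A caller that temporarily rewrites an address records where it
   made the change, then later restores every recorded location:

       substitute_stack.safe_push (loc);
       *loc = replacement;
       ...
       while (!substitute_stack.is_empty ())
         *substitute_stack.pop () = original;

   Here `replacement' and `original' stand for whatever rtx the caller
   substitutes and later restores; see substitute and
   gen_reload_chain_without_interm_reg_p for the real uses.  */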
334 : :
335 : : /* Number of labels in the current function. */
336 : :
337 : : static int num_labels;
338 : :
339 : : static void replace_pseudos_in (rtx *, machine_mode, rtx);
340 : : static void maybe_fix_stack_asms (void);
341 : : static void copy_reloads (class insn_chain *);
342 : : static void calculate_needs_all_insns (int);
343 : : static int find_reg (class insn_chain *, int);
344 : : static void find_reload_regs (class insn_chain *);
345 : : static void select_reload_regs (void);
346 : : static void delete_caller_save_insns (void);
347 : :
348 : : static void spill_failure (rtx_insn *, enum reg_class);
349 : : static void count_spilled_pseudo (int, int, int);
350 : : static void delete_dead_insn (rtx_insn *);
351 : : static void alter_reg (int, int, bool);
352 : : static void set_label_offsets (rtx, rtx_insn *, int);
353 : : static void check_eliminable_occurrences (rtx);
354 : : static void elimination_effects (rtx, machine_mode);
355 : : static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool);
356 : : static int eliminate_regs_in_insn (rtx_insn *, int);
357 : : static void update_eliminable_offsets (void);
358 : : static void mark_not_eliminable (rtx, const_rtx, void *);
359 : : static void set_initial_elim_offsets (void);
360 : : static bool verify_initial_elim_offsets (void);
361 : : static void set_initial_label_offsets (void);
362 : : static void set_offsets_for_label (rtx_insn *);
363 : : static void init_eliminable_invariants (rtx_insn *, bool);
364 : : static void init_elim_table (void);
365 : : static void free_reg_equiv (void);
366 : : static void update_eliminables (HARD_REG_SET *);
367 : : static bool update_eliminables_and_spill (void);
368 : : static void elimination_costs_in_insn (rtx_insn *);
369 : : static void spill_hard_reg (unsigned int, int);
370 : : static int finish_spills (int);
371 : : static void scan_paradoxical_subregs (rtx);
372 : : static void count_pseudo (int);
373 : : static void order_regs_for_reload (class insn_chain *);
374 : : static void reload_as_needed (int);
375 : : static void forget_old_reloads_1 (rtx, const_rtx, void *);
376 : : static void forget_marked_reloads (regset);
377 : : static int reload_reg_class_lower (const void *, const void *);
378 : : static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
379 : : machine_mode);
380 : : static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
381 : : machine_mode);
382 : : static int reload_reg_free_p (unsigned int, int, enum reload_type);
383 : : static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
384 : : rtx, rtx, int, int);
385 : : static int free_for_value_p (int, machine_mode, int, enum reload_type,
386 : : rtx, rtx, int, int);
387 : : static int allocate_reload_reg (class insn_chain *, int, int);
388 : : static int conflicts_with_override (rtx);
389 : : static void failed_reload (rtx_insn *, int);
390 : : static int set_reload_reg (int, int);
391 : : static void choose_reload_regs_init (class insn_chain *, rtx *);
392 : : static void choose_reload_regs (class insn_chain *);
393 : : static void emit_input_reload_insns (class insn_chain *, struct reload *,
394 : : rtx, int);
395 : : static void emit_output_reload_insns (class insn_chain *, struct reload *,
396 : : int);
397 : : static void do_input_reload (class insn_chain *, struct reload *, int);
398 : : static void do_output_reload (class insn_chain *, struct reload *, int);
399 : : static void emit_reload_insns (class insn_chain *);
400 : : static void delete_output_reload (rtx_insn *, int, int, rtx);
401 : : static void delete_address_reloads (rtx_insn *, rtx_insn *);
402 : : static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *);
403 : : static void inc_for_reload (rtx, rtx, rtx, poly_int64);
404 : : static void substitute (rtx *, const_rtx, rtx);
405 : : static bool gen_reload_chain_without_interm_reg_p (int, int);
406 : : static int reloads_conflict (int, int);
407 : : static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type);
408 : : static rtx_insn *emit_insn_if_valid_for_reload (rtx);
409 : :
410 : : /* Initialize the reload pass. This is called at the beginning of compilation
411 : : and may be called again if the target is reinitialized. */
412 : :
413 : : void
414 : 0 : init_reload (void)
415 : : {
416 : 0 : int i;
417 : :
418 : : /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
419 : : Set spill_indirect_levels to the number of levels such addressing is
420 : : permitted, zero if it is not permitted at all. */
421 : :
422 : 0 : rtx tem
423 : 0 : = gen_rtx_MEM (Pmode,
424 : 0 : gen_rtx_PLUS (Pmode,
425 : : gen_rtx_REG (Pmode,
426 : : LAST_VIRTUAL_REGISTER + 1),
427 : : gen_int_mode (4, Pmode)));
428 : 0 : spill_indirect_levels = 0;
429 : :
430 : 0 : while (memory_address_p (QImode, tem))
431 : : {
432 : 0 : spill_indirect_levels++;
433 : 0 : tem = gen_rtx_MEM (Pmode, tem);
434 : : }
435 : :
436 : : /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
437 : :
438 : 0 : tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
439 : 0 : indirect_symref_ok = memory_address_p (QImode, tem);
440 : :
441 : : /* See if reg+reg is a valid (and offsettable) address. */
442 : :
443 : 0 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
444 : : {
445 : 0 : tem = gen_rtx_PLUS (Pmode,
446 : : gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
447 : : gen_rtx_REG (Pmode, i));
448 : :
449 : : /* This way, we make sure that reg+reg is an offsettable address. */
450 : 0 : tem = plus_constant (Pmode, tem, 4);
451 : :
452 : 0 : for (int mode = 0; mode < MAX_MACHINE_MODE; mode++)
453 : 0 : if (!double_reg_address_ok[mode]
454 : 0 : && memory_address_p ((enum machine_mode)mode, tem))
455 : 0 : double_reg_address_ok[mode] = 1;
456 : : }
457 : :
458 : : /* Initialize obstack for our rtl allocation. */
459 : 0 : if (reload_startobj == NULL)
460 : : {
461 : 0 : gcc_obstack_init (&reload_obstack);
462 : 0 : reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
463 : : }
464 : :
465 : 0 : INIT_REG_SET (&spilled_pseudos);
466 : 0 : INIT_REG_SET (&changed_allocation_pseudos);
467 : 0 : INIT_REG_SET (&pseudos_counted);
468 : 0 : }
469 : :
470 : : /* List of insn chains that are currently unused. */
471 : : static class insn_chain *unused_insn_chains = 0;
472 : :
473 : : /* Allocate an empty insn_chain structure. */
474 : : class insn_chain *
475 : 0 : new_insn_chain (void)
476 : : {
477 : 0 : class insn_chain *c;
478 : :
479 : 0 : if (unused_insn_chains == 0)
480 : : {
481 : 0 : c = XOBNEW (&reload_obstack, class insn_chain);
482 : 0 : INIT_REG_SET (&c->live_throughout);
483 : 0 : INIT_REG_SET (&c->dead_or_set);
484 : : }
485 : : else
486 : : {
487 : 0 : c = unused_insn_chains;
488 : 0 : unused_insn_chains = c->next;
489 : : }
490 : 0 : c->is_caller_save_insn = 0;
491 : 0 : c->need_operand_change = 0;
492 : 0 : c->need_reload = 0;
493 : 0 : c->need_elim = 0;
494 : 0 : return c;
495 : : }
496 : :
497 : : /* Small utility function to set all regs in hard reg set TO which are
498 : : allocated to pseudos in regset FROM. */
499 : :
500 : : void
501 : 5073215 : compute_use_by_pseudos (HARD_REG_SET *to, regset from)
502 : : {
503 : 5073215 : unsigned int regno;
504 : 5073215 : reg_set_iterator rsi;
505 : :
506 : 5073215 : EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
507 : : {
508 : 0 : int r = reg_renumber[regno];
509 : :
510 : 0 : if (r < 0)
511 : : {
512 : : /* reload_combine uses the information from DF_LIVE_IN,
513 : : which might still contain registers that have not
514 : : actually been allocated since they have an
515 : : equivalence. */
516 : 0 : gcc_assert (ira_conflicts_p || reload_completed);
517 : : }
518 : : else
519 : 0 : add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
520 : : }
521 : 5073215 : }
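/* Editorial usage note (illustrative): within this file the function above
   is typically called on the registers live through an insn, e.g.

       HARD_REG_SET used_by_pseudos;
       CLEAR_HARD_REG_SET (used_by_pseudos);
       compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);

   after which used_by_pseudos holds every hard register currently occupied
   by a pseudo that is live throughout CHAIN's insn.  */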
522 : :
523 : : /* Replace all pseudos found in LOC with their corresponding
524 : : equivalences. */
525 : :
526 : : static void
527 : 0 : replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage)
528 : : {
529 : 0 : rtx x = *loc;
530 : 0 : enum rtx_code code;
531 : 0 : const char *fmt;
532 : 0 : int i, j;
533 : :
534 : 0 : if (! x)
535 : : return;
536 : :
537 : 0 : code = GET_CODE (x);
538 : 0 : if (code == REG)
539 : : {
540 : 0 : unsigned int regno = REGNO (x);
541 : :
542 : 0 : if (regno < FIRST_PSEUDO_REGISTER)
543 : : return;
544 : :
545 : 0 : x = eliminate_regs_1 (x, mem_mode, usage, true, false);
546 : 0 : if (x != *loc)
547 : : {
548 : 0 : *loc = x;
549 : 0 : replace_pseudos_in (loc, mem_mode, usage);
550 : 0 : return;
551 : : }
552 : :
553 : 0 : if (reg_equiv_constant (regno))
554 : 0 : *loc = reg_equiv_constant (regno);
555 : 0 : else if (reg_equiv_invariant (regno))
556 : 0 : *loc = reg_equiv_invariant (regno);
557 : 0 : else if (reg_equiv_mem (regno))
558 : 0 : *loc = reg_equiv_mem (regno);
559 : 0 : else if (reg_equiv_address (regno))
560 : 0 : *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno));
561 : : else
562 : : {
563 : 0 : gcc_assert (!REG_P (regno_reg_rtx[regno])
564 : : || REGNO (regno_reg_rtx[regno]) != regno);
565 : 0 : *loc = regno_reg_rtx[regno];
566 : : }
567 : :
568 : 0 : return;
569 : : }
570 : 0 : else if (code == MEM)
571 : : {
572 : 0 : replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
573 : 0 : return;
574 : : }
575 : :
576 : : /* Process each of our operands recursively. */
577 : 0 : fmt = GET_RTX_FORMAT (code);
578 : 0 : for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
579 : 0 : if (*fmt == 'e')
580 : 0 : replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
581 : 0 : else if (*fmt == 'E')
582 : 0 : for (j = 0; j < XVECLEN (x, i); j++)
583 : 0 : replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
584 : : }
585 : :
586 : : /* Determine if the current function has an exception receiver block
587 : : that reaches the exit block via non-exceptional edges */
588 : :
589 : : static bool
590 : 0 : has_nonexceptional_receiver (void)
591 : : {
592 : 0 : edge e;
593 : 0 : edge_iterator ei;
594 : 0 : basic_block *tos, *worklist, bb;
595 : :
596 : : /* If we're not optimizing, then just err on the safe side. */
597 : 0 : if (!optimize)
598 : : return true;
599 : :
600 : : /* First determine which blocks can reach exit via normal paths. */
601 : 0 : tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
602 : :
603 : 0 : FOR_EACH_BB_FN (bb, cfun)
604 : 0 : bb->flags &= ~BB_REACHABLE;
605 : :
606 : : /* Place the exit block on our worklist. */
607 : 0 : EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
608 : 0 : *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
609 : :
610 : : /* Iterate: find everything reachable from what we've already seen. */
611 : 0 : while (tos != worklist)
612 : : {
613 : 0 : bb = *--tos;
614 : :
615 : 0 : FOR_EACH_EDGE (e, ei, bb->preds)
616 : 0 : if (!(e->flags & EDGE_ABNORMAL))
617 : : {
618 : 0 : basic_block src = e->src;
619 : :
620 : 0 : if (!(src->flags & BB_REACHABLE))
621 : : {
622 : 0 : src->flags |= BB_REACHABLE;
623 : 0 : *tos++ = src;
624 : : }
625 : : }
626 : : }
627 : 0 : free (worklist);
628 : :
629 : : /* Now see if there's a reachable block with an exceptional incoming
630 : : edge. */
631 : 0 : FOR_EACH_BB_FN (bb, cfun)
632 : 0 : if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
633 : : return true;
634 : :
635 : : /* No exceptional block reached exit unexceptionally. */
636 : : return false;
637 : : }
638 : :
639 : : /* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
640 : : zero elements) to MAX_REG_NUM elements.
641 : :
642 : : Initialize all new fields to NULL and update REG_EQUIVS_SIZE. */
643 : : void
644 : 2721613 : grow_reg_equivs (void)
645 : : {
646 : 2721613 : int old_size = vec_safe_length (reg_equivs);
647 : 2721613 : int max_regno = max_reg_num ();
648 : 2721613 : int i;
649 : 2721613 : reg_equivs_t ze;
650 : :
651 : 2721613 : memset (&ze, 0, sizeof (reg_equivs_t));
652 : 2721613 : vec_safe_reserve (reg_equivs, max_regno);
653 : 201726802 : for (i = old_size; i < max_regno; i++)
654 : 196283576 : reg_equivs->quick_insert (i, ze);
655 : 2721613 : }
656 : :
657 : :
658 : : /* Global variables used by reload and its subroutines. */
659 : :
660 : : /* The current basic block while in calculate_elim_costs_all_insns. */
661 : : static basic_block elim_bb;
662 : :
663 : : /* Set during calculate_needs if an insn needs register elimination. */
664 : : static int something_needs_elimination;
665 : : /* Set during calculate_needs if an insn needs an operand changed. */
666 : : static int something_needs_operands_changed;
667 : : /* Set by alter_regs if we spilled a register to the stack. */
668 : : static bool something_was_spilled;
669 : :
670 : : /* Nonzero means we couldn't get enough spill regs. */
671 : : static int failure;
672 : :
673 : : /* Temporary array of pseudo-register number. */
674 : : static int *temp_pseudo_reg_arr;
675 : :
676 : : /* If a pseudo has no hard reg, delete the insns that made the equivalence.
677 : : If that insn didn't set the register (i.e., it copied the register to
678 : : memory), just delete that insn instead of the equivalencing insn plus
679 : : anything now dead. If we call delete_dead_insn on that insn, we may
680 : : delete the insn that actually sets the register if the register dies
681 : : there and that is incorrect. */
682 : : static void
683 : 0 : remove_init_insns ()
684 : : {
685 : 0 : for (int i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
686 : : {
687 : 0 : if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0)
688 : : {
689 : : rtx list;
690 : 0 : for (list = reg_equiv_init (i); list; list = XEXP (list, 1))
691 : : {
692 : 0 : rtx_insn *equiv_insn = as_a <rtx_insn *> (XEXP (list, 0));
693 : :
694 : : /* If we already deleted the insn or if it may trap, we can't
695 : : delete it. The latter case shouldn't happen, but can
696 : : if an insn has a variable address, gets a REG_EH_REGION
697 : : note added to it, and then gets converted into a load
698 : : from a constant address. */
699 : 0 : if (NOTE_P (equiv_insn)
700 : 0 : || can_throw_internal (equiv_insn))
701 : : ;
702 : 0 : else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
703 : 0 : delete_dead_insn (equiv_insn);
704 : : else
705 : 0 : SET_INSN_DELETED (equiv_insn);
706 : : }
707 : : }
708 : : }
709 : 0 : }
710 : :
711 : : /* Return true if remove_init_insns will delete INSN. */
712 : : static bool
713 : 0 : will_delete_init_insn_p (rtx_insn *insn)
714 : : {
715 : 0 : rtx set = single_set (insn);
716 : 0 : if (!set || !REG_P (SET_DEST (set)))
717 : : return false;
718 : 0 : unsigned regno = REGNO (SET_DEST (set));
719 : :
720 : 0 : if (can_throw_internal (insn))
721 : : return false;
722 : :
723 : 0 : if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
724 : : return false;
725 : :
726 : 0 : for (rtx list = reg_equiv_init (regno); list; list = XEXP (list, 1))
727 : : {
728 : 0 : rtx equiv_insn = XEXP (list, 0);
729 : 0 : if (equiv_insn == insn)
730 : : return true;
731 : : }
732 : : return false;
733 : : }
734 : :
735 : : /* Main entry point for the reload pass.
736 : :
737 : : FIRST is the first insn of the function being compiled.
738 : :
739 : : GLOBAL nonzero means we were called from global_alloc
740 : : and should attempt to reallocate any pseudoregs that we
741 : : displace from hard regs we will use for reloads.
742 : : If GLOBAL is zero, we do not have enough information to do that,
743 : : so any pseudo reg that is spilled must go to the stack.
744 : :
745 : : Return value is TRUE if reload likely left dead insns in the
746 : : stream and a DCE pass should be run to eliminate them. Else the
747 : : return value is FALSE. */
748 : :
749 : : bool
750 : 0 : reload (rtx_insn *first, int global)
751 : : {
752 : 0 : int i, n;
753 : 0 : rtx_insn *insn;
754 : 0 : struct elim_table *ep;
755 : 0 : basic_block bb;
756 : 0 : bool inserted;
757 : :
758 : : /* Make sure even insns with volatile mem refs are recognizable. */
759 : 0 : init_recog ();
760 : :
761 : 0 : failure = 0;
762 : :
763 : 0 : reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
764 : :
765 : : /* Make sure that the last insn in the chain
766 : : is not something that needs reloading. */
767 : 0 : emit_note (NOTE_INSN_DELETED);
768 : :
769 : : /* Enable find_equiv_reg to distinguish insns made by reload. */
770 : 0 : reload_first_uid = get_max_uid ();
771 : :
772 : : /* Initialize the secondary memory table. */
773 : 0 : clear_secondary_mem ();
774 : :
775 : : /* We don't have a stack slot for any spill reg yet. */
776 : 0 : memset (spill_stack_slot, 0, sizeof spill_stack_slot);
777 : 0 : memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
778 : :
779 : : /* Initialize the save area information for caller-save, in case some
780 : : are needed. */
781 : 0 : init_save_areas ();
782 : :
783 : : /* Compute which hard registers are now in use
784 : : as homes for pseudo registers.
785 : : This is done here rather than (eg) in global_alloc
786 : : because this point is reached even if not optimizing. */
787 : 0 : for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
788 : 0 : mark_home_live (i);
789 : :
790 : : /* A function that has a nonlocal label that can reach the exit
791 : : block via non-exceptional paths must save all call-saved
792 : : registers. */
793 : 0 : if (cfun->has_nonlocal_label
794 : 0 : && has_nonexceptional_receiver ())
795 : 0 : crtl->saves_all_registers = 1;
796 : :
797 : 0 : if (crtl->saves_all_registers)
798 : 0 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
799 : 0 : if (! crtl->abi->clobbers_full_reg_p (i)
800 : 0 : && ! fixed_regs[i]
801 : 0 : && ! LOCAL_REGNO (i))
802 : 0 : df_set_regs_ever_live (i, true);
803 : :
804 : : /* Find all the pseudo registers that didn't get hard regs
805 : : but do have known equivalent constants or memory slots.
806 : : These include parameters (known equivalent to parameter slots)
807 : : and cse'd or loop-moved constant memory addresses.
808 : :
809 : : Record constant equivalents in reg_equiv_constant
810 : : so they will be substituted by find_reloads.
811 : : Record memory equivalents in reg_mem_equiv so they can
812 : : be substituted eventually by altering the REG-rtx's. */
813 : :
814 : 0 : grow_reg_equivs ();
815 : 0 : reg_old_renumber = XCNEWVEC (short, max_regno);
816 : 0 : memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
817 : 0 : pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
818 : 0 : pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
819 : :
820 : 0 : CLEAR_HARD_REG_SET (bad_spill_regs_global);
821 : :
822 : 0 : init_eliminable_invariants (first, true);
823 : 0 : init_elim_table ();
824 : :
825 : : /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
826 : : stack slots to the pseudos that lack hard regs or equivalents.
827 : : Do not touch virtual registers. */
828 : :
829 : 0 : temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
830 : 0 : for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
831 : 0 : temp_pseudo_reg_arr[n++] = i;
832 : :
833 : 0 : if (ira_conflicts_p)
834 : : /* Ask IRA to order pseudo-registers for better stack slot
835 : : sharing. */
836 : 0 : ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_mode);
837 : :
838 : 0 : for (i = 0; i < n; i++)
839 : 0 : alter_reg (temp_pseudo_reg_arr[i], -1, false);
840 : :
841 : : /* If we have some registers we think can be eliminated, scan all insns to
842 : : see if there is an insn that sets one of these registers to something
843 : : other than itself plus a constant. If so, the register cannot be
844 : : eliminated. Doing this scan here eliminates an extra pass through the
845 : : main reload loop in the most common case where register elimination
846 : : cannot be done. */
847 : 0 : for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
848 : 0 : if (INSN_P (insn))
849 : 0 : note_pattern_stores (PATTERN (insn), mark_not_eliminable, NULL);
850 : :
851 : 0 : maybe_fix_stack_asms ();
852 : :
853 : 0 : insns_need_reload = 0;
854 : 0 : something_needs_elimination = 0;
855 : :
856 : : /* Initialize to -1, which means take the first spill register. */
857 : 0 : last_spill_reg = -1;
858 : :
859 : : /* Spill any hard regs that we know we can't eliminate. */
860 : 0 : CLEAR_HARD_REG_SET (used_spill_regs);
861 : : /* There can be multiple ways to eliminate a register;
862 : : they should be listed adjacently.
863 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
864 : 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; )
865 : : {
866 : 0 : int from = ep->from;
867 : 0 : int can_eliminate = 0;
868 : 0 : do
869 : : {
870 : 0 : can_eliminate |= ep->can_eliminate;
871 : 0 : ep++;
872 : : }
873 : 0 : while (ep < ®_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
874 : 0 : while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
875 : 0 : spill_hard_reg (from, 1);
876 : : }
877 : :
878 : 0 : if (!HARD_FRAME_POINTER_IS_FRAME_POINTER && frame_pointer_needed)
879 : 0 : spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
880 : :
881 : 0 : finish_spills (global);
882 : :
883 : : /* From now on, we may need to generate moves differently. We may also
884 : : allow modifications of insns which cause them to not be recognized.
885 : : Any such modifications will be cleaned up during reload itself. */
886 : 0 : reload_in_progress = 1;
887 : :
888 : : /* This loop scans the entire function each go-round
889 : : and repeats until one repetition spills no additional hard regs. */
890 : 0 : for (;;)
891 : : {
892 : 0 : int something_changed;
893 : 0 : poly_int64 starting_frame_size;
894 : :
895 : 0 : starting_frame_size = get_frame_size ();
896 : 0 : something_was_spilled = false;
897 : :
898 : 0 : set_initial_elim_offsets ();
899 : 0 : set_initial_label_offsets ();
900 : :
901 : : /* For each pseudo register that has an equivalent location defined,
902 : : try to eliminate any eliminable registers (such as the frame pointer)
903 : : assuming initial offsets for the replacement register, which
904 : : is the normal case.
905 : :
906 : : If the resulting location is directly addressable, substitute
907 : : the MEM we just got directly for the old REG.
908 : :
909 : : If it is not addressable but is a constant or the sum of a hard reg
910 : : and constant, it is probably not addressable because the constant is
911 : : out of range, in that case record the address; we will generate
912 : : hairy code to compute the address in a register each time it is
913 : : needed. Similarly if it is a hard register, but one that is not
914 : : valid as an address register.
915 : :
916 : : If the location is not addressable, but does not have one of the
917 : : above forms, assign a stack slot. We have to do this to avoid the
918 : : potential of producing lots of reloads if, e.g., a location involves
919 : : a pseudo that didn't get a hard register and has an equivalent memory
920 : : location that also involves a pseudo that didn't get a hard register.
921 : :
922 : : Perhaps at some point we will improve reload_when_needed handling
923 : : so this problem goes away. But that's very hairy. */
924 : :
925 : 0 : for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
926 : 0 : if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i))
927 : : {
928 : 0 : rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode,
929 : : NULL_RTX);
930 : :
931 : 0 : if (strict_memory_address_addr_space_p
932 : 0 : (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
933 : 0 : MEM_ADDR_SPACE (x)))
934 : 0 : reg_equiv_mem (i) = x, reg_equiv_address (i) = 0;
935 : 0 : else if (CONSTANT_P (XEXP (x, 0))
936 : 0 : || (REG_P (XEXP (x, 0))
937 : 0 : && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
938 : 0 : || (GET_CODE (XEXP (x, 0)) == PLUS
939 : 0 : && REG_P (XEXP (XEXP (x, 0), 0))
940 : 0 : && (REGNO (XEXP (XEXP (x, 0), 0))
941 : : < FIRST_PSEUDO_REGISTER)
942 : 0 : && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
943 : 0 : reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0;
944 : : else
945 : : {
946 : : /* Make a new stack slot. Then indicate that something
947 : : changed so we go back and recompute offsets for
948 : : eliminable registers because the allocation of memory
949 : : below might change some offset. reg_equiv_{mem,address}
950 : : will be set up for this pseudo on the next pass around
951 : : the loop. */
952 : 0 : reg_equiv_memory_loc (i) = 0;
953 : 0 : reg_equiv_init (i) = 0;
954 : 0 : alter_reg (i, -1, true);
955 : : }
956 : : }
957 : :
958 : 0 : if (caller_save_needed)
959 : 0 : setup_save_areas ();
960 : :
961 : 0 : if (maybe_ne (starting_frame_size, 0) && crtl->stack_alignment_needed)
962 : : {
963 : : /* If we have a stack frame, we must align it now. The
964 : : stack size may be a part of the offset computation for
965 : : register elimination. So if this changes the stack size,
966 : : then repeat the elimination bookkeeping. We don't
967 : : realign when there is no stack, as that will cause a
968 : : stack frame when none is needed should
969 : : TARGET_STARTING_FRAME_OFFSET not be already aligned to
970 : : STACK_BOUNDARY. */
971 : 0 : assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
972 : : }
973 : : /* If we allocated another stack slot, redo elimination bookkeeping. */
974 : 0 : if (something_was_spilled
975 : 0 : || maybe_ne (starting_frame_size, get_frame_size ()))
976 : : {
977 : 0 : if (update_eliminables_and_spill ())
978 : 0 : finish_spills (0);
979 : 0 : continue;
980 : : }
981 : :
982 : 0 : if (caller_save_needed)
983 : : {
984 : 0 : save_call_clobbered_regs ();
985 : : /* That might have allocated new insn_chain structures. */
986 : 0 : reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
987 : : }
988 : :
989 : 0 : calculate_needs_all_insns (global);
990 : :
991 : 0 : if (! ira_conflicts_p)
992 : : /* Don't do it for IRA. We need this info because we don't
993 : : change live_throughout and dead_or_set for chains when IRA
994 : : is used. */
995 : 0 : CLEAR_REG_SET (&spilled_pseudos);
996 : :
997 : 0 : something_changed = 0;
998 : :
999 : : /* If we allocated any new memory locations, make another pass
1000 : : since it might have changed elimination offsets. */
1001 : 0 : if (something_was_spilled
1002 : 0 : || maybe_ne (starting_frame_size, get_frame_size ()))
1003 : : something_changed = 1;
1004 : :
1005 : : /* Even if the frame size remained the same, we might still have
1006 : : changed elimination offsets, e.g. if find_reloads called
1007 : : force_const_mem requiring the back end to allocate a constant
1008 : : pool base register that needs to be saved on the stack. */
1009 : 0 : else if (!verify_initial_elim_offsets ())
1010 : 0 : something_changed = 1;
1011 : :
1012 : 0 : if (update_eliminables_and_spill ())
1013 : : {
1014 : 0 : finish_spills (0);
1015 : 0 : something_changed = 1;
1016 : : }
1017 : : else
1018 : : {
1019 : 0 : select_reload_regs ();
1020 : 0 : if (failure)
1021 : 0 : goto failed;
1022 : 0 : if (insns_need_reload)
1023 : 0 : something_changed |= finish_spills (global);
1024 : : }
1025 : :
1026 : 0 : if (! something_changed)
1027 : : break;
1028 : :
1029 : 0 : if (caller_save_needed)
1030 : 0 : delete_caller_save_insns ();
1031 : :
1032 : 0 : obstack_free (&reload_obstack, reload_firstobj);
1033 : : }
1034 : :
1035 : : /* If global-alloc was run, notify it of any register eliminations we have
1036 : : done. */
1037 : 0 : if (global)
1038 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1039 : 0 : if (ep->can_eliminate)
1040 : 0 : mark_elimination (ep->from, ep->to);
1041 : :
1042 : 0 : remove_init_insns ();
1043 : :
1044 : : /* Use the reload registers where necessary
1045 : : by generating move instructions to move the must-be-register
1046 : : values into or out of the reload registers. */
1047 : :
1048 : 0 : if (insns_need_reload != 0 || something_needs_elimination
1049 : 0 : || something_needs_operands_changed)
1050 : : {
1051 : 0 : poly_int64 old_frame_size = get_frame_size ();
1052 : :
1053 : 0 : reload_as_needed (global);
1054 : :
1055 : 0 : gcc_assert (known_eq (old_frame_size, get_frame_size ()));
1056 : :
1057 : 0 : gcc_assert (verify_initial_elim_offsets ());
1058 : : }
1059 : :
1060 : : /* If we were able to eliminate the frame pointer, show that it is no
1061 : : longer live at the start of any basic block. If it is live by
1062 : : virtue of being in a pseudo, that pseudo will be marked live
1063 : : and hence the frame pointer will be known to be live via that
1064 : : pseudo. */
1065 : :
1066 : 0 : if (! frame_pointer_needed)
1067 : 0 : FOR_EACH_BB_FN (bb, cfun)
1068 : 0 : bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1069 : :
1070 : : /* Come here (with failure set nonzero) if we can't get enough spill
1071 : : regs. */
1072 : 0 : failed:
1073 : :
1074 : 0 : CLEAR_REG_SET (&changed_allocation_pseudos);
1075 : 0 : CLEAR_REG_SET (&spilled_pseudos);
1076 : 0 : reload_in_progress = 0;
1077 : :
1078 : : /* Now eliminate all pseudo regs by modifying them into
1079 : : their equivalent memory references.
1080 : : The REG-rtx's for the pseudos are modified in place,
1081 : : so all insns that used to refer to them now refer to memory.
1082 : :
1083 : : For a reg that has a reg_equiv_address, all those insns
1084 : : were changed by reloading so that no insns refer to it any longer;
1085 : : but the DECL_RTL of a variable decl may refer to it,
1086 : : and if so this causes the debugging info to mention the variable. */
1087 : :
1088 : 0 : for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1089 : : {
1090 : 0 : rtx addr = 0;
1091 : :
1092 : 0 : if (reg_equiv_mem (i))
1093 : 0 : addr = XEXP (reg_equiv_mem (i), 0);
1094 : :
1095 : 0 : if (reg_equiv_address (i))
1096 : 0 : addr = reg_equiv_address (i);
1097 : :
1098 : 0 : if (addr)
1099 : : {
1100 : 0 : if (reg_renumber[i] < 0)
1101 : : {
1102 : 0 : rtx reg = regno_reg_rtx[i];
1103 : :
1104 : 0 : REG_USERVAR_P (reg) = 0;
1105 : 0 : PUT_CODE (reg, MEM);
1106 : 0 : XEXP (reg, 0) = addr;
1107 : 0 : if (reg_equiv_memory_loc (i))
1108 : 0 : MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i));
1109 : : else
1110 : 0 : MEM_ATTRS (reg) = 0;
1111 : 0 : MEM_NOTRAP_P (reg) = 1;
1112 : : }
1113 : 0 : else if (reg_equiv_mem (i))
1114 : 0 : XEXP (reg_equiv_mem (i), 0) = addr;
1115 : : }
1116 : :
1117 : : /* We don't want complex addressing modes in debug insns
1118 : : if simpler ones will do, so delegitimize equivalences
1119 : : in debug insns. */
1120 : 0 : if (MAY_HAVE_DEBUG_BIND_INSNS && reg_renumber[i] < 0)
1121 : : {
1122 : 0 : rtx reg = regno_reg_rtx[i];
1123 : 0 : rtx equiv = 0;
1124 : 0 : df_ref use, next;
1125 : :
1126 : 0 : if (reg_equiv_constant (i))
1127 : : equiv = reg_equiv_constant (i);
1128 : 0 : else if (reg_equiv_invariant (i))
1129 : : equiv = reg_equiv_invariant (i);
1130 : 0 : else if (reg && MEM_P (reg))
1131 : 0 : equiv = targetm.delegitimize_address (reg);
1132 : 0 : else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1133 : : equiv = reg;
1134 : :
1135 : 0 : if (equiv == reg)
1136 : 0 : continue;
1137 : :
1138 : 0 : for (use = DF_REG_USE_CHAIN (i); use; use = next)
1139 : : {
1140 : 0 : insn = DF_REF_INSN (use);
1141 : :
1142 : : /* Make sure the next ref is for a different instruction,
1143 : : so that we're not affected by the rescan. */
1144 : 0 : next = DF_REF_NEXT_REG (use);
1145 : 0 : while (next && DF_REF_INSN (next) == insn)
1146 : 0 : next = DF_REF_NEXT_REG (next);
1147 : :
1148 : 0 : if (DEBUG_BIND_INSN_P (insn))
1149 : : {
1150 : 0 : if (!equiv)
1151 : : {
1152 : 0 : INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1153 : 0 : df_insn_rescan_debug_internal (insn);
1154 : : }
1155 : : else
1156 : 0 : INSN_VAR_LOCATION_LOC (insn)
1157 : 0 : = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1158 : : reg, equiv);
1159 : : }
1160 : : }
1161 : : }
1162 : : }
1163 : :
1164 : : /* We must set reload_completed now since the cleanup_subreg_operands call
1165 : : below will re-recognize each insn and reload may have generated insns
1166 : : which are only valid during and after reload. */
1167 : 0 : reload_completed = 1;
1168 : :
1169 : : /* Make a pass over all the insns and delete all USEs which we inserted
1170 : : only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1171 : : notes. Delete all CLOBBER insns, except those that refer to the return
1172 : : value and the special mem:BLK CLOBBERs added to prevent the scheduler
1173 : : from misarranging variable-array code, and simplify (subreg (reg))
1174 : : operands. Strip and regenerate REG_INC notes that may have been moved
1175 : : around. */
1176 : :
1177 : 0 : for (insn = first; insn; insn = NEXT_INSN (insn))
1178 : 0 : if (INSN_P (insn))
1179 : : {
1180 : 0 : rtx *pnote;
1181 : :
1182 : 0 : if (CALL_P (insn))
1183 : 0 : replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1184 : : VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1185 : :
1186 : 0 : if ((GET_CODE (PATTERN (insn)) == USE
1187 : : /* We mark with QImode USEs introduced by reload itself. */
1188 : 0 : && (GET_MODE (insn) == QImode
1189 : 0 : || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1190 : 0 : || (GET_CODE (PATTERN (insn)) == CLOBBER
1191 : 0 : && (!MEM_P (XEXP (PATTERN (insn), 0))
1192 : 0 : || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1193 : 0 : || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1194 : 0 : && XEXP (XEXP (PATTERN (insn), 0), 0)
1195 : 0 : != stack_pointer_rtx))
1196 : 0 : && (!REG_P (XEXP (PATTERN (insn), 0))
1197 : 0 : || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1198 : : {
1199 : 0 : delete_insn (insn);
1200 : 0 : continue;
1201 : : }
1202 : :
1203 : : /* Some CLOBBERs may survive until here and still reference unassigned
1204 : : pseudos with const equivalent, which may in turn cause ICE in later
1205 : : passes if the reference remains in place. */
1206 : 0 : if (GET_CODE (PATTERN (insn)) == CLOBBER)
1207 : 0 : replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1208 : 0 : VOIDmode, PATTERN (insn));
1209 : :
1210 : : /* Discard obvious no-ops, even without -O. This optimization
1211 : : is fast and doesn't interfere with debugging. */
1212 : 0 : if (NONJUMP_INSN_P (insn)
1213 : 0 : && GET_CODE (PATTERN (insn)) == SET
1214 : 0 : && REG_P (SET_SRC (PATTERN (insn)))
1215 : 0 : && REG_P (SET_DEST (PATTERN (insn)))
1216 : 0 : && (REGNO (SET_SRC (PATTERN (insn)))
1217 : 0 : == REGNO (SET_DEST (PATTERN (insn)))))
1218 : : {
1219 : 0 : delete_insn (insn);
1220 : 0 : continue;
1221 : : }
1222 : :
1223 : 0 : pnote = &REG_NOTES (insn);
1224 : 0 : while (*pnote != 0)
1225 : : {
1226 : 0 : if (REG_NOTE_KIND (*pnote) == REG_DEAD
1227 : 0 : || REG_NOTE_KIND (*pnote) == REG_UNUSED
1228 : 0 : || REG_NOTE_KIND (*pnote) == REG_INC)
1229 : 0 : *pnote = XEXP (*pnote, 1);
1230 : : else
1231 : 0 : pnote = &XEXP (*pnote, 1);
1232 : : }
1233 : :
1234 : 0 : if (AUTO_INC_DEC)
1235 : : add_auto_inc_notes (insn, PATTERN (insn));
1236 : :
1237 : : /* Simplify (subreg (reg)) if it appears as an operand. */
1238 : 0 : cleanup_subreg_operands (insn);
1239 : :
1240 : : /* Clean up invalid ASMs so that they don't confuse later passes.
1241 : : See PR 21299. */
1242 : 0 : if (asm_noperands (PATTERN (insn)) >= 0)
1243 : : {
1244 : 0 : extract_insn (insn);
1245 : 0 : if (!constrain_operands (1, get_enabled_alternatives (insn)))
1246 : : {
1247 : 0 : error_for_asm (insn,
1248 : : "%<asm%> operand has impossible constraints");
1249 : 0 : delete_insn (insn);
1250 : 0 : continue;
1251 : : }
1252 : : }
1253 : : }
1254 : :
1255 : 0 : free (temp_pseudo_reg_arr);
1256 : :
1257 : : /* Indicate that we no longer have known memory locations or constants. */
1258 : 0 : free_reg_equiv ();
1259 : :
1260 : 0 : free (reg_max_ref_mode);
1261 : 0 : free (reg_old_renumber);
1262 : 0 : free (pseudo_previous_regs);
1263 : 0 : free (pseudo_forbidden_regs);
1264 : :
1265 : 0 : CLEAR_HARD_REG_SET (used_spill_regs);
1266 : 0 : for (i = 0; i < n_spills; i++)
1267 : 0 : SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1268 : :
1269 : : /* Free all the insn_chain structures at once. */
1270 : 0 : obstack_free (&reload_obstack, reload_startobj);
1271 : 0 : unused_insn_chains = 0;
1272 : :
1273 : 0 : inserted = fixup_abnormal_edges ();
1274 : :
1275 : : /* We've possibly turned single trapping insn into multiple ones. */
1276 : 0 : if (cfun->can_throw_non_call_exceptions)
1277 : : {
1278 : 0 : auto_sbitmap blocks (last_basic_block_for_fn (cfun));
1279 : 0 : bitmap_ones (blocks);
1280 : 0 : find_many_sub_basic_blocks (blocks);
1281 : 0 : }
1282 : :
1283 : 0 : if (inserted)
1284 : 0 : commit_edge_insertions ();
1285 : :
1286 : : /* Replacing pseudos with their memory equivalents might have
1287 : : created shared rtx. Subsequent passes would get confused
1288 : : by this, so unshare everything here. */
1289 : 0 : unshare_all_rtl_again (first);
1290 : :
1291 : : #ifdef STACK_BOUNDARY
1292 : : /* init_emit has set the alignment of the hard frame pointer
1293 : : to STACK_BOUNDARY. It is very likely no longer valid if
1294 : : the hard frame pointer was used for register allocation. */
1295 : 0 : if (!frame_pointer_needed)
1296 : 0 : REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1297 : : #endif
1298 : :
1299 : 0 : substitute_stack.release ();
1300 : :
1301 : 0 : gcc_assert (bitmap_empty_p (&spilled_pseudos));
1302 : :
1303 : 0 : reload_completed = !failure;
1304 : :
1305 : 0 : return need_dce;
1306 : : }
1307 : :
1308 : : /* Yet another special case. Unfortunately, reg-stack forces people to
1309 : : write incorrect clobbers in asm statements. These clobbers must not
1310 : : cause the register to appear in bad_spill_regs, otherwise we'll call
1311 : : fatal_insn later. We clear the corresponding regnos in the live
1312 : : register sets to avoid this.
1313 : : The whole thing is rather sick, I'm afraid. */
1314 : :
1315 : : static void
1316 : 0 : maybe_fix_stack_asms (void)
1317 : : {
1318 : : #ifdef STACK_REGS
1319 : 0 : const char *constraints[MAX_RECOG_OPERANDS];
1320 : 0 : machine_mode operand_mode[MAX_RECOG_OPERANDS];
1321 : 0 : class insn_chain *chain;
1322 : :
1323 : 0 : for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1324 : : {
1325 : 0 : int i, noperands;
1326 : 0 : HARD_REG_SET clobbered, allowed;
1327 : 0 : rtx pat;
1328 : :
1329 : 0 : if (! INSN_P (chain->insn)
1330 : 0 : || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1331 : 0 : continue;
1332 : 0 : pat = PATTERN (chain->insn);
1333 : 0 : if (GET_CODE (pat) != PARALLEL)
1334 : 0 : continue;
1335 : :
1336 : 0 : CLEAR_HARD_REG_SET (clobbered);
1337 : 0 : CLEAR_HARD_REG_SET (allowed);
1338 : :
1339 : : /* First, make a mask of all stack regs that are clobbered. */
1340 : 0 : for (i = 0; i < XVECLEN (pat, 0); i++)
1341 : : {
1342 : 0 : rtx t = XVECEXP (pat, 0, i);
1343 : 0 : if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1344 : 0 : SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1345 : : }
1346 : :
1347 : : /* Get the operand values and constraints out of the insn. */
1348 : 0 : decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1349 : : constraints, operand_mode, NULL);
1350 : :
1351 : : /* For every operand, see what registers are allowed. */
1352 : 0 : for (i = 0; i < noperands; i++)
1353 : : {
1354 : 0 : const char *p = constraints[i];
1355 : : /* For every alternative, we compute the class of registers allowed
1356 : : for reloading in CLS, and merge its contents into the reg set
1357 : : ALLOWED. */
1358 : 0 : int cls = (int) NO_REGS;
1359 : :
1360 : 0 : for (;;)
1361 : : {
1362 : 0 : char c = *p;
1363 : :
1364 : 0 : if (c == '\0' || c == ',' || c == '#')
1365 : : {
1366 : : /* End of one alternative - mark the regs in the current
1367 : : class, and reset the class. */
1368 : 0 : allowed |= reg_class_contents[cls];
1369 : 0 : cls = NO_REGS;
1370 : 0 : p++;
1371 : 0 : if (c == '#')
1372 : 0 : do {
1373 : 0 : c = *p++;
1374 : 0 : } while (c != '\0' && c != ',');
1375 : 0 : if (c == '\0')
1376 : : break;
1377 : 0 : continue;
1378 : : }
1379 : :
1380 : 0 : switch (c)
1381 : : {
1382 : 0 : case 'g':
1383 : 0 : cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1384 : 0 : break;
1385 : :
1386 : 0 : default:
1387 : 0 : enum constraint_num cn = lookup_constraint (p);
1388 : 0 : if (insn_extra_address_constraint (cn))
1389 : 0 : cls = (int) reg_class_subunion[cls]
1390 : 0 : [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
1391 : 0 : ADDRESS, SCRATCH, chain->insn)];
1392 : : else
1393 : 0 : cls = (int) reg_class_subunion[cls]
1394 : 0 : [reg_class_for_constraint (cn)];
1395 : : break;
1396 : : }
1397 : 0 : p += CONSTRAINT_LEN (c, p);
1398 : : }
1399 : : }
1400 : : /* Those of the registers which are clobbered, but allowed by the
1401 : : constraints, must be usable as reload registers. So clear them
1402 : : out of the life information. */
1403 : 0 : allowed &= clobbered;
1404 : 0 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1405 : 0 : if (TEST_HARD_REG_BIT (allowed, i))
1406 : : {
1407 : 0 : CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1408 : 0 : CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1409 : : }
1410 : : }
1411 : :
1412 : : #endif
1413 : 0 : }
1414 : :
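/* To illustrate the constraint walk in maybe_fix_stack_asms, take a
   hypothetical x87 asm operand whose constraint string is "=t,m" (the
   string is invented for this example).  The '=' modifier maps to no
   class, 't' in the first alternative maps to the class holding the x87
   top-of-stack register, and the ',' closes that alternative; 'm' in the
   second is a memory constraint, for which reg_class_for_constraint
   returns NO_REGS.  Each alternative ends by doing

       allowed |= reg_class_contents[cls];

   so ALLOWED ends up containing the top-of-stack register, and if the asm
   also clobbers it, that register is cleared from the insn's live sets so
   it can still be used as a reload register.  */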
1415 : : /* Copy the global variables n_reloads and rld into the corresponding elts
1416 : : of CHAIN. */
1417 : : static void
1418 : 0 : copy_reloads (class insn_chain *chain)
1419 : : {
1420 : 0 : chain->n_reloads = n_reloads;
1421 : 0 : chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1422 : 0 : memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1423 : 0 : reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1424 : 0 : }
1425 : :
1426 : : /* Walk the chain of insns, and determine for each whether it needs reloads
1427 : : and/or eliminations. Build the corresponding insns_need_reload list, and
1428 : : set something_needs_elimination as appropriate. */
1429 : : static void
1430 : 0 : calculate_needs_all_insns (int global)
1431 : : {
1432 : 0 : class insn_chain **pprev_reload = &insns_need_reload;
1433 : 0 : class insn_chain *chain, *next = 0;
1434 : :
1435 : 0 : something_needs_elimination = 0;
1436 : :
1437 : 0 : reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1438 : 0 : for (chain = reload_insn_chain; chain != 0; chain = next)
1439 : : {
1440 : 0 : rtx_insn *insn = chain->insn;
1441 : :
1442 : 0 : next = chain->next;
1443 : :
1444 : : /* Clear out the shortcuts. */
1445 : 0 : chain->n_reloads = 0;
1446 : 0 : chain->need_elim = 0;
1447 : 0 : chain->need_reload = 0;
1448 : 0 : chain->need_operand_change = 0;
1449 : :
1450 : : /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1451 : : include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1452 : : what effects this has on the known offsets at labels. */
1453 : :
1454 : 0 : if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
1455 : 0 : || (INSN_P (insn) && REG_NOTES (insn) != 0))
1456 : 0 : set_label_offsets (insn, insn, 0);
1457 : :
1458 : 0 : if (INSN_P (insn))
1459 : : {
1460 : 0 : rtx old_body = PATTERN (insn);
1461 : 0 : int old_code = INSN_CODE (insn);
1462 : 0 : rtx old_notes = REG_NOTES (insn);
1463 : 0 : int did_elimination = 0;
1464 : 0 : int operands_changed = 0;
1465 : :
1466 : : /* Skip insns that only set an equivalence. */
1467 : 0 : if (will_delete_init_insn_p (insn))
1468 : 0 : continue;
1469 : :
1470 : : /* If needed, eliminate any eliminable registers. */
1471 : 0 : if (num_eliminable || num_eliminable_invariants)
1472 : 0 : did_elimination = eliminate_regs_in_insn (insn, 0);
1473 : :
1474 : : /* Analyze the instruction. */
1475 : 0 : operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1476 : : global, spill_reg_order);
1477 : :
1478 : : /* If a no-op set needs more than one reload, this is likely
1479 : : to be something that needs input address reloads. We
1480 : : can't get rid of this cleanly later, and it is of no use
1481 : : anyway, so discard it now.
1482 : : We only do this when expensive_optimizations is enabled,
1483 : : since this complements reload inheritance / output
1484 : : reload deletion, and it can make debugging harder. */
1485 : 0 : if (flag_expensive_optimizations && n_reloads > 1)
1486 : : {
1487 : 0 : rtx set = single_set (insn);
1488 : 0 : if (set
1489 : 0 : &&
1490 : 0 : ((SET_SRC (set) == SET_DEST (set)
1491 : 0 : && REG_P (SET_SRC (set))
1492 : 0 : && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1493 : 0 : || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
1494 : 0 : && reg_renumber[REGNO (SET_SRC (set))] < 0
1495 : 0 : && reg_renumber[REGNO (SET_DEST (set))] < 0
1496 : 0 : && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL
1497 : 0 : && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL
1498 : 0 : && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))),
1499 : 0 : reg_equiv_memory_loc (REGNO (SET_DEST (set)))))))
1500 : : {
1501 : 0 : if (ira_conflicts_p)
1502 : : /* Inform IRA about the insn deletion. */
1503 : 0 : ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
1504 : 0 : REGNO (SET_SRC (set)));
1505 : 0 : delete_insn (insn);
1506 : : /* Delete it from the reload chain. */
1507 : 0 : if (chain->prev)
1508 : 0 : chain->prev->next = next;
1509 : : else
1510 : 0 : reload_insn_chain = next;
1511 : 0 : if (next)
1512 : 0 : next->prev = chain->prev;
1513 : 0 : chain->next = unused_insn_chains;
1514 : 0 : unused_insn_chains = chain;
1515 : 0 : continue;
1516 : : }
1517 : : }
1518 : 0 : if (num_eliminable)
1519 : 0 : update_eliminable_offsets ();
1520 : :
1521 : : /* Remember for later shortcuts which insns had any reloads or
1522 : : register eliminations. */
1523 : 0 : chain->need_elim = did_elimination;
1524 : 0 : chain->need_reload = n_reloads > 0;
1525 : 0 : chain->need_operand_change = operands_changed;
1526 : :
1527 : : /* Discard any register replacements done. */
1528 : 0 : if (did_elimination)
1529 : : {
1530 : 0 : obstack_free (&reload_obstack, reload_insn_firstobj);
1531 : 0 : PATTERN (insn) = old_body;
1532 : 0 : INSN_CODE (insn) = old_code;
1533 : 0 : REG_NOTES (insn) = old_notes;
1534 : 0 : something_needs_elimination = 1;
1535 : : }
1536 : :
1537 : 0 : something_needs_operands_changed |= operands_changed;
1538 : :
1539 : 0 : if (n_reloads != 0)
1540 : : {
1541 : 0 : copy_reloads (chain);
1542 : 0 : *pprev_reload = chain;
1543 : 0 : pprev_reload = &chain->next_need_reload;
1544 : : }
1545 : : }
1546 : : }
1547 : 0 : *pprev_reload = 0;
1548 : 0 : }
1549 : :
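/* As an illustration of the no-op-move deletion above (register numbers
   invented): a move like (set (reg:SI 130) (reg:SI 131)) where neither
   pseudo received a hard register and both share the same
   reg_equiv_memory_loc would otherwise need an input and an output reload
   through the very same stack slot.  calculate_needs_all_insns recognizes
   this (and the trivial (set (reg:SI 130) (reg:SI 130)) case), tells IRA
   via ira_mark_memory_move_deletion that the memory-to-memory copy is
   gone, and unlinks the insn from the reload chain instead of reloading
   it.  */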
1550 : : /* This function is called from the register allocator to set up estimates
1551 : : for the cost of eliminating pseudos which have REG_EQUIV equivalences to
1552 : : an invariant. The structure is similar to calculate_needs_all_insns. */
1553 : :
1554 : : void
1555 : 0 : calculate_elim_costs_all_insns (void)
1556 : : {
1557 : 0 : int *reg_equiv_init_cost;
1558 : 0 : basic_block bb;
1559 : 0 : int i;
1560 : :
1561 : 0 : reg_equiv_init_cost = XCNEWVEC (int, max_regno);
1562 : 0 : init_elim_table ();
1563 : 0 : init_eliminable_invariants (get_insns (), false);
1564 : :
1565 : 0 : set_initial_elim_offsets ();
1566 : 0 : set_initial_label_offsets ();
1567 : :
1568 : 0 : FOR_EACH_BB_FN (bb, cfun)
1569 : : {
1570 : 0 : rtx_insn *insn;
1571 : 0 : elim_bb = bb;
1572 : :
1573 : 0 : FOR_BB_INSNS (bb, insn)
1574 : : {
1575 : : /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1576 : : include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1577 : : what effects this has on the known offsets at labels. */
1578 : :
1579 : 0 : if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
1580 : 0 : || (INSN_P (insn) && REG_NOTES (insn) != 0))
1581 : 0 : set_label_offsets (insn, insn, 0);
1582 : :
1583 : 0 : if (INSN_P (insn))
1584 : : {
1585 : 0 : rtx set = single_set (insn);
1586 : :
1587 : : /* Skip insns that only set an equivalence. */
1588 : 0 : if (set && REG_P (SET_DEST (set))
1589 : 0 : && reg_renumber[REGNO (SET_DEST (set))] < 0
1590 : 0 : && (reg_equiv_constant (REGNO (SET_DEST (set)))
1591 : 0 : || reg_equiv_invariant (REGNO (SET_DEST (set)))))
1592 : : {
1593 : 0 : unsigned regno = REGNO (SET_DEST (set));
1594 : 0 : rtx_insn_list *init = reg_equiv_init (regno);
1595 : 0 : if (init)
1596 : : {
1597 : 0 : rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
1598 : : false, true);
1599 : 0 : machine_mode mode = GET_MODE (SET_DEST (set));
1600 : 0 : int cost = set_src_cost (t, mode,
1601 : 0 : optimize_bb_for_speed_p (bb));
1602 : 0 : int freq = REG_FREQ_FROM_BB (bb);
1603 : :
1604 : 0 : reg_equiv_init_cost[regno] = cost * freq;
1605 : 0 : continue;
1606 : 0 : }
1607 : : }
1608 : : /* If needed, eliminate any eliminable registers. */
1609 : 0 : if (num_eliminable || num_eliminable_invariants)
1610 : 0 : elimination_costs_in_insn (insn);
1611 : :
1612 : 0 : if (num_eliminable)
1613 : 0 : update_eliminable_offsets ();
1614 : : }
1615 : : }
1616 : : }
1617 : 0 : for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1618 : : {
1619 : 0 : if (reg_equiv_invariant (i))
1620 : : {
1621 : 0 : if (reg_equiv_init (i))
1622 : : {
1623 : 0 : int cost = reg_equiv_init_cost[i];
1624 : 0 : if (dump_file)
1625 : 0 : fprintf (dump_file,
1626 : : "Reg %d has equivalence, initial gains %d\n", i, cost);
1627 : 0 : if (cost != 0)
1628 : 0 : ira_adjust_equiv_reg_cost (i, cost);
1629 : : }
1630 : : else
1631 : : {
1632 : 0 : if (dump_file)
1633 : 0 : fprintf (dump_file,
1634 : : "Reg %d had equivalence, but can't be eliminated\n",
1635 : : i);
1636 : 0 : ira_adjust_equiv_reg_cost (i, 0);
1637 : : }
1638 : : }
1639 : : }
1640 : :
1641 : 0 : free (reg_equiv_init_cost);
1642 : 0 : free (offsets_known_at);
1643 : 0 : free (offsets_at);
1644 : 0 : offsets_at = NULL;
1645 : 0 : offsets_known_at = NULL;
1646 : 0 : }
1647 : :
1648 : : /* Comparison function for qsort to decide which of two reloads
1649 : : should be handled first. *P1 and *P2 are the reload numbers. */
1650 : :
1651 : : static int
1652 : 0 : reload_reg_class_lower (const void *r1p, const void *r2p)
1653 : : {
1654 : 0 : int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1655 : 0 : int t;
1656 : :
1657 : : /* Consider required reloads before optional ones. */
1658 : 0 : t = rld[r1].optional - rld[r2].optional;
1659 : 0 : if (t != 0)
1660 : : return t;
1661 : :
1662 : : /* Count all solitary classes before non-solitary ones. */
1663 : 0 : t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1664 : 0 : - (reg_class_size[(int) rld[r1].rclass] == 1));
1665 : 0 : if (t != 0)
1666 : : return t;
1667 : :
1668 : : /* Aside from solitaires, consider all multi-reg groups first. */
1669 : 0 : t = rld[r2].nregs - rld[r1].nregs;
1670 : 0 : if (t != 0)
1671 : : return t;
1672 : :
1673 : : /* Consider reloads in order of increasing reg-class number. */
1674 : 0 : t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1675 : 0 : if (t != 0)
1676 : : return t;
1677 : :
1678 : : /* If reloads are equally urgent, sort by reload number,
1679 : : so that the results of qsort leave nothing to chance. */
1680 : 0 : return r1 - r2;
1681 : : }
1682 : :
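/* For example (numbers invented), suppose an insn has two reloads where
   rld[0] is optional in GENERAL_REGS and rld[1] is required in a class
   that contains a single hard register.  After

       short order[2] = { 0, 1 };
       qsort (order, 2, sizeof (short), reload_reg_class_lower);

   order[] is { 1, 0 }: required reloads are allocated before optional
   ones, and among required reloads a single-register class goes first, so
   its only possible register is not grabbed by a reload that had other
   choices.  */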
1683 : : /* The cost of spilling each hard reg. */
1684 : : static int spill_cost[FIRST_PSEUDO_REGISTER];
1685 : :
1686 : : /* When spilling multiple hard registers, we use SPILL_COST for the first
1687 : : spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
1688 : : is charged only against the first hard reg of a multi-reg pseudo. */
1689 : : static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1690 : :
1691 : : /* Map of hard regno to pseudo regno currently occupying the hard
1692 : : reg. */
1693 : : static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1694 : :
1695 : : /* Update the spill cost arrays, considering that pseudo REG is live. */
1696 : :
1697 : : static void
1698 : 0 : count_pseudo (int reg)
1699 : : {
1700 : 0 : int freq = REG_FREQ (reg);
1701 : 0 : int r = reg_renumber[reg];
1702 : 0 : int nregs;
1703 : :
1704 : : /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1705 : 0 : if (ira_conflicts_p && r < 0)
1706 : : return;
1707 : :
1708 : 0 : if (REGNO_REG_SET_P (&pseudos_counted, reg)
1709 : 0 : || REGNO_REG_SET_P (&spilled_pseudos, reg))
1710 : 0 : return;
1711 : :
1712 : 0 : SET_REGNO_REG_SET (&pseudos_counted, reg);
1713 : :
1714 : 0 : gcc_assert (r >= 0);
1715 : :
1716 : 0 : spill_add_cost[r] += freq;
1717 : 0 : nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg));
1718 : 0 : while (nregs-- > 0)
1719 : : {
1720 : 0 : hard_regno_to_pseudo_regno[r + nregs] = reg;
1721 : 0 : spill_cost[r + nregs] += freq;
1722 : : }
1723 : : }
1724 : :
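/* A worked example with invented numbers: if pseudo 200 has REG_FREQ 20
   and was assigned by IRA to a two-register value starting at hard reg 3,
   count_pseudo performs

       spill_add_cost[3] += 20;
       spill_cost[3] += 20;
       spill_cost[4] += 20;
       hard_regno_to_pseudo_regno[3] = hard_regno_to_pseudo_regno[4] = 200;

   i.e. taking either hard reg 3 or 4 as a reload reg costs the full
   frequency of the pseudo, while SPILL_ADD_COST is charged once, against
   the first hard reg only.  */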
1725 : : /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1726 : : contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1727 : :
1728 : : static void
1729 : 0 : order_regs_for_reload (class insn_chain *chain)
1730 : : {
1731 : 0 : unsigned i;
1732 : 0 : HARD_REG_SET used_by_pseudos;
1733 : 0 : HARD_REG_SET used_by_pseudos2;
1734 : 0 : reg_set_iterator rsi;
1735 : :
1736 : 0 : bad_spill_regs = fixed_reg_set;
1737 : :
1738 : 0 : memset (spill_cost, 0, sizeof spill_cost);
1739 : 0 : memset (spill_add_cost, 0, sizeof spill_add_cost);
1740 : 0 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1741 : 0 : hard_regno_to_pseudo_regno[i] = -1;
1742 : :
1743 : : /* Count number of uses of each hard reg by pseudo regs allocated to it
1744 : : and then order them by decreasing use. First exclude hard registers
1745 : : that are live in or across this insn. */
1746 : :
1747 : 0 : REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1748 : 0 : REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1749 : 0 : bad_spill_regs |= used_by_pseudos;
1750 : 0 : bad_spill_regs |= used_by_pseudos2;
1751 : :
1752 : : /* Now find out which pseudos are allocated to these hard regs, and update
1753 : : the spill cost arrays. */
1754 : 0 : CLEAR_REG_SET (&pseudos_counted);
1755 : :
1756 : 0 : EXECUTE_IF_SET_IN_REG_SET
1757 : : (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
1758 : : {
1759 : 0 : count_pseudo (i);
1760 : : }
1761 : 0 : EXECUTE_IF_SET_IN_REG_SET
1762 : : (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
1763 : : {
1764 : 0 : count_pseudo (i);
1765 : : }
1766 : 0 : CLEAR_REG_SET (&pseudos_counted);
1767 : 0 : }
1768 : :
1769 : : /* Vector of reload-numbers showing the order in which the reloads should
1770 : : be processed. */
1771 : : static short reload_order[MAX_RELOADS];
1772 : :
1773 : : /* This is used to keep track of the spill regs used in one insn. */
1774 : : static HARD_REG_SET used_spill_regs_local;
1775 : :
1776 : : /* We decided to spill hard register SPILLED, which has a size of
1777 : : SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1778 : : is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1779 : : update SPILL_COST/SPILL_ADD_COST. */
1780 : :
1781 : : static void
1782 : 0 : count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1783 : : {
1784 : 0 : int freq = REG_FREQ (reg);
1785 : 0 : int r = reg_renumber[reg];
1786 : 0 : int nregs;
1787 : :
1788 : : /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1789 : 0 : if (ira_conflicts_p && r < 0)
1790 : : return;
1791 : :
1792 : 0 : gcc_assert (r >= 0);
1793 : :
1794 : 0 : nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg));
1795 : :
1796 : 0 : if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1797 : 0 : || spilled + spilled_nregs <= r || r + nregs <= spilled)
1798 : : return;
1799 : :
1800 : 0 : SET_REGNO_REG_SET (&spilled_pseudos, reg);
1801 : :
1802 : 0 : spill_add_cost[r] -= freq;
1803 : 0 : while (nregs-- > 0)
1804 : : {
1805 : 0 : hard_regno_to_pseudo_regno[r + nregs] = -1;
1806 : 0 : spill_cost[r + nregs] -= freq;
1807 : : }
1808 : : }
1809 : :
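/* The early return in count_spilled_pseudo is an interval-overlap test.
   With invented values SPILLED = 2 and SPILLED_NREGS = 2 the spilled block
   covers hard regs 2 and 3; a pseudo occupying regs 4-5 satisfies
   spilled + spilled_nregs <= r (4 <= 4) and is untouched, while a pseudo
   occupying regs 3-4 overlaps the block, is added to spilled_pseudos, and
   has its frequency subtracted back out of the cost arrays.  */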
1810 : : /* Find reload register to use for reload number ORDER. */
1811 : :
1812 : : static int
1813 : 0 : find_reg (class insn_chain *chain, int order)
1814 : : {
1815 : 0 : int rnum = reload_order[order];
1816 : 0 : struct reload *rl = rld + rnum;
1817 : 0 : int best_cost = INT_MAX;
1818 : 0 : int best_reg = -1;
1819 : 0 : unsigned int i, j, n;
1820 : 0 : int k;
1821 : 0 : HARD_REG_SET not_usable;
1822 : 0 : HARD_REG_SET used_by_other_reload;
1823 : 0 : reg_set_iterator rsi;
1824 : 0 : static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1825 : 0 : static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
1826 : :
1827 : 0 : not_usable = (bad_spill_regs
1828 : 0 : | bad_spill_regs_global
1829 : 0 : | ~reg_class_contents[rl->rclass]);
1830 : :
1831 : 0 : CLEAR_HARD_REG_SET (used_by_other_reload);
1832 : 0 : for (k = 0; k < order; k++)
1833 : : {
1834 : 0 : int other = reload_order[k];
1835 : :
1836 : 0 : if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1837 : 0 : for (j = 0; j < rld[other].nregs; j++)
1838 : 0 : SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1839 : : }
1840 : :
1841 : 0 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1842 : : {
1843 : : #ifdef REG_ALLOC_ORDER
1844 : 0 : unsigned int regno = reg_alloc_order[i];
1845 : : #else
1846 : : unsigned int regno = i;
1847 : : #endif
1848 : :
1849 : 0 : if (! TEST_HARD_REG_BIT (not_usable, regno)
1850 : 0 : && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1851 : 0 : && targetm.hard_regno_mode_ok (regno, rl->mode))
1852 : : {
1853 : 0 : int this_cost = spill_cost[regno];
1854 : 0 : int ok = 1;
1855 : 0 : unsigned int this_nregs = hard_regno_nregs (regno, rl->mode);
1856 : :
1857 : 0 : for (j = 1; j < this_nregs; j++)
1858 : : {
1859 : 0 : this_cost += spill_add_cost[regno + j];
1860 : 0 : if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1861 : 0 : || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1862 : : ok = 0;
1863 : : }
1864 : 0 : if (! ok)
1865 : 0 : continue;
1866 : :
1867 : 0 : if (ira_conflicts_p)
1868 : : {
1869 : : /* Ask IRA to find a better pseudo-register for
1870 : : spilling. */
1871 : 0 : for (n = j = 0; j < this_nregs; j++)
1872 : : {
1873 : 0 : int r = hard_regno_to_pseudo_regno[regno + j];
1874 : :
1875 : 0 : if (r < 0)
1876 : 0 : continue;
1877 : 0 : if (n == 0 || regno_pseudo_regs[n - 1] != r)
1878 : 0 : regno_pseudo_regs[n++] = r;
1879 : : }
1880 : 0 : regno_pseudo_regs[n++] = -1;
1881 : 0 : if (best_reg < 0
1882 : 0 : || ira_better_spill_reload_regno_p (regno_pseudo_regs,
1883 : : best_regno_pseudo_regs,
1884 : : rl->in, rl->out,
1885 : : chain->insn))
1886 : : {
1887 : 0 : best_reg = regno;
1888 : 0 : for (j = 0;; j++)
1889 : : {
1890 : 0 : best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
1891 : 0 : if (regno_pseudo_regs[j] < 0)
1892 : : break;
1893 : : }
1894 : : }
1895 : 0 : continue;
1896 : 0 : }
1897 : :
1898 : 0 : if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
1899 : 0 : this_cost--;
1900 : 0 : if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
1901 : 0 : this_cost--;
1902 : 0 : if (this_cost < best_cost
1903 : : /* Among registers with equal cost, prefer caller-saved ones, or
1904 : : use REG_ALLOC_ORDER if it is defined. */
1905 : 0 : || (this_cost == best_cost
1906 : : #ifdef REG_ALLOC_ORDER
1907 : 0 : && (inv_reg_alloc_order[regno]
1908 : 0 : < inv_reg_alloc_order[best_reg])
1909 : : #else
1910 : : && crtl->abi->clobbers_full_reg_p (regno)
1911 : : && !crtl->abi->clobbers_full_reg_p (best_reg)
1912 : : #endif
1913 : : ))
1914 : : {
1915 : 0 : best_reg = regno;
1916 : 0 : best_cost = this_cost;
1917 : : }
1918 : : }
1919 : : }
1920 : 0 : if (best_reg == -1)
1921 : : return 0;
1922 : :
1923 : 0 : if (dump_file)
1924 : 0 : fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1925 : :
1926 : 0 : rl->nregs = hard_regno_nregs (best_reg, rl->mode);
1927 : 0 : rl->regno = best_reg;
1928 : :
1929 : 0 : EXECUTE_IF_SET_IN_REG_SET
1930 : : (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
1931 : : {
1932 : 0 : count_spilled_pseudo (best_reg, rl->nregs, j);
1933 : : }
1934 : :
1935 : 0 : EXECUTE_IF_SET_IN_REG_SET
1936 : : (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
1937 : : {
1938 : 0 : count_spilled_pseudo (best_reg, rl->nregs, j);
1939 : : }
1940 : :
1941 : 0 : for (i = 0; i < rl->nregs; i++)
1942 : : {
1943 : 0 : gcc_assert (spill_cost[best_reg + i] == 0);
1944 : 0 : gcc_assert (spill_add_cost[best_reg + i] == 0);
1945 : 0 : gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
1946 : 0 : SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1947 : : }
1948 : : return 1;
1949 : : }
1950 : :
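/* To illustrate the cost comparison in find_reg with invented numbers:
   if hard regs 1 and 2 both reach this_cost == 0 but the reload's input
   rl->in is already (reg:SI 1), the adjustment

       if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
         this_cost--;

   makes reg 1 cheaper, so the reload reuses the register it is loading
   from and the generated reload move can often be optimized away.  On an
   exact tie, caller-saved registers are preferred, or REG_ALLOC_ORDER
   decides when the target defines one.  */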
1951 : : /* Find more reload regs to satisfy the remaining need of an insn, which
1952 : : is given by CHAIN.
1953 : : Do it by ascending class number, since otherwise a reg
1954 : : might be spilled for a big class and might fail to count
1955 : : for a smaller class even though it belongs to that class. */
1956 : :
1957 : : static void
1958 : 0 : find_reload_regs (class insn_chain *chain)
1959 : : {
1960 : 0 : int i;
1961 : :
1962 : : /* In order to be certain of getting the registers we need,
1963 : : we must sort the reloads into order of increasing register class.
1964 : : Then our grabbing of reload registers will parallel the process
1965 : : that provided the reload registers. */
1966 : 0 : for (i = 0; i < chain->n_reloads; i++)
1967 : : {
1968 : : /* Show whether this reload already has a hard reg. */
1969 : 0 : if (chain->rld[i].reg_rtx)
1970 : : {
1971 : 0 : chain->rld[i].regno = REGNO (chain->rld[i].reg_rtx);
1972 : 0 : chain->rld[i].nregs = REG_NREGS (chain->rld[i].reg_rtx);
1973 : : }
1974 : : else
1975 : 0 : chain->rld[i].regno = -1;
1976 : 0 : reload_order[i] = i;
1977 : : }
1978 : :
1979 : 0 : n_reloads = chain->n_reloads;
1980 : 0 : memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1981 : :
1982 : 0 : CLEAR_HARD_REG_SET (used_spill_regs_local);
1983 : :
1984 : 0 : if (dump_file)
1985 : 0 : fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1986 : :
1987 : 0 : qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
1988 : :
1989 : : /* Compute the order of preference for hard registers to spill. */
1990 : :
1991 : 0 : order_regs_for_reload (chain);
1992 : :
1993 : 0 : for (i = 0; i < n_reloads; i++)
1994 : : {
1995 : 0 : int r = reload_order[i];
1996 : :
1997 : : /* Ignore reloads that got marked inoperative. */
1998 : 0 : if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
1999 : 0 : && ! rld[r].optional
2000 : 0 : && rld[r].regno == -1)
2001 : 0 : if (! find_reg (chain, i))
2002 : : {
2003 : 0 : if (dump_file)
2004 : 0 : fprintf (dump_file, "reload failure for reload %d\n", r);
2005 : 0 : spill_failure (chain->insn, rld[r].rclass);
2006 : 0 : failure = 1;
2007 : 0 : return;
2008 : : }
2009 : : }
2010 : :
2011 : 0 : chain->used_spill_regs = used_spill_regs_local;
2012 : 0 : used_spill_regs |= used_spill_regs_local;
2013 : :
2014 : 0 : memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
2015 : : }
2016 : :
2017 : : static void
2018 : 0 : select_reload_regs (void)
2019 : : {
2020 : 0 : class insn_chain *chain;
2021 : :
2022 : : /* Try to satisfy the needs for each insn. */
2023 : 0 : for (chain = insns_need_reload; chain != 0;
2024 : 0 : chain = chain->next_need_reload)
2025 : 0 : find_reload_regs (chain);
2026 : 0 : }
2027 : :
2028 : : /* Delete all insns that were inserted by emit_caller_save_insns during
2029 : : this iteration. */
2030 : : static void
2031 : 0 : delete_caller_save_insns (void)
2032 : : {
2033 : 0 : class insn_chain *c = reload_insn_chain;
2034 : :
2035 : 0 : while (c != 0)
2036 : : {
2037 : 0 : while (c != 0 && c->is_caller_save_insn)
2038 : : {
2039 : 0 : class insn_chain *next = c->next;
2040 : 0 : rtx_insn *insn = c->insn;
2041 : :
2042 : 0 : if (c == reload_insn_chain)
2043 : 0 : reload_insn_chain = next;
2044 : 0 : delete_insn (insn);
2045 : :
2046 : 0 : if (next)
2047 : 0 : next->prev = c->prev;
2048 : 0 : if (c->prev)
2049 : 0 : c->prev->next = next;
2050 : 0 : c->next = unused_insn_chains;
2051 : 0 : unused_insn_chains = c;
2052 : 0 : c = next;
2053 : : }
2054 : 0 : if (c != 0)
2055 : 0 : c = c->next;
2056 : : }
2057 : 0 : }
2058 : :
2059 : : /* Handle the failure to find a register to spill.
2060 : : INSN should be one of the insns which needed this particular spill reg. */
2061 : :
2062 : : static void
2063 : 0 : spill_failure (rtx_insn *insn, enum reg_class rclass)
2064 : : {
2065 : 0 : if (asm_noperands (PATTERN (insn)) >= 0)
2066 : 0 : error_for_asm (insn, "cannot find a register in class %qs while "
2067 : : "reloading %<asm%>",
2068 : 0 : reg_class_names[rclass]);
2069 : : else
2070 : : {
2071 : 0 : error ("unable to find a register to spill in class %qs",
2072 : 0 : reg_class_names[rclass]);
2073 : :
2074 : 0 : if (dump_file)
2075 : : {
2076 : 0 : fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2077 : 0 : debug_reload_to_stream (dump_file);
2078 : : }
2079 : 0 : fatal_insn ("this is the insn:", insn);
2080 : : }
2081 : 0 : }
2082 : :
2083 : : /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2084 : : data that is dead in INSN. */
2085 : :
2086 : : static void
2087 : 0 : delete_dead_insn (rtx_insn *insn)
2088 : : {
2089 : 0 : rtx_insn *prev = prev_active_insn (insn);
2090 : 0 : rtx prev_dest;
2091 : :
2092 : : /* If the previous insn sets a register that dies in our insn make
2093 : : a note that we want to run DCE immediately after reload.
2094 : :
2095 : : We used to delete the previous insn & recurse, but that's wrong for
2096 : : block local equivalences. Instead of trying to figure out the exact
2097 : : circumstances where we can delete the potentially dead insns, just
2098 : : let DCE do the job. */
2099 : 0 : if (prev && BLOCK_FOR_INSN (prev) == BLOCK_FOR_INSN (insn)
2100 : 0 : && GET_CODE (PATTERN (prev)) == SET
2101 : 0 : && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2102 : 0 : && reg_mentioned_p (prev_dest, PATTERN (insn))
2103 : 0 : && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2104 : 0 : && ! side_effects_p (SET_SRC (PATTERN (prev))))
2105 : 0 : need_dce = 1;
2106 : :
2107 : 0 : SET_INSN_DELETED (insn);
2108 : 0 : }
2109 : :
2110 : : /* Modify the home of pseudo-reg I.
2111 : : The new home is present in reg_renumber[I].
2112 : :
2113 : : FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2114 : : or it may be -1, meaning there is none or it is not relevant.
2115 : : This is used so that all pseudos spilled from a given hard reg
2116 : : can share one stack slot. */
2117 : :
2118 : : static void
2119 : 0 : alter_reg (int i, int from_reg, bool dont_share_p)
2120 : : {
2121 : : /* When outputting an inline function, this can happen
2122 : : for a reg that isn't actually used. */
2123 : 0 : if (regno_reg_rtx[i] == 0)
2124 : : return;
2125 : :
2126 : : /* If the reg got changed to a MEM at rtl-generation time,
2127 : : ignore it. */
2128 : 0 : if (!REG_P (regno_reg_rtx[i]))
2129 : : return;
2130 : :
2131 : : /* Modify the reg-rtx to contain the new hard reg
2132 : : number or else to contain its pseudo reg number. */
2133 : 0 : SET_REGNO (regno_reg_rtx[i],
2134 : : reg_renumber[i] >= 0 ? reg_renumber[i] : i);
2135 : :
2136 : : /* If we have a pseudo that is needed but has no hard reg or equivalent,
2137 : : allocate a stack slot for it. */
2138 : :
2139 : 0 : if (reg_renumber[i] < 0
2140 : 0 : && REG_N_REFS (i) > 0
2141 : 0 : && reg_equiv_constant (i) == 0
2142 : 0 : && (reg_equiv_invariant (i) == 0
2143 : 0 : || reg_equiv_init (i) == 0)
2144 : 0 : && reg_equiv_memory_loc (i) == 0)
2145 : : {
2146 : 0 : rtx x = NULL_RTX;
2147 : 0 : machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2148 : 0 : poly_uint64 inherent_size = GET_MODE_SIZE (mode);
2149 : 0 : unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
2150 : 0 : machine_mode wider_mode = wider_subreg_mode (mode, reg_max_ref_mode[i]);
2151 : 0 : poly_uint64 total_size = GET_MODE_SIZE (wider_mode);
2152 : : /* ??? Seems strange to derive the minimum alignment from the size,
2153 : : but that's the traditional behavior. For polynomial-size modes,
2154 : : the natural extension is to use the minimum possible size. */
2155 : 0 : unsigned int min_align
2156 : 0 : = constant_lower_bound (GET_MODE_BITSIZE (reg_max_ref_mode[i]));
2157 : 0 : poly_int64 adjust = 0;
2158 : :
2159 : 0 : something_was_spilled = true;
2160 : :
2161 : 0 : if (ira_conflicts_p)
2162 : : {
2163 : : /* Mark the spill for IRA. */
2164 : 0 : SET_REGNO_REG_SET (&spilled_pseudos, i);
2165 : 0 : if (!dont_share_p)
2166 : 0 : x = ira_reuse_stack_slot (i, inherent_size, total_size);
2167 : : }
2168 : :
2169 : 0 : if (x)
2170 : : ;
2171 : :
2172 : : /* Each pseudo reg has an inherent size which comes from its own mode,
2173 : : and a total size which provides room for paradoxical subregs
2174 : : which refer to the pseudo reg in wider modes.
2175 : :
2176 : : We can use a slot already allocated if it provides both
2177 : : enough inherent space and enough total space.
2178 : : Otherwise, we allocate a new slot, making sure that it has no less
2179 : : inherent space, and no less total space, than the previous slot. */
2180 : 0 : else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
2181 : : {
2182 : 0 : rtx stack_slot;
2183 : :
2184 : : /* The sizes are taken from a subreg operation, which guarantees
2185 : : that they're ordered. */
2186 : 0 : gcc_checking_assert (ordered_p (total_size, inherent_size));
2187 : :
2188 : : /* No known place to spill from => no slot to reuse. */
2189 : 0 : x = assign_stack_local (mode, total_size,
2190 : : min_align > inherent_align
2191 : 0 : || maybe_gt (total_size, inherent_size)
2192 : : ? -1 : 0);
2193 : :
2194 : 0 : stack_slot = x;
2195 : :
2196 : : /* Cancel the big-endian correction done in assign_stack_local.
2197 : : Get the address of the beginning of the slot. This is so we
2198 : : can do a big-endian correction unconditionally below. */
2199 : 0 : if (BYTES_BIG_ENDIAN)
2200 : : {
2201 : : adjust = inherent_size - total_size;
2202 : : if (maybe_ne (adjust, 0))
2203 : : {
2204 : : poly_uint64 total_bits = total_size * BITS_PER_UNIT;
2205 : : machine_mode mem_mode
2206 : : = int_mode_for_size (total_bits, 1).else_blk ();
2207 : : stack_slot = adjust_address_nv (x, mem_mode, adjust);
2208 : : }
2209 : : }
2210 : :
2211 : 0 : if (! dont_share_p && ira_conflicts_p)
2212 : : /* Inform IRA about the allocation of a new stack slot. */
2213 : 0 : ira_mark_new_stack_slot (stack_slot, i, total_size);
2214 : : }
2215 : :
2216 : : /* Reuse a stack slot if possible. */
2217 : 0 : else if (spill_stack_slot[from_reg] != 0
2218 : 0 : && known_ge (spill_stack_slot_width[from_reg], total_size)
2219 : 0 : && known_ge (GET_MODE_SIZE
2220 : : (GET_MODE (spill_stack_slot[from_reg])),
2221 : : inherent_size)
2222 : 0 : && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
2223 : : x = spill_stack_slot[from_reg];
2224 : :
2225 : : /* Allocate a bigger slot. */
2226 : : else
2227 : : {
2228 : : /* Compute maximum size needed, both for inherent size
2229 : : and for total size. */
2230 : 0 : rtx stack_slot;
2231 : :
2232 : 0 : if (spill_stack_slot[from_reg])
2233 : : {
2234 : 0 : if (partial_subreg_p (mode,
2235 : 0 : GET_MODE (spill_stack_slot[from_reg])))
2236 : 0 : mode = GET_MODE (spill_stack_slot[from_reg]);
2237 : 0 : total_size = ordered_max (total_size,
2238 : 0 : spill_stack_slot_width[from_reg]);
2239 : 0 : if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2240 : : min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2241 : : }
2242 : :
2243 : : /* The sizes are taken from a subreg operation, which guarantees
2244 : : that they're ordered. */
2245 : 0 : gcc_checking_assert (ordered_p (total_size, inherent_size));
2246 : :
2247 : : /* Make a slot with that size. */
2248 : 0 : x = assign_stack_local (mode, total_size,
2249 : : min_align > inherent_align
2250 : 0 : || maybe_gt (total_size, inherent_size)
2251 : : ? -1 : 0);
2252 : 0 : stack_slot = x;
2253 : :
2254 : : /* Cancel the big-endian correction done in assign_stack_local.
2255 : : Get the address of the beginning of the slot. This is so we
2256 : : can do a big-endian correction unconditionally below. */
2257 : 0 : if (BYTES_BIG_ENDIAN)
2258 : : {
2259 : : adjust = GET_MODE_SIZE (mode) - total_size;
2260 : : if (maybe_ne (adjust, 0))
2261 : : {
2262 : : poly_uint64 total_bits = total_size * BITS_PER_UNIT;
2263 : : machine_mode mem_mode
2264 : : = int_mode_for_size (total_bits, 1).else_blk ();
2265 : : stack_slot = adjust_address_nv (x, mem_mode, adjust);
2266 : : }
2267 : : }
2268 : :
2269 : 0 : spill_stack_slot[from_reg] = stack_slot;
2270 : 0 : spill_stack_slot_width[from_reg] = total_size;
2271 : : }
2272 : :
2273 : : /* On a big endian machine, the "address" of the slot
2274 : : is the address of the low part that fits its inherent mode. */
2275 : 0 : adjust += subreg_size_lowpart_offset (inherent_size, total_size);
2276 : :
2277 : : /* If we have any adjustment to make, or if the stack slot is the
2278 : : wrong mode, make a new stack slot. */
2279 : 0 : x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2280 : :
2281 : : /* Set all of the memory attributes as appropriate for a spill. */
2282 : 0 : set_mem_attrs_for_spill (x);
2283 : :
2284 : : /* Save the stack slot for later. */
2285 : 0 : reg_equiv_memory_loc (i) = x;
2286 : : }
2287 : : }
2288 : :
2289 : : /* Mark the slots in regs_ever_live for the hard regs used by
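/* A worked size example for alter_reg (modes picked arbitrarily): a
   QImode pseudo that is also referenced through a paradoxical SImode
   subreg has inherent_size == 1 but total_size == 4, so its stack slot
   must be four bytes wide.  The spilled value lives in the low part of
   that slot: subreg_size_lowpart_offset (1, 4) is 0 on a typical
   little-endian target and 3 on a typical big-endian one, and that is the
   offset folded into ADJUST before the final adjust_address_nv call.  */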
2290 : : pseudo-reg number REGNO, accessed in MODE. */
2291 : :
2292 : : static void
2293 : 0 : mark_home_live_1 (int regno, machine_mode mode)
2294 : : {
2295 : 0 : int i, lim;
2296 : :
2297 : 0 : i = reg_renumber[regno];
2298 : 0 : if (i < 0)
2299 : : return;
2300 : 0 : lim = end_hard_regno (mode, i);
2301 : 0 : while (i < lim)
2302 : 0 : df_set_regs_ever_live (i++, true);
2303 : : }
2304 : :
2305 : : /* Mark the slots in regs_ever_live for the hard regs
2306 : : used by pseudo-reg number REGNO. */
2307 : :
2308 : : void
2309 : 0 : mark_home_live (int regno)
2310 : : {
2311 : 0 : if (reg_renumber[regno] >= 0)
2312 : 0 : mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2313 : 0 : }
2314 : :
2315 : : /* This function handles the tracking of elimination offsets around branches.
2316 : :
2317 : : X is a piece of RTL being scanned.
2318 : :
2319 : : INSN is the insn that it came from, if any.
2320 : :
2321 : : INITIAL_P is nonzero if we are to set the offset to be the initial
2322 : : offset and zero if we are setting the offset of the label to be the
2323 : : current offset. */
2324 : :
2325 : : static void
2326 : 0 : set_label_offsets (rtx x, rtx_insn *insn, int initial_p)
2327 : : {
2328 : 0 : enum rtx_code code = GET_CODE (x);
2329 : 0 : rtx tem;
2330 : 0 : unsigned int i;
2331 : 0 : struct elim_table *p;
2332 : :
2333 : 0 : switch (code)
2334 : : {
2335 : 0 : case LABEL_REF:
2336 : 0 : if (LABEL_REF_NONLOCAL_P (x))
2337 : : return;
2338 : :
2339 : 0 : x = label_ref_label (x);
2340 : :
2341 : : /* fall through */
2342 : :
2343 : 0 : case CODE_LABEL:
2344 : : /* If we know nothing about this label, set the desired offsets. Note
2345 : : that this sets the offset at a label to be the offset before a label
2346 : : if we don't know anything about the label. This is not correct for
2347 : : the label after a BARRIER, but is the best guess we can make. If
2348 : : we guessed wrong, we will suppress an elimination that might have
2349 : : been possible had we been able to guess correctly. */
2350 : :
2351 : 0 : if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2352 : : {
2353 : 0 : for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2354 : 0 : offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2355 : 0 : = (initial_p ? reg_eliminate[i].initial_offset
2356 : 0 : : reg_eliminate[i].offset);
2357 : 0 : offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2358 : : }
2359 : :
2360 : : /* Otherwise, if this is the definition of a label and it is
2361 : : preceded by a BARRIER, set our offsets to the known offset of
2362 : : that label. */
2363 : :
2364 : 0 : else if (x == insn
2365 : 0 : && (tem = prev_nonnote_insn (insn)) != 0
2366 : 0 : && BARRIER_P (tem))
2367 : 0 : set_offsets_for_label (insn);
2368 : : else
2369 : : /* If neither of the above cases is true, compare each offset
2370 : : with those previously recorded and suppress any eliminations
2371 : : where the offsets disagree. */
2372 : :
2373 : 0 : for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2374 : 0 : if (maybe_ne (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i],
2375 : 0 : (initial_p ? reg_eliminate[i].initial_offset
2376 : 0 : : reg_eliminate[i].offset)))
2377 : 0 : reg_eliminate[i].can_eliminate = 0;
2378 : :
2379 : : return;
2380 : :
2381 : 0 : case JUMP_TABLE_DATA:
2382 : 0 : set_label_offsets (PATTERN (insn), insn, initial_p);
2383 : 0 : return;
2384 : :
2385 : 0 : case JUMP_INSN:
2386 : 0 : set_label_offsets (PATTERN (insn), insn, initial_p);
2387 : :
2388 : : /* fall through */
2389 : :
2390 : 0 : case INSN:
2391 : 0 : case CALL_INSN:
2392 : : /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
2393 : : to indirectly and hence must have all eliminations at their
2394 : : initial offsets. */
2395 : 0 : for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2396 : 0 : if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
2397 : 0 : set_label_offsets (XEXP (tem, 0), insn, 1);
2398 : : return;
2399 : :
2400 : : case PARALLEL:
2401 : : case ADDR_VEC:
2402 : : case ADDR_DIFF_VEC:
2403 : : /* Each of the labels in the parallel or address vector must be
2404 : : at their initial offsets. We want the first field for PARALLEL
2405 : : and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2406 : :
2407 : 0 : for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2408 : 0 : set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2409 : : insn, initial_p);
2410 : : return;
2411 : :
2412 : 0 : case SET:
2413 : : /* We only care about setting PC. If the source is not RETURN,
2414 : : IF_THEN_ELSE, or a label, disable any eliminations not at
2415 : : their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2416 : : isn't one of those possibilities. For branches to a label,
2417 : : call ourselves recursively.
2418 : :
2419 : : Note that this can disable elimination unnecessarily when we have
2420 : : a non-local goto since it will look like a non-constant jump to
2421 : : someplace in the current function. This isn't a significant
2422 : : problem since such jumps will normally be when all elimination
2423 : : pairs are back to their initial offsets. */
2424 : :
2425 : 0 : if (SET_DEST (x) != pc_rtx)
2426 : : return;
2427 : :
2428 : 0 : switch (GET_CODE (SET_SRC (x)))
2429 : : {
2430 : : case PC:
2431 : : case RETURN:
2432 : : return;
2433 : :
2434 : : case LABEL_REF:
2435 : : set_label_offsets (SET_SRC (x), insn, initial_p);
2436 : : return;
2437 : :
2438 : 0 : case IF_THEN_ELSE:
2439 : 0 : tem = XEXP (SET_SRC (x), 1);
2440 : 0 : if (GET_CODE (tem) == LABEL_REF)
2441 : 0 : set_label_offsets (label_ref_label (tem), insn, initial_p);
2442 : 0 : else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2443 : : break;
2444 : :
2445 : 0 : tem = XEXP (SET_SRC (x), 2);
2446 : 0 : if (GET_CODE (tem) == LABEL_REF)
2447 : 0 : set_label_offsets (label_ref_label (tem), insn, initial_p);
2448 : 0 : else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2449 : : break;
2450 : : return;
2451 : :
2452 : : default:
2453 : : break;
2454 : : }
2455 : :
2456 : : /* If we reach here, all eliminations must be at their initial
2457 : : offset because we are doing a jump to a variable address. */
2458 : 0 : for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2459 : 0 : if (maybe_ne (p->offset, p->initial_offset))
2460 : 0 : p->can_eliminate = 0;
2461 : : break;
2462 : :
2463 : : default:
2464 : : break;
2465 : : }
2466 : : }
2467 : :
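/* For example (offsets invented): on a target that eliminates the frame
   pointer into the stack pointer, an insn that pushes four bytes changes
   the elimination offset from 0 to 4.  If a CODE_LABEL is first reached
   on the fall-through path with offset 4, that value is recorded in
   offsets_at[]; if a branch later reaches the same label with offset 0,
   the comparison in set_label_offsets disagrees and can_eliminate is
   cleared, since no single constant correction would be valid at that
   label.  */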
2468 : : /* This function examines every reg that occurs in X and adjusts the
2469 : : costs for its elimination which are gathered by IRA. INSN is the
2470 : : insn in which X occurs. We do not recurse into MEM expressions. */
2471 : :
2472 : : static void
2473 : 0 : note_reg_elim_costly (const_rtx x, rtx insn)
2474 : : {
2475 : 0 : subrtx_iterator::array_type array;
2476 : 0 : FOR_EACH_SUBRTX (iter, array, x, NONCONST)
2477 : : {
2478 : 0 : const_rtx x = *iter;
2479 : 0 : if (MEM_P (x))
2480 : 0 : iter.skip_subrtxes ();
2481 : 0 : else if (REG_P (x)
2482 : 0 : && REGNO (x) >= FIRST_PSEUDO_REGISTER
2483 : 0 : && reg_equiv_init (REGNO (x))
2484 : 0 : && reg_equiv_invariant (REGNO (x)))
2485 : : {
2486 : 0 : rtx t = reg_equiv_invariant (REGNO (x));
2487 : 0 : rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
2488 : 0 : int cost = set_src_cost (new_rtx, Pmode,
2489 : 0 : optimize_bb_for_speed_p (elim_bb));
2490 : 0 : int freq = REG_FREQ_FROM_BB (elim_bb);
2491 : :
2492 : 0 : if (cost != 0)
2493 : 0 : ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
2494 : : }
2495 : : }
2496 : 0 : }
2497 : :
2498 : : /* Scan X and replace any eliminable registers (such as fp) with a
2499 : : replacement (such as sp), plus an offset.
2500 : :
2501 : : MEM_MODE is the mode of an enclosing MEM. We need this to know how
2502 : : much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2503 : : MEM, we are allowed to replace a sum of a register and the constant zero
2504 : : with the register, which we cannot do outside a MEM. In addition, we need
2505 : : to record the fact that a register is referenced outside a MEM.
2506 : :
2507 : : If INSN is an insn, it is the insn containing X. If we replace a REG
2508 : : in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2509 : : CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2510 : : the REG is being modified.
2511 : :
2512 : : Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2513 : : That's used when we eliminate in expressions stored in notes.
2514 : : This means, do not set ref_outside_mem even if the reference
2515 : : is outside of MEMs.
2516 : :
2517 : : If FOR_COSTS is true, we are being called before reload in order to
2518 : : estimate the costs of keeping registers with an equivalence unallocated.
2519 : :
2520 : : REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2521 : : replacements done assuming all offsets are at their initial values. If
2522 : : they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2523 : : encounter, return the actual location so that find_reloads will do
2524 : : the proper thing. */
2525 : :
2526 : : static rtx
2527 : 0 : eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
2528 : : bool may_use_invariant, bool for_costs)
2529 : : {
2530 : 0 : enum rtx_code code = GET_CODE (x);
2531 : 0 : struct elim_table *ep;
2532 : 0 : int regno;
2533 : 0 : rtx new_rtx;
2534 : 0 : int i, j;
2535 : 0 : const char *fmt;
2536 : 0 : int copied = 0;
2537 : :
2538 : 0 : if (! current_function_decl)
2539 : : return x;
2540 : :
2541 : 0 : switch (code)
2542 : : {
2543 : : CASE_CONST_ANY:
2544 : : case CONST:
2545 : : case SYMBOL_REF:
2546 : : case CODE_LABEL:
2547 : : case PC:
2548 : : case ASM_INPUT:
2549 : : case ADDR_VEC:
2550 : : case ADDR_DIFF_VEC:
2551 : : case RETURN:
2552 : : return x;
2553 : :
2554 : 0 : case REG:
2555 : 0 : regno = REGNO (x);
2556 : :
2557 : : /* First handle the case where we encounter a bare register that
2558 : : is eliminable. Replace it with a PLUS. */
2559 : 0 : if (regno < FIRST_PSEUDO_REGISTER)
2560 : : {
2561 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2562 : : ep++)
2563 : 0 : if (ep->from_rtx == x && ep->can_eliminate)
2564 : 0 : return plus_constant (Pmode, ep->to_rtx, ep->previous_offset);
2565 : :
2566 : : }
2567 : 0 : else if (reg_renumber && reg_renumber[regno] < 0
2568 : 0 : && reg_equivs
2569 : 0 : && reg_equiv_invariant (regno))
2570 : : {
2571 : 0 : if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2572 : 0 : return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
2573 : 0 : mem_mode, insn, true, for_costs);
2574 : : /* There exists at least one use of REGNO that cannot be
2575 : : eliminated. Prevent the defining insn from being deleted. */
2576 : 0 : reg_equiv_init (regno) = NULL;
2577 : 0 : if (!for_costs)
2578 : 0 : alter_reg (regno, -1, true);
2579 : : }
2580 : : return x;
2581 : :
2582 : : /* You might think handling MINUS in a manner similar to PLUS is a
2583 : : good idea. It is not. It has been tried multiple times and every
2584 : : time the change has had to be reverted.
2585 : :
2586 : : Other parts of reload know a PLUS is special (gen_reload for example)
2587 : : and require special code to handle a reloaded PLUS operand.
2588 : :
2589 : : Also consider backends where the flags register is clobbered by a
2590 : : MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2591 : : lea instruction comes to mind). If we try to reload a MINUS, we
2592 : : may kill the flags register that was holding a useful value.
2593 : :
2594 : : So, please before trying to handle MINUS, consider reload as a
2595 : : whole instead of this little section as well as the backend issues. */
2596 : 0 : case PLUS:
2597 : : /* If this is the sum of an eliminable register and a constant, rework
2598 : : the sum. */
2599 : 0 : if (REG_P (XEXP (x, 0))
2600 : 0 : && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2601 : 0 : && CONSTANT_P (XEXP (x, 1)))
2602 : : {
2603 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2604 : : ep++)
2605 : 0 : if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2606 : : {
2607 : : /* The only time we want to replace a PLUS with a REG (this
2608 : : occurs when the constant operand of the PLUS is the negative
2609 : : of the offset) is when we are inside a MEM. We won't want
2610 : : to do so at other times because that would change the
2611 : : structure of the insn in a way that reload can't handle.
2612 : : We special-case the commonest situation in
2613 : : eliminate_regs_in_insn, so just replace a PLUS with a
2614 : : PLUS here, unless inside a MEM. In DEBUG_INSNs, it is
2615 : : always ok to replace a PLUS with just a REG. */
2616 : 0 : if ((mem_mode != 0 || (insn && DEBUG_INSN_P (insn)))
2617 : 0 : && CONST_INT_P (XEXP (x, 1))
2618 : 0 : && known_eq (INTVAL (XEXP (x, 1)), -ep->previous_offset))
2619 : 0 : return ep->to_rtx;
2620 : : else
2621 : 0 : return gen_rtx_PLUS (Pmode, ep->to_rtx,
2622 : : plus_constant (Pmode, XEXP (x, 1),
2623 : : ep->previous_offset));
2624 : : }
2625 : :
2626 : : /* If the register is not eliminable, we are done since the other
2627 : : operand is a constant. */
2628 : : return x;
2629 : : }
2630 : :
2631 : : /* If this is part of an address, we want to bring any constant to the
2632 : : outermost PLUS. We will do this by doing register replacement in
2633 : : our operands and seeing if a constant shows up in one of them.
2634 : :
2635 : : Note that there is no risk of modifying the structure of the insn,
2636 : : since we only get called for its operands, thus we are either
2637 : : modifying the address inside a MEM, or something like an address
2638 : : operand of a load-address insn. */
2639 : :
2640 : 0 : {
2641 : 0 : rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2642 : : for_costs);
2643 : 0 : rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2644 : : for_costs);
2645 : :
2646 : 0 : if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2647 : : {
2648 : : /* If one side is a PLUS and the other side is a pseudo that
2649 : : didn't get a hard register but has a reg_equiv_constant,
2650 : : we must replace the constant here since it may no longer
2651 : : be in the position of any operand. */
2652 : 0 : if (GET_CODE (new0) == PLUS && REG_P (new1)
2653 : 0 : && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2654 : 0 : && reg_renumber[REGNO (new1)] < 0
2655 : 0 : && reg_equivs
2656 : 0 : && reg_equiv_constant (REGNO (new1)) != 0)
2657 : : new1 = reg_equiv_constant (REGNO (new1));
2658 : 0 : else if (GET_CODE (new1) == PLUS && REG_P (new0)
2659 : 0 : && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2660 : 0 : && reg_renumber[REGNO (new0)] < 0
2661 : 0 : && reg_equiv_constant (REGNO (new0)) != 0)
2662 : : new0 = reg_equiv_constant (REGNO (new0));
2663 : :
2664 : 0 : new_rtx = form_sum (GET_MODE (x), new0, new1);
2665 : :
2666 : : /* As above, if we are not inside a MEM we do not want to
2667 : : turn a PLUS into something else. We might try to do so here
2668 : : for an addition of 0 if we aren't optimizing. */
2669 : 0 : if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2670 : 0 : return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2671 : : else
2672 : : return new_rtx;
2673 : : }
2674 : : }
2675 : : return x;
2676 : :
2677 : 0 : case MULT:
2678 : : /* If this is the product of an eliminable register and a
2679 : : constant, apply the distribute law and move the constant out
2680 : : so that we have (plus (mult ..) ..). This is needed in order
2681 : : to keep load-address insns valid. This case is pathological.
2682 : : We ignore the possibility of overflow here. */
2683 : 0 : if (REG_P (XEXP (x, 0))
2684 : 0 : && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2685 : 0 : && CONST_INT_P (XEXP (x, 1)))
2686 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2687 : : ep++)
2688 : 0 : if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2689 : : {
2690 : 0 : if (! mem_mode
2691 : : /* Refs inside notes or in DEBUG_INSNs don't count for
2692 : : this purpose. */
2693 : 0 : && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2694 : 0 : || GET_CODE (insn) == INSN_LIST
2695 : 0 : || DEBUG_INSN_P (insn))))
2696 : 0 : ep->ref_outside_mem = 1;
2697 : :
2698 : 0 : return
2699 : 0 : plus_constant (Pmode,
2700 : 0 : gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2701 : 0 : ep->previous_offset * INTVAL (XEXP (x, 1)));
2702 : : }
2703 : :
2704 : : /* fall through */
2705 : :
2706 : 0 : case CALL:
2707 : 0 : case COMPARE:
2708 : : /* See comments before PLUS about handling MINUS. */
2709 : 0 : case MINUS:
2710 : 0 : case DIV: case UDIV:
2711 : 0 : case MOD: case UMOD:
2712 : 0 : case AND: case IOR: case XOR:
2713 : 0 : case ROTATERT: case ROTATE:
2714 : 0 : case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2715 : 0 : case NE: case EQ:
2716 : 0 : case GE: case GT: case GEU: case GTU:
2717 : 0 : case LE: case LT: case LEU: case LTU:
2718 : 0 : {
2719 : 0 : rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2720 : : for_costs);
2721 : 0 : rtx new1 = XEXP (x, 1)
2722 : 0 : ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
2723 : 0 : for_costs) : 0;
2724 : :
2725 : 0 : if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2726 : 0 : return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2727 : : }
2728 : : return x;
2729 : :
2730 : 0 : case EXPR_LIST:
2731 : : /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2732 : 0 : if (XEXP (x, 0))
2733 : : {
2734 : 0 : new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2735 : : for_costs);
2736 : 0 : if (new_rtx != XEXP (x, 0))
2737 : : {
2738 : : /* If this is a REG_DEAD note, it is not valid anymore.
2739 : : Using the eliminated version could result in creating a
2740 : : REG_DEAD note for the stack or frame pointer. */
2741 : 0 : if (REG_NOTE_KIND (x) == REG_DEAD)
2742 : 0 : return (XEXP (x, 1)
2743 : 0 : ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2744 : : for_costs)
2745 : : : NULL_RTX);
2746 : :
2747 : 0 : x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2748 : : }
2749 : : }
2750 : :
2751 : : /* fall through */
2752 : :
2753 : 0 : case INSN_LIST:
2754 : 0 : case INT_LIST:
2755 : : /* Now do eliminations in the rest of the chain. If this was
2756 : : an EXPR_LIST, this might result in allocating more memory than is
2757 : : strictly needed, but it simplifies the code. */
2758 : 0 : if (XEXP (x, 1))
2759 : : {
2760 : 0 : new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2761 : : for_costs);
2762 : 0 : if (new_rtx != XEXP (x, 1))
2763 : 0 : return
2764 : 0 : gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2765 : : }
2766 : : return x;
2767 : :
2768 : : case PRE_INC:
2769 : : case POST_INC:
2770 : : case PRE_DEC:
2771 : : case POST_DEC:
2772 : : /* We do not support elimination of a register that is modified.
2773 : : elimination_effects has already made sure that this does not
2774 : : happen. */
2775 : : return x;
2776 : :
2777 : 0 : case PRE_MODIFY:
2778 : 0 : case POST_MODIFY:
2779 : : /* We do not support elimination of a register that is modified.
2780 : : elimination_effects has already made sure that this does not
2781 : : happen. The only remaining case we need to consider here is
2782 : : that the increment value may be an eliminable register. */
2783 : 0 : if (GET_CODE (XEXP (x, 1)) == PLUS
2784 : 0 : && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2785 : : {
2786 : 0 : rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2787 : : insn, true, for_costs);
2788 : :
2789 : 0 : if (new_rtx != XEXP (XEXP (x, 1), 1))
2790 : 0 : return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2791 : : gen_rtx_PLUS (GET_MODE (x),
2792 : : XEXP (x, 0), new_rtx));
2793 : : }
2794 : : return x;
2795 : :
2796 : 0 : case STRICT_LOW_PART:
2797 : 0 : case NEG: case NOT:
2798 : 0 : case SIGN_EXTEND: case ZERO_EXTEND:
2799 : 0 : case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2800 : 0 : case FLOAT: case FIX:
2801 : 0 : case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2802 : 0 : case ABS:
2803 : 0 : case SQRT:
2804 : 0 : case FFS:
2805 : 0 : case CLZ:
2806 : 0 : case CTZ:
2807 : 0 : case POPCOUNT:
2808 : 0 : case PARITY:
2809 : 0 : case BSWAP:
2810 : 0 : new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2811 : : for_costs);
2812 : 0 : if (new_rtx != XEXP (x, 0))
2813 : 0 : return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2814 : : return x;
2815 : :
2816 : 0 : case SUBREG:
2817 : : /* Similar to above processing, but preserve SUBREG_BYTE.
2818 : : Convert (subreg (mem)) to (mem) if not paradoxical.
2819 : : Also, if we have a non-paradoxical (subreg (pseudo)) and the
2820 : : pseudo didn't get a hard reg, we must replace this with the
2821 : : eliminated version of the memory location because push_reload
2822 : : may do the replacement in certain circumstances. */
2823 : 0 : if (REG_P (SUBREG_REG (x))
2824 : 0 : && !paradoxical_subreg_p (x)
2825 : 0 : && reg_equivs
2826 : 0 : && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
2827 : : {
2828 : : new_rtx = SUBREG_REG (x);
2829 : : }
2830 : : else
2831 : 0 : new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);
2832 : :
2833 : 0 : if (new_rtx != SUBREG_REG (x))
2834 : : {
2835 : 0 : poly_int64 x_size = GET_MODE_SIZE (GET_MODE (x));
2836 : 0 : poly_int64 new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2837 : :
2838 : 0 : if (MEM_P (new_rtx)
2839 : 0 : && ((partial_subreg_p (GET_MODE (x), GET_MODE (new_rtx))
2840 : : /* On RISC machines, combine can create rtl of the form
2841 : : (set (subreg:m1 (reg:m2 R) 0) ...)
2842 : : where m1 < m2, and expects something interesting to
2843 : : happen to the entire word. Moreover, it will use the
2844 : : (reg:m2 R) later, expecting all bits to be preserved.
2845 : : So if the number of words is the same, preserve the
2846 : : subreg so that push_reload can see it. */
2847 : : && !(WORD_REGISTER_OPERATIONS
2848 : : && known_equal_after_align_down (x_size - 1,
2849 : : new_size - 1,
2850 : : UNITS_PER_WORD)))
2851 : 0 : || known_eq (x_size, new_size))
2852 : : )
2853 : 0 : return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2854 : 0 : else if (insn && GET_CODE (insn) == DEBUG_INSN)
2855 : 0 : return gen_rtx_raw_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2856 : : else
2857 : 0 : return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2858 : : }
2859 : :
2860 : : return x;
2861 : :
2862 : 0 : case MEM:
2863 : : /* Our only special processing is to pass the mode of the MEM to our
2864 : : recursive call and copy the flags. While we are here, handle this
2865 : : case more efficiently. */
2866 : :
2867 : 0 : new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
2868 : : for_costs);
2869 : 0 : if (for_costs
2870 : 0 : && memory_address_p (GET_MODE (x), XEXP (x, 0))
2871 : 0 : && !memory_address_p (GET_MODE (x), new_rtx))
2872 : 0 : note_reg_elim_costly (XEXP (x, 0), insn);
2873 : :
2874 : 0 : return replace_equiv_address_nv (x, new_rtx);
2875 : :
2876 : 0 : case USE:
2877 : : /* Handle insn_list USE that a call to a pure function may generate. */
2878 : 0 : new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
2879 : : for_costs);
2880 : 0 : if (new_rtx != XEXP (x, 0))
2881 : 0 : return gen_rtx_USE (GET_MODE (x), new_rtx);
2882 : : return x;
2883 : :
2884 : 0 : case CLOBBER:
2885 : 0 : case ASM_OPERANDS:
2886 : 0 : gcc_assert (insn && DEBUG_INSN_P (insn));
2887 : : break;
2888 : :
2889 : 0 : case SET:
2890 : 0 : gcc_unreachable ();
2891 : :
2892 : : default:
2893 : : break;
2894 : : }
2895 : :
2896 : : /* Process each of our operands recursively. If any have changed, make a
2897 : : copy of the rtx. */
2898 : 0 : fmt = GET_RTX_FORMAT (code);
2899 : 0 : for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2900 : : {
2901 : 0 : if (*fmt == 'e')
2902 : : {
2903 : 0 : new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
2904 : : for_costs);
2905 : 0 : if (new_rtx != XEXP (x, i) && ! copied)
2906 : : {
2907 : 0 : x = shallow_copy_rtx (x);
2908 : 0 : copied = 1;
2909 : : }
2910 : 0 : XEXP (x, i) = new_rtx;
2911 : : }
2912 : 0 : else if (*fmt == 'E')
2913 : : {
2914 : : int copied_vec = 0;
2915 : 0 : for (j = 0; j < XVECLEN (x, i); j++)
2916 : : {
2917 : 0 : new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
2918 : : for_costs);
2919 : 0 : if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2920 : : {
2921 : 0 : rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2922 : 0 : XVEC (x, i)->elem);
2923 : 0 : if (! copied)
2924 : : {
2925 : 0 : x = shallow_copy_rtx (x);
2926 : 0 : copied = 1;
2927 : : }
2928 : 0 : XVEC (x, i) = new_v;
2929 : 0 : copied_vec = 1;
2930 : : }
2931 : 0 : XVECEXP (x, i, j) = new_rtx;
2932 : : }
2933 : : }
2934 : : }
2935 : :
2936 : : return x;
2937 : : }
2938 : :
2939 : : rtx
2940 : 0 : eliminate_regs (rtx x, machine_mode mem_mode, rtx insn)
2941 : : {
2942 : 0 : if (reg_eliminate == NULL)
2943 : : {
2944 : 0 : gcc_assert (targetm.no_register_allocation);
2945 : : return x;
2946 : : }
2947 : 0 : return eliminate_regs_1 (x, mem_mode, insn, false, false);
2948 : : }
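/* Illustrative sketch (an editorial example, not part of the original
   source): the typical effect of eliminate_regs is to rewrite a reference
   to an eliminable register in terms of its replacement plus the current
   offset.  Assuming an active ap -> sp elimination whose offset is 16,
   an address like

     (mem:SI (plus:SI (reg:SI ap) (const_int 4)))

   would come back as

     (mem:SI (plus:SI (reg:SI sp) (const_int 20)))

   with SImode passed as MEM_MODE to the recursive call on the address.  */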
2949 : :
2950 : : /* Scan rtx X for modifications of elimination target registers. Update
2951 : : the table of eliminables to reflect the changed state. MEM_MODE is
2952 : : the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2953 : :
2954 : : static void
2955 : 0 : elimination_effects (rtx x, machine_mode mem_mode)
2956 : : {
2957 : 0 : enum rtx_code code = GET_CODE (x);
2958 : 0 : struct elim_table *ep;
2959 : 0 : int regno;
2960 : 0 : int i, j;
2961 : 0 : const char *fmt;
2962 : :
2963 : 0 : switch (code)
2964 : : {
2965 : : CASE_CONST_ANY:
2966 : : case CONST:
2967 : : case SYMBOL_REF:
2968 : : case CODE_LABEL:
2969 : : case PC:
2970 : : case ASM_INPUT:
2971 : : case ADDR_VEC:
2972 : : case ADDR_DIFF_VEC:
2973 : : case RETURN:
2974 : : return;
2975 : :
2976 : 0 : case REG:
2977 : 0 : regno = REGNO (x);
2978 : :
2979 : : /* First handle the case where we encounter a bare register that
2980 : : is eliminable. Replace it with a PLUS. */
2981 : 0 : if (regno < FIRST_PSEUDO_REGISTER)
2982 : : {
2983 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2984 : : ep++)
2985 : 0 : if (ep->from_rtx == x && ep->can_eliminate)
2986 : : {
2987 : 0 : if (! mem_mode)
2988 : 0 : ep->ref_outside_mem = 1;
2989 : 0 : return;
2990 : : }
2991 : :
2992 : : }
2993 : 0 : else if (reg_renumber[regno] < 0
2994 : 0 : && reg_equivs
2995 : 0 : && reg_equiv_constant (regno)
2996 : 0 : && ! function_invariant_p (reg_equiv_constant (regno)))
2997 : 0 : elimination_effects (reg_equiv_constant (regno), mem_mode);
2998 : : return;
2999 : :
3000 : 0 : case PRE_INC:
3001 : 0 : case POST_INC:
3002 : 0 : case PRE_DEC:
3003 : 0 : case POST_DEC:
3004 : 0 : case POST_MODIFY:
3005 : 0 : case PRE_MODIFY:
3006 : : /* If we modify the source of an elimination rule, disable it. */
3007 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3008 : 0 : if (ep->from_rtx == XEXP (x, 0))
3009 : 0 : ep->can_eliminate = 0;
3010 : :
3011 : : /* If we modify the target of an elimination rule by adding a constant,
3012 : : update its offset. If we modify the target in any other way, we'll
3013 : : have to disable the rule as well. */
3014 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3015 : 0 : if (ep->to_rtx == XEXP (x, 0))
3016 : : {
3017 : 0 : poly_int64 size = GET_MODE_SIZE (mem_mode);
3018 : :
3019 : : /* If more bytes than MEM_MODE are pushed, account for them. */
3020 : : #ifdef PUSH_ROUNDING
3021 : 0 : if (ep->to_rtx == stack_pointer_rtx)
3022 : 0 : size = PUSH_ROUNDING (size);
3023 : : #endif
3024 : 0 : if (code == PRE_DEC || code == POST_DEC)
3025 : 0 : ep->offset += size;
3026 : 0 : else if (code == PRE_INC || code == POST_INC)
3027 : 0 : ep->offset -= size;
3028 : 0 : else if (code == PRE_MODIFY || code == POST_MODIFY)
3029 : : {
3030 : 0 : if (GET_CODE (XEXP (x, 1)) == PLUS
3031 : 0 : && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
3032 : 0 : && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
3033 : 0 : ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
3034 : : else
3035 : 0 : ep->can_eliminate = 0;
3036 : : }
3037 : : }
3038 : :
3039 : : /* These two aren't unary operators. */
3040 : 0 : if (code == POST_MODIFY || code == PRE_MODIFY)
3041 : : break;
3042 : :
3043 : : /* Fall through to generic unary operation case. */
3044 : 0 : gcc_fallthrough ();
3045 : 0 : case STRICT_LOW_PART:
3046 : 0 : case NEG: case NOT:
3047 : 0 : case SIGN_EXTEND: case ZERO_EXTEND:
3048 : 0 : case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3049 : 0 : case FLOAT: case FIX:
3050 : 0 : case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3051 : 0 : case ABS:
3052 : 0 : case SQRT:
3053 : 0 : case FFS:
3054 : 0 : case CLZ:
3055 : 0 : case CTZ:
3056 : 0 : case POPCOUNT:
3057 : 0 : case PARITY:
3058 : 0 : case BSWAP:
3059 : 0 : elimination_effects (XEXP (x, 0), mem_mode);
3060 : 0 : return;
3061 : :
3062 : 0 : case SUBREG:
3063 : 0 : if (REG_P (SUBREG_REG (x))
3064 : 0 : && !paradoxical_subreg_p (x)
3065 : 0 : && reg_equivs
3066 : 0 : && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
3067 : : return;
3068 : :
3069 : : elimination_effects (SUBREG_REG (x), mem_mode);
3070 : : return;
3071 : :
3072 : 0 : case USE:
3073 : : /* If using a register that is the source of an elimination we still
3074 : : think can be performed, note that it cannot be performed, since we
3075 : : don't know how this register is used. */
3076 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3077 : 0 : if (ep->from_rtx == XEXP (x, 0))
3078 : 0 : ep->can_eliminate = 0;
3079 : :
3080 : 0 : elimination_effects (XEXP (x, 0), mem_mode);
3081 : 0 : return;
3082 : :
3083 : 0 : case CLOBBER:
3084 : : /* If clobbering a register that is the replacement register for an
3085 : : elimination we still think can be performed, note that it cannot
3086 : : be performed. Otherwise, we need not be concerned about it. */
3087 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3088 : 0 : if (ep->to_rtx == XEXP (x, 0))
3089 : 0 : ep->can_eliminate = 0;
3090 : :
3091 : 0 : elimination_effects (XEXP (x, 0), mem_mode);
3092 : 0 : return;
3093 : :
3094 : 0 : case SET:
3095 : : /* Check for setting a register that we know about. */
3096 : 0 : if (REG_P (SET_DEST (x)))
3097 : : {
3098 : : /* See if this is setting the replacement register for an
3099 : : elimination.
3100 : :
3101 : : If DEST is the hard frame pointer, we do nothing because we
3102 : : assume that all assignments to the frame pointer are for
3103 : : non-local gotos and are being done at a time when they are valid
3104 : : and do not disturb anything else. Some machines want to
3105 : : eliminate a fake argument pointer (or even a fake frame pointer)
3106 : : with either the real frame or the stack pointer. Assignments to
3107 : : the hard frame pointer must not prevent this elimination. */
3108 : :
3109 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3110 : : ep++)
3111 : 0 : if (ep->to_rtx == SET_DEST (x)
3112 : 0 : && SET_DEST (x) != hard_frame_pointer_rtx)
3113 : : {
3114 : : /* If it is being incremented, adjust the offset. Otherwise,
3115 : : this elimination can't be done. */
3116 : 0 : rtx src = SET_SRC (x);
3117 : :
3118 : 0 : if (GET_CODE (src) == PLUS
3119 : 0 : && XEXP (src, 0) == SET_DEST (x)
3120 : 0 : && CONST_INT_P (XEXP (src, 1)))
3121 : 0 : ep->offset -= INTVAL (XEXP (src, 1));
3122 : : else
3123 : 0 : ep->can_eliminate = 0;
3124 : : }
3125 : : }
3126 : :
3127 : 0 : elimination_effects (SET_DEST (x), VOIDmode);
3128 : 0 : elimination_effects (SET_SRC (x), VOIDmode);
3129 : 0 : return;
3130 : :
3131 : 0 : case MEM:
3132 : : /* Our only special processing is to pass the mode of the MEM to our
3133 : : recursive call. */
3134 : 0 : elimination_effects (XEXP (x, 0), GET_MODE (x));
3135 : 0 : return;
3136 : :
3137 : : default:
3138 : : break;
3139 : : }
3140 : :
3141 : 0 : fmt = GET_RTX_FORMAT (code);
3142 : 0 : for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3143 : : {
3144 : 0 : if (*fmt == 'e')
3145 : 0 : elimination_effects (XEXP (x, i), mem_mode);
3146 : 0 : else if (*fmt == 'E')
3147 : 0 : for (j = 0; j < XVECLEN (x, i); j++)
3148 : 0 : elimination_effects (XVECEXP (x, i, j), mem_mode);
3149 : : }
3150 : : }
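/* Illustrative sketch (editorial example; the register number is made up):
   for a push such as

     (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 123))

   elimination_effects sees the PRE_DEC of the stack pointer and, for every
   elimination whose target is that register (e.g. fp -> sp), adds the pushed
   size (GET_MODE_SIZE (SImode), rounded by PUSH_ROUNDING where that macro is
   defined) to the elimination's offset, so the tracked fp/sp distance stays
   correct past the push.  */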
3151 : :
3152 : : /* Descend through rtx X and verify that no references to eliminable registers
3153 : : remain. If any do remain, mark the involved register as not
3154 : : eliminable. */
3155 : :
3156 : : static void
3157 : 0 : check_eliminable_occurrences (rtx x)
3158 : : {
3159 : 0 : const char *fmt;
3160 : 0 : int i;
3161 : 0 : enum rtx_code code;
3162 : :
3163 : 0 : if (x == 0)
3164 : : return;
3165 : :
3166 : 0 : code = GET_CODE (x);
3167 : :
3168 : 0 : if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3169 : : {
3170 : 0 : struct elim_table *ep;
3171 : :
3172 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3173 : 0 : if (ep->from_rtx == x)
3174 : 0 : ep->can_eliminate = 0;
3175 : : return;
3176 : : }
3177 : :
3178 : 0 : fmt = GET_RTX_FORMAT (code);
3179 : 0 : for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3180 : : {
3181 : 0 : if (*fmt == 'e')
3182 : 0 : check_eliminable_occurrences (XEXP (x, i));
3183 : 0 : else if (*fmt == 'E')
3184 : : {
3185 : : int j;
3186 : 0 : for (j = 0; j < XVECLEN (x, i); j++)
3187 : 0 : check_eliminable_occurrences (XVECEXP (x, i, j));
3188 : : }
3189 : : }
3190 : : }
3191 : :
3192 : : /* Scan INSN and eliminate all eliminable registers in it.
3193 : :
3194 : : If REPLACE is nonzero, do the replacement destructively. Also
3195 : : delete the insn as dead if it is setting an eliminable register.
3196 : :
3197 : : If REPLACE is zero, do all our allocations in reload_obstack.
3198 : :
3199 : : If no eliminations were done and this insn doesn't require any elimination
3200 : : processing (these are not identical conditions: it might be updating sp,
3201 : : but not referencing fp; this needs to be seen during reload_as_needed so
3202 : : that the offset between fp and sp can be taken into consideration), zero
3203 : : is returned. Otherwise, 1 is returned. */
3204 : :
3205 : : static int
3206 : 0 : eliminate_regs_in_insn (rtx_insn *insn, int replace)
3207 : : {
3208 : 0 : int icode = recog_memoized (insn);
3209 : 0 : rtx old_body = PATTERN (insn);
3210 : 0 : int insn_is_asm = asm_noperands (old_body) >= 0;
3211 : 0 : rtx old_set = single_set (insn);
3212 : 0 : rtx new_body;
3213 : 0 : int val = 0;
3214 : 0 : int i;
3215 : 0 : rtx substed_operand[MAX_RECOG_OPERANDS];
3216 : 0 : rtx orig_operand[MAX_RECOG_OPERANDS];
3217 : 0 : struct elim_table *ep;
3218 : 0 : rtx plus_src, plus_cst_src;
3219 : :
3220 : 0 : if (! insn_is_asm && icode < 0)
3221 : : {
3222 : 0 : gcc_assert (DEBUG_INSN_P (insn)
3223 : : || GET_CODE (PATTERN (insn)) == USE
3224 : : || GET_CODE (PATTERN (insn)) == CLOBBER
3225 : : || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3226 : 0 : if (DEBUG_BIND_INSN_P (insn))
3227 : 0 : INSN_VAR_LOCATION_LOC (insn)
3228 : 0 : = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3229 : 0 : return 0;
3230 : : }
3231 : :
3232 : : /* We allow one special case which happens to work on all machines we
3233 : : currently support: a single set with the source or a REG_EQUAL
3234 : : note being a PLUS of an eliminable register and a constant. */
3235 : 0 : plus_src = plus_cst_src = 0;
3236 : 0 : if (old_set && REG_P (SET_DEST (old_set)))
3237 : : {
3238 : 0 : if (GET_CODE (SET_SRC (old_set)) == PLUS)
3239 : 0 : plus_src = SET_SRC (old_set);
3240 : : /* First see if the source is of the form (plus (...) CST). */
3241 : 0 : if (plus_src
3242 : 0 : && CONST_INT_P (XEXP (plus_src, 1)))
3243 : : plus_cst_src = plus_src;
3244 : 0 : else if (REG_P (SET_SRC (old_set))
3245 : 0 : || plus_src)
3246 : : {
3247 : : /* Otherwise, see if we have a REG_EQUAL note of the form
3248 : : (plus (...) CST). */
3249 : 0 : rtx links;
3250 : 0 : for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3251 : : {
3252 : 0 : if ((REG_NOTE_KIND (links) == REG_EQUAL
3253 : 0 : || REG_NOTE_KIND (links) == REG_EQUIV)
3254 : 0 : && GET_CODE (XEXP (links, 0)) == PLUS
3255 : 0 : && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3256 : : {
3257 : : plus_cst_src = XEXP (links, 0);
3258 : : break;
3259 : : }
3260 : : }
3261 : : }
3262 : :
3263 : : /* Check that the first operand of the PLUS is a hard reg or
3264 : : the lowpart subreg of one. */
3265 : 0 : if (plus_cst_src)
3266 : : {
3267 : 0 : rtx reg = XEXP (plus_cst_src, 0);
3268 : 0 : if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3269 : 0 : reg = SUBREG_REG (reg);
3270 : :
3271 : 0 : if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3272 : : plus_cst_src = 0;
3273 : : }
3274 : : }
3275 : 0 : if (plus_cst_src)
3276 : : {
3277 : 0 : rtx reg = XEXP (plus_cst_src, 0);
3278 : 0 : poly_int64 offset = INTVAL (XEXP (plus_cst_src, 1));
3279 : :
3280 : 0 : if (GET_CODE (reg) == SUBREG)
3281 : 0 : reg = SUBREG_REG (reg);
3282 : :
3283 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3284 : 0 : if (ep->from_rtx == reg && ep->can_eliminate)
3285 : : {
3286 : 0 : rtx to_rtx = ep->to_rtx;
3287 : 0 : offset += ep->offset;
3288 : 0 : offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3289 : :
3290 : 0 : if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3291 : 0 : to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3292 : : to_rtx);
3293 : : /* If we have a nonzero offset, and the source is already
3294 : : a simple REG, the following transformation would
3295 : : increase the cost of the insn by replacing a simple REG
3296 : : with (plus (reg sp) CST). So try only when we already
3297 : : had a PLUS before. */
3298 : 0 : if (known_eq (offset, 0) || plus_src)
3299 : : {
3300 : 0 : rtx new_src = plus_constant (GET_MODE (to_rtx),
3301 : : to_rtx, offset);
3302 : :
3303 : 0 : new_body = old_body;
3304 : 0 : if (! replace)
3305 : : {
3306 : 0 : new_body = copy_insn (old_body);
3307 : 0 : if (REG_NOTES (insn))
3308 : 0 : REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3309 : : }
3310 : 0 : PATTERN (insn) = new_body;
3311 : 0 : old_set = single_set (insn);
3312 : :
3313 : : /* First see if this insn remains valid when we make the
3314 : : change. If not, try to replace the whole pattern with
3315 : : a simple set (this may help if the original insn was a
3316 : : PARALLEL that was only recognized as single_set due to
3317 : : REG_UNUSED notes). If this isn't valid either, keep
3318 : : the INSN_CODE the same and let reload fix it up. */
3319 : 0 : if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3320 : : {
3321 : 0 : rtx new_pat = gen_rtx_SET (SET_DEST (old_set), new_src);
3322 : :
3323 : 0 : if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3324 : 0 : SET_SRC (old_set) = new_src;
3325 : : }
3326 : : }
3327 : : else
3328 : : break;
3329 : :
3330 : 0 : val = 1;
3331 : : /* This can't have an effect on elimination offsets, so skip right
3332 : : to the end. */
3333 : 0 : goto done;
3334 : : }
3335 : : }
3336 : :
3337 : : /* Determine the effects of this insn on elimination offsets. */
3338 : 0 : elimination_effects (old_body, VOIDmode);
3339 : :
3340 : : /* Eliminate all eliminable registers occurring in operands that
3341 : : can be handled by reload. */
3342 : 0 : extract_insn (insn);
3343 : 0 : for (i = 0; i < recog_data.n_operands; i++)
3344 : : {
3345 : 0 : orig_operand[i] = recog_data.operand[i];
3346 : 0 : substed_operand[i] = recog_data.operand[i];
3347 : :
3348 : : /* For an asm statement, every operand is eliminable. */
3349 : 0 : if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3350 : : {
3351 : 0 : bool is_set_src, in_plus;
3352 : :
3353 : : /* Check for setting a register that we know about. */
3354 : 0 : if (recog_data.operand_type[i] != OP_IN
3355 : 0 : && REG_P (orig_operand[i]))
3356 : : {
3357 : : /* If we are assigning to a register that can be eliminated, it
3358 : : must be as part of a PARALLEL, since the code above handles
3359 : : single SETs. We must indicate that we can no longer
3360 : : eliminate this reg. */
3361 : 0 : for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS];
3362 : : ep++)
3363 : 0 : if (ep->from_rtx == orig_operand[i])
3364 : 0 : ep->can_eliminate = 0;
3365 : : }
3366 : :
3367 : : /* Companion to the above plus substitution, we can allow
3368 : : invariants as the source of a plain move. */
3369 : 0 : is_set_src = false;
3370 : 0 : if (old_set
3371 : 0 : && recog_data.operand_loc[i] == &SET_SRC (old_set))
3372 : 0 : is_set_src = true;
3373 : 0 : in_plus = false;
3374 : 0 : if (plus_src
3375 : 0 : && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3376 : 0 : || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3377 : 0 : in_plus = true;
3378 : :
3379 : 0 : substed_operand[i]
3380 : 0 : = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3381 : : replace ? insn : NULL_RTX,
3382 : 0 : is_set_src || in_plus, false);
3383 : 0 : if (substed_operand[i] != orig_operand[i])
3384 : 0 : val = 1;
3385 : : /* Terminate the search in check_eliminable_occurrences at
3386 : : this point. */
3387 : 0 : *recog_data.operand_loc[i] = 0;
3388 : :
3389 : : /* If an output operand changed from a REG to a MEM and INSN is an
3390 : : insn, write a CLOBBER insn. */
3391 : 0 : if (recog_data.operand_type[i] != OP_IN
3392 : 0 : && REG_P (orig_operand[i])
3393 : 0 : && MEM_P (substed_operand[i])
3394 : 0 : && replace)
3395 : 0 : emit_insn_after (gen_clobber (orig_operand[i]), insn);
3396 : : }
3397 : : }
3398 : :
3399 : 0 : for (i = 0; i < recog_data.n_dups; i++)
3400 : 0 : *recog_data.dup_loc[i]
3401 : 0 : = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3402 : :
3403 : : /* If any eliminable registers still occur in OLD_BODY, they aren't eliminable anymore. */
3404 : 0 : check_eliminable_occurrences (old_body);
3405 : :
3406 : : /* Substitute the operands; the new values are in the substed_operand
3407 : : array. */
3408 : 0 : for (i = 0; i < recog_data.n_operands; i++)
3409 : 0 : *recog_data.operand_loc[i] = substed_operand[i];
3410 : 0 : for (i = 0; i < recog_data.n_dups; i++)
3411 : 0 : *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3412 : :
3413 : : /* If we are replacing a body that was a (set X (plus Y Z)), try to
3414 : : re-recognize the insn. We do this in case we had a simple addition
3415 : : but now can do this as a load-address. This saves an insn in this
3416 : : common case.
3417 : : If re-recognition fails, the old insn code number will still be used,
3418 : : and some register operands may have changed into PLUS expressions.
3419 : : These will be handled by find_reloads by loading them into a register
3420 : : again. */
3421 : :
3422 : 0 : if (val)
3423 : : {
3424 : : /* If we aren't replacing things permanently and we changed something,
3425 : : make another copy to ensure that all the RTL is new. Otherwise
3426 : : things can go wrong if find_reloads swaps commutative operands
3427 : : and one is inside RTL that has been copied while the other is not. */
3428 : 0 : new_body = old_body;
3429 : 0 : if (! replace)
3430 : : {
3431 : 0 : new_body = copy_insn (old_body);
3432 : 0 : if (REG_NOTES (insn))
3433 : 0 : REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3434 : : }
3435 : 0 : PATTERN (insn) = new_body;
3436 : :
3437 : : /* If we had a move insn but now we don't, rerecognize it. This will
3438 : : cause spurious re-recognition if the old move had a PARALLEL since
3439 : : the new one still will, but we can't call single_set without
3440 : : having put NEW_BODY into the insn and the re-recognition won't
3441 : : hurt in this rare case. */
3442 : : /* ??? Why this huge if statement - why don't we just rerecognize the
3443 : : thing always? */
3444 : 0 : if (! insn_is_asm
3445 : 0 : && old_set != 0
3446 : 0 : && ((REG_P (SET_SRC (old_set))
3447 : 0 : && (GET_CODE (new_body) != SET
3448 : 0 : || !REG_P (SET_SRC (new_body))))
3449 : : /* If this was a load from or store to memory, compare
3450 : : the MEM in recog_data.operand to the one in the insn.
3451 : : If they are not equal, then rerecognize the insn. */
3452 : : || (old_set != 0
3453 : 0 : && ((MEM_P (SET_SRC (old_set))
3454 : 0 : && SET_SRC (old_set) != recog_data.operand[1])
3455 : 0 : || (MEM_P (SET_DEST (old_set))
3456 : 0 : && SET_DEST (old_set) != recog_data.operand[0])))
3457 : : /* If this was an add insn before, rerecognize. */
3458 : 0 : || GET_CODE (SET_SRC (old_set)) == PLUS))
3459 : : {
3460 : 0 : int new_icode = recog (PATTERN (insn), insn, 0);
3461 : 0 : if (new_icode >= 0)
3462 : 0 : INSN_CODE (insn) = new_icode;
3463 : : }
3464 : : }
3465 : :
3466 : : /* Restore the old body. If there were any changes to it, we made a copy
3467 : : of it while the changes were still in place, so we'll correctly return
3468 : : a modified insn below. */
3469 : 0 : if (! replace)
3470 : : {
3471 : : /* Restore the old body. */
3472 : 0 : for (i = 0; i < recog_data.n_operands; i++)
3473 : : /* Restoring a top-level match_parallel would clobber the new_body
3474 : : we installed in the insn. */
3475 : 0 : if (recog_data.operand_loc[i] != &PATTERN (insn))
3476 : 0 : *recog_data.operand_loc[i] = orig_operand[i];
3477 : 0 : for (i = 0; i < recog_data.n_dups; i++)
3478 : 0 : *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3479 : : }
3480 : :
3481 : : /* Update all elimination pairs to reflect the status after the current
3482 : : insn. The changes we make were determined by the earlier call to
3483 : : elimination_effects.
3484 : :
3485 : : We also detect cases where register elimination cannot be done,
3486 : : namely, if a register would be both changed and referenced outside a MEM
3487 : : in the resulting insn since such an insn is often undefined and, even if
3488 : : not, we cannot know what meaning will be given to it. Note that it is
3489 : : valid to have a register used in an address in an insn that changes it
3490 : : (presumably with a pre- or post-increment or decrement).
3491 : :
3492 : : If anything changes, return nonzero. */
3493 : :
3494 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3495 : : {
3496 : 0 : if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem)
3497 : 0 : ep->can_eliminate = 0;
3498 : :
3499 : 0 : ep->ref_outside_mem = 0;
3500 : :
3501 : 0 : if (maybe_ne (ep->previous_offset, ep->offset))
3502 : 0 : val = 1;
3503 : : }
3504 : :
3505 : 0 : done:
3506 : : /* If we changed something, perform elimination in REG_NOTES. This is
3507 : : needed even when REPLACE is zero because a REG_DEAD note might refer
3508 : : to a register that we eliminate and could cause a different number
3509 : : of spill registers to be needed in the final reload pass than in
3510 : : the pre-passes. */
3511 : 0 : if (val && REG_NOTES (insn) != 0)
3512 : 0 : REG_NOTES (insn)
3513 : 0 : = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
3514 : : false);
3515 : :
3516 : : return val;
3517 : : }
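/* Illustrative sketch (editorial example; register numbers are made up):
   the single-set special case above rewrites

     (set (reg:SI 200) (plus:SI (reg:SI fp) (const_int 8)))

   under an active fp -> sp elimination whose current offset is 32 into

     (set (reg:SI 200) (plus:SI (reg:SI sp) (const_int 40)))

   and then tries to re-recognize the result, e.g. as a load-address insn,
   so no extra reload is needed for the address arithmetic.  */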
3518 : :
3519 : : /* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3520 : : register allocator. INSN is the instruction we need to examine, we perform
3521 : : eliminations in its operands and record cases where eliminating a reg with
3522 : : an invariant equivalence would add extra cost. */
3523 : :
3524 : : #pragma GCC diagnostic push
3525 : : #pragma GCC diagnostic warning "-Wmaybe-uninitialized"
3526 : : static void
3527 : 0 : elimination_costs_in_insn (rtx_insn *insn)
3528 : : {
3529 : 0 : int icode = recog_memoized (insn);
3530 : 0 : rtx old_body = PATTERN (insn);
3531 : 0 : int insn_is_asm = asm_noperands (old_body) >= 0;
3532 : 0 : rtx old_set = single_set (insn);
3533 : 0 : int i;
3534 : 0 : rtx orig_operand[MAX_RECOG_OPERANDS];
3535 : 0 : rtx orig_dup[MAX_RECOG_OPERANDS];
3536 : 0 : struct elim_table *ep;
3537 : 0 : rtx plus_src, plus_cst_src;
3538 : 0 : bool sets_reg_p;
3539 : :
3540 : 0 : if (! insn_is_asm && icode < 0)
3541 : : {
3542 : 0 : gcc_assert (DEBUG_INSN_P (insn)
3543 : : || GET_CODE (PATTERN (insn)) == USE
3544 : : || GET_CODE (PATTERN (insn)) == CLOBBER
3545 : : || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3546 : : return;
3547 : : }
3548 : :
3549 : 0 : if (old_set != 0 && REG_P (SET_DEST (old_set))
3550 : 0 : && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3551 : : {
3552 : : /* Check for setting an eliminable register. */
3553 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3554 : 0 : if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3555 : : return;
3556 : : }
3557 : :
3558 : : /* We allow one special case which happens to work on all machines we
3559 : : currently support: a single set with the source or a REG_EQUAL
3560 : : note being a PLUS of an eliminable register and a constant. */
3561 : 0 : plus_src = plus_cst_src = 0;
3562 : 0 : sets_reg_p = false;
3563 : 0 : if (old_set && REG_P (SET_DEST (old_set)))
3564 : : {
3565 : 0 : sets_reg_p = true;
3566 : 0 : if (GET_CODE (SET_SRC (old_set)) == PLUS)
3567 : 0 : plus_src = SET_SRC (old_set);
3568 : : /* First see if the source is of the form (plus (...) CST). */
3569 : 0 : if (plus_src
3570 : 0 : && CONST_INT_P (XEXP (plus_src, 1)))
3571 : 0 : plus_cst_src = plus_src;
3572 : 0 : else if (REG_P (SET_SRC (old_set))
3573 : : || plus_src)
3574 : : {
3575 : : /* Otherwise, see if we have a REG_EQUAL note of the form
3576 : : (plus (...) CST). */
3577 : 0 : rtx links;
3578 : 0 : for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3579 : : {
3580 : : if ((REG_NOTE_KIND (links) == REG_EQUAL
3581 : : || REG_NOTE_KIND (links) == REG_EQUIV)
3582 : : && GET_CODE (XEXP (links, 0)) == PLUS
3583 : : && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3584 : : {
3585 : : plus_cst_src = XEXP (links, 0);
3586 : : break;
3587 : : }
3588 : : }
3589 : : }
3590 : : }
3591 : :
3592 : : /* Determine the effects of this insn on elimination offsets. */
3593 : 0 : elimination_effects (old_body, VOIDmode);
3594 : :
3595 : : /* Eliminate all eliminable registers occurring in operands that
3596 : : can be handled by reload. */
3597 : 0 : extract_insn (insn);
3598 : 0 : int n_dups = recog_data.n_dups;
3599 : 0 : for (i = 0; i < n_dups; i++)
3600 : 0 : orig_dup[i] = *recog_data.dup_loc[i];
3601 : :
3602 : 0 : int n_operands = recog_data.n_operands;
3603 : 0 : for (i = 0; i < n_operands; i++)
3604 : : {
3605 : 0 : orig_operand[i] = recog_data.operand[i];
3606 : :
3607 : : /* For an asm statement, every operand is eliminable. */
3608 : 0 : if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3609 : : {
3610 : 0 : bool is_set_src, in_plus;
3611 : :
3612 : : /* Check for setting a register that we know about. */
3613 : 0 : if (recog_data.operand_type[i] != OP_IN
3614 : 0 : && REG_P (orig_operand[i]))
3615 : : {
3616 : : /* If we are assigning to a register that can be eliminated, it
3617 : : must be as part of a PARALLEL, since the code above handles
3618 : : single SETs. We must indicate that we can no longer
3619 : : eliminate this reg. */
3620 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3621 : : ep++)
3622 : 0 : if (ep->from_rtx == orig_operand[i])
3623 : 0 : ep->can_eliminate = 0;
3624 : : }
3625 : :
3626 : : /* Companion to the above plus substitution, we can allow
3627 : : invariants as the source of a plain move. */
3628 : 0 : is_set_src = false;
3629 : 0 : if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
3630 : 0 : is_set_src = true;
3631 : 0 : if (is_set_src && !sets_reg_p)
3632 : 0 : note_reg_elim_costly (SET_SRC (old_set), insn);
3633 : 0 : in_plus = false;
3634 : 0 : if (plus_src && sets_reg_p
3635 : 0 : && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3636 : 0 : || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3637 : 0 : in_plus = true;
3638 : :
3639 : 0 : eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3640 : : NULL_RTX,
3641 : 0 : is_set_src || in_plus, true);
3642 : : /* Terminate the search in check_eliminable_occurrences at
3643 : : this point. */
3644 : 0 : *recog_data.operand_loc[i] = 0;
3645 : : }
3646 : : }
3647 : :
3648 : 0 : for (i = 0; i < n_dups; i++)
3649 : 0 : *recog_data.dup_loc[i]
3650 : 0 : = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3651 : :
3652 : : /* If any eliminable registers still occur in OLD_BODY, they aren't eliminable anymore. */
3653 : 0 : check_eliminable_occurrences (old_body);
3654 : :
3655 : : /* Restore the old body. */
3656 : 0 : for (i = 0; i < n_operands; i++)
3657 : 0 : *recog_data.operand_loc[i] = orig_operand[i];
3658 : 0 : for (i = 0; i < n_dups; i++)
3659 : 0 : *recog_data.dup_loc[i] = orig_dup[i];
3660 : :
3661 : : /* Update all elimination pairs to reflect the status after the current
3662 : : insn. The changes we make were determined by the earlier call to
3663 : : elimination_effects. */
3664 : :
3665 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3666 : : {
3667 : 0 : if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem)
3668 : 0 : ep->can_eliminate = 0;
3669 : :
3670 : 0 : ep->ref_outside_mem = 0;
3671 : : }
3672 : :
3673 : : return;
3674 : : }
3675 : : #pragma GCC diagnostic pop
3676 : :
3677 : : /* Loop through all elimination pairs.
3678 : : Recalculate the number not at initial offset.
3679 : :
3680 : : Compute the maximum offset (minimum offset if the stack does not
3681 : : grow downward) for each elimination pair. */
3682 : :
3683 : : static void
3684 : 0 : update_eliminable_offsets (void)
3685 : : {
3686 : 0 : struct elim_table *ep;
3687 : :
3688 : 0 : num_not_at_initial_offset = 0;
3689 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3690 : : {
3691 : 0 : ep->previous_offset = ep->offset;
3692 : 0 : if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset))
3693 : 0 : num_not_at_initial_offset++;
3694 : : }
3695 : 0 : }
3696 : :
3697 : : /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3698 : : replacement we currently believe is valid, mark it as not eliminable if X
3699 : : modifies DEST in any way other than by adding a constant integer to it.
3700 : :
3701 : : If DEST is the frame pointer, we do nothing because we assume that
3702 : : all assignments to the hard frame pointer are nonlocal gotos and are being
3703 : : done at a time when they are valid and do not disturb anything else.
3704 : : Some machines want to eliminate a fake argument pointer with either the
3705 : : frame or stack pointer. Assignments to the hard frame pointer must not
3706 : : prevent this elimination.
3707 : :
3708 : : Called via note_stores from reload before starting its passes to scan
3709 : : the insns of the function. */
3710 : :
3711 : : static void
3712 : 0 : mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3713 : : {
3714 : 0 : unsigned int i;
3715 : :
3716 : : /* A SUBREG of a hard register here is just changing its mode. We should
3717 : : not see a SUBREG of an eliminable hard register, but check just in
3718 : : case. */
3719 : 0 : if (GET_CODE (dest) == SUBREG)
3720 : 0 : dest = SUBREG_REG (dest);
3721 : :
3722 : 0 : if (dest == hard_frame_pointer_rtx)
3723 : : return;
3724 : :
3725 : 0 : for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3726 : 0 : if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3727 : 0 : && (GET_CODE (x) != SET
3728 : 0 : || GET_CODE (SET_SRC (x)) != PLUS
3729 : 0 : || XEXP (SET_SRC (x), 0) != dest
3730 : 0 : || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3731 : : {
3732 : 0 : reg_eliminate[i].can_eliminate_previous
3733 : 0 : = reg_eliminate[i].can_eliminate = 0;
3734 : 0 : num_eliminable--;
3735 : : }
3736 : : }
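/* Illustrative sketch (editorial example; the pseudo number is made up):
   mark_not_eliminable keeps an elimination whose target is the stack pointer
   usable across

     (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -16)))

   because that is just a constant adjustment, but disables it for

     (set (reg:SI sp) (reg:SI 300))

   since the new value of sp can no longer be described as the old value
   plus a compile-time constant.  */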
3737 : :
3738 : : /* Verify that the initial elimination offsets did not change since the
3739 : : last call to set_initial_elim_offsets. This is used to catch cases
3740 : : where something illegal happened during reload_as_needed that could
3741 : : cause incorrect code to be generated if we did not check for it. */
3742 : :
3743 : : static bool
3744 : 0 : verify_initial_elim_offsets (void)
3745 : : {
3746 : 0 : poly_int64 t;
3747 : 0 : struct elim_table *ep;
3748 : :
3749 : 0 : if (!num_eliminable)
3750 : : return true;
3751 : :
3752 : 0 : targetm.compute_frame_layout ();
3753 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3754 : : {
3755 : 0 : INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3756 : 0 : if (maybe_ne (t, ep->initial_offset))
3757 : : return false;
3758 : : }
3759 : :
3760 : : return true;
3761 : : }
3762 : :
3763 : : /* Reset all offsets on eliminable registers to their initial values. */
3764 : :
3765 : : static void
3766 : 0 : set_initial_elim_offsets (void)
3767 : : {
3768 : 0 : struct elim_table *ep = reg_eliminate;
3769 : :
3770 : 0 : targetm.compute_frame_layout ();
3771 : 0 : for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3772 : : {
3773 : 0 : INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3774 : 0 : ep->previous_offset = ep->offset = ep->initial_offset;
3775 : : }
3776 : :
3777 : 0 : num_not_at_initial_offset = 0;
3778 : 0 : }
3779 : :
3780 : : /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3781 : :
3782 : : static void
3783 : 0 : set_initial_eh_label_offset (rtx label)
3784 : : {
3785 : 0 : set_label_offsets (label, NULL, 1);
3786 : 0 : }
3787 : :
3788 : : /* Initialize the known label offsets.
3789 : : Set a known offset for each forced label to be at the initial offset
3790 : : of each elimination. We do this because we assume that all
3791 : : computed jumps occur from a location where each elimination is
3792 : : at its initial offset.
3793 : : For all other labels, show that we don't know the offsets. */
3794 : :
3795 : : static void
3796 : 0 : set_initial_label_offsets (void)
3797 : : {
3798 : 0 : memset (offsets_known_at, 0, num_labels);
3799 : :
3800 : 0 : unsigned int i;
3801 : 0 : rtx_insn *insn;
3802 : 0 : FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)
3803 : 0 : set_label_offsets (insn, NULL, 1);
3804 : :
3805 : 0 : for (rtx_insn_list *x = nonlocal_goto_handler_labels; x; x = x->next ())
3806 : 0 : if (x->insn ())
3807 : 0 : set_label_offsets (x->insn (), NULL, 1);
3808 : :
3809 : 0 : for_each_eh_label (set_initial_eh_label_offset);
3810 : 0 : }
3811 : :
3812 : : /* Set all elimination offsets to the known values for the code label given
3813 : : by INSN. */
3814 : :
3815 : : static void
3816 : 0 : set_offsets_for_label (rtx_insn *insn)
3817 : : {
3818 : 0 : unsigned int i;
3819 : 0 : int label_nr = CODE_LABEL_NUMBER (insn);
3820 : 0 : struct elim_table *ep;
3821 : :
3822 : 0 : num_not_at_initial_offset = 0;
3823 : 0 : for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3824 : : {
3825 : 0 : ep->offset = ep->previous_offset
3826 : 0 : = offsets_at[label_nr - first_label_num][i];
3827 : 0 : if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset))
3828 : 0 : num_not_at_initial_offset++;
3829 : : }
3830 : 0 : }
3831 : :
3832 : : /* See if anything that happened changes which eliminations are valid.
3833 : : For example, on the SPARC, whether or not the frame pointer can
3834 : : be eliminated can depend on what registers have been used. We need
3835 : : not check some conditions again (such as flag_omit_frame_pointer)
3836 : : since they can't have changed. */
3837 : :
3838 : : static void
3839 : 0 : update_eliminables (HARD_REG_SET *pset)
3840 : : {
3841 : 0 : int previous_frame_pointer_needed = frame_pointer_needed;
3842 : 0 : struct elim_table *ep;
3843 : :
3844 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3845 : 0 : if ((ep->from == HARD_FRAME_POINTER_REGNUM
3846 : 0 : && targetm.frame_pointer_required ())
3847 : 0 : || ! targetm.can_eliminate (ep->from, ep->to)
3848 : : )
3849 : 0 : ep->can_eliminate = 0;
3850 : :
3851 : : /* Look for the case where we have discovered that we can't replace
3852 : : register A with register B and that means that we will now be
3853 : : trying to replace register A with register C. This means we can
3854 : : no longer replace register C with register B and we need to disable
3855 : : such an elimination, if it exists. This occurs often with A == ap,
3856 : : B == sp, and C == fp. */
3857 : :
3858 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3859 : : {
3860 : 0 : struct elim_table *op;
3861 : 0 : int new_to = -1;
3862 : :
3863 : 0 : if (! ep->can_eliminate && ep->can_eliminate_previous)
3864 : : {
3865 : : /* Find the current elimination for ep->from, if there is a
3866 : : new one. */
3867 : 0 : for (op = reg_eliminate;
3868 : 0 : op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3869 : 0 : if (op->from == ep->from && op->can_eliminate)
3870 : : {
3871 : 0 : new_to = op->to;
3872 : 0 : break;
3873 : : }
3874 : :
3875 : : /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3876 : : disable it. */
3877 : 0 : for (op = reg_eliminate;
3878 : 0 : op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3879 : 0 : if (op->from == new_to && op->to == ep->to)
3880 : 0 : op->can_eliminate = 0;
3881 : : }
3882 : : }
3883 : :
3884 : : /* See if any registers that we thought we could eliminate the previous
3885 : : time are no longer eliminable. If so, something has changed and we
3886 : : must spill the register. Also, recompute the number of eliminable
3887 : : registers and see if the frame pointer is needed; it is if there is
3888 : : no elimination of the frame pointer that we can perform. */
3889 : :
3890 : 0 : frame_pointer_needed = 1;
3891 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3892 : : {
3893 : 0 : if (ep->can_eliminate
3894 : 0 : && ep->from == FRAME_POINTER_REGNUM
3895 : 0 : && ep->to != HARD_FRAME_POINTER_REGNUM
3896 : 0 : && (! SUPPORTS_STACK_ALIGNMENT
3897 : 0 : || ! crtl->stack_realign_needed))
3898 : 0 : frame_pointer_needed = 0;
3899 : :
3900 : 0 : if (! ep->can_eliminate && ep->can_eliminate_previous)
3901 : : {
3902 : 0 : ep->can_eliminate_previous = 0;
3903 : 0 : SET_HARD_REG_BIT (*pset, ep->from);
3904 : 0 : num_eliminable--;
3905 : : }
3906 : : }
3907 : :
3908 : : /* If we didn't need a frame pointer last time, but we do now, spill
3909 : : the hard frame pointer. */
3910 : 0 : if (frame_pointer_needed && ! previous_frame_pointer_needed)
3911 : 0 : SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3912 : 0 : }
3913 : :
3914 : : /* Call update_eliminables and spill any registers we can't eliminate anymore.
3915 : : Return true iff a register was spilled. */
3916 : :
3917 : : static bool
3918 : 0 : update_eliminables_and_spill (void)
3919 : : {
3920 : 0 : int i;
3921 : 0 : bool did_spill = false;
3922 : 0 : HARD_REG_SET to_spill;
3923 : 0 : CLEAR_HARD_REG_SET (to_spill);
3924 : 0 : update_eliminables (&to_spill);
3925 : 0 : used_spill_regs &= ~to_spill;
3926 : :
3927 : 0 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3928 : 0 : if (TEST_HARD_REG_BIT (to_spill, i))
3929 : : {
3930 : 0 : spill_hard_reg (i, 1);
3931 : 0 : did_spill = true;
3932 : :
3933 : : /* Regardless of the state of spills, if we previously had
3934 : : a register that we thought we could eliminate, but now
3935 : : cannot eliminate, we must run another pass.
3936 : :
3937 : : Consider pseudos which have an entry in reg_equiv_* which
3938 : : reference an eliminable register. We must make another pass
3939 : : to update reg_equiv_* so that we do not substitute in the
3940 : : old value from when we thought the elimination could be
3941 : : performed. */
3942 : : }
3943 : 0 : return did_spill;
3944 : : }
3945 : :
3946 : : /* Return true if X is used as the target register of an elimination. */
3947 : :
3948 : : bool
3949 : 0 : elimination_target_reg_p (rtx x)
3950 : : {
3951 : 0 : struct elim_table *ep;
3952 : :
3953 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3954 : 0 : if (ep->to_rtx == x && ep->can_eliminate)
3955 : : return true;
3956 : :
3957 : : return false;
3958 : : }
3959 : :
3960 : : /* Initialize the table of registers to eliminate.
3961 : : Pre-condition: global flag frame_pointer_needed has been set before
3962 : : calling this function. */
3963 : :
3964 : : static void
3965 : 0 : init_elim_table (void)
3966 : : {
3967 : 0 : struct elim_table *ep;
3968 : 0 : const struct elim_table_1 *ep1;
3969 : :
3970 : 0 : if (!reg_eliminate)
3971 : 0 : reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
3972 : :
3973 : 0 : num_eliminable = 0;
3974 : :
3975 : 0 : for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3976 : 0 : ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3977 : : {
3978 : 0 : ep->from = ep1->from;
3979 : 0 : ep->to = ep1->to;
3980 : 0 : ep->can_eliminate = ep->can_eliminate_previous
3981 : 0 : = (targetm.can_eliminate (ep->from, ep->to)
3982 : 0 : && ! (ep->to == STACK_POINTER_REGNUM
3983 : 0 : && frame_pointer_needed
3984 : 0 : && (! SUPPORTS_STACK_ALIGNMENT
3985 : 0 : || ! stack_realign_fp)));
3986 : : }
3987 : :
3988 : : /* Count the number of eliminable registers and build the FROM and TO
3989 : : REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
3990 : : gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3991 : : We depend on this. */
3992 : 0 : for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3993 : : {
3994 : 0 : num_eliminable += ep->can_eliminate;
3995 : 0 : ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3996 : 0 : ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3997 : : }
3998 : 0 : }
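/* Illustrative sketch (editorial example): reg_eliminate_1 is built from the
   target's ELIMINABLE_REGS macro, which on a typical port looks roughly like

     #define ELIMINABLE_REGS                              \
       { { ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },  \
         { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM },  \
         { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM } }

   init_elim_table copies each (from, to) pair, asks targetm.can_eliminate
   for its initial status, and materializes from_rtx/to_rtx as Pmode hard
   registers.  The exact set of pairs is target-specific.  */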
3999 : :
4000 : : /* Find all the pseudo registers that didn't get hard regs
4001 : : but do have known equivalent constants or memory slots.
4002 : : These include parameters (known equivalent to parameter slots)
4003 : : and cse'd or loop-moved constant memory addresses.
4004 : :
4005 : : Record constant equivalents in reg_equiv_constant
4006 : : so they will be substituted by find_reloads.
4007 : : Record memory equivalents in reg_mem_equiv so they can
4008 : : be substituted eventually by altering the REG-rtx's. */
4009 : :
4010 : : static void
4011 : 0 : init_eliminable_invariants (rtx_insn *first, bool do_subregs)
4012 : : {
4013 : 0 : int i;
4014 : 0 : rtx_insn *insn;
4015 : :
4016 : 0 : grow_reg_equivs ();
4017 : 0 : if (do_subregs)
4018 : 0 : reg_max_ref_mode = XCNEWVEC (machine_mode, max_regno);
4019 : : else
4020 : 0 : reg_max_ref_mode = NULL;
4021 : :
4022 : 0 : num_eliminable_invariants = 0;
4023 : :
4024 : 0 : first_label_num = get_first_label_num ();
4025 : 0 : num_labels = max_label_num () - first_label_num;
4026 : :
4027 : : /* Allocate the tables used to store offset information at labels. */
4028 : 0 : offsets_known_at = XNEWVEC (char, num_labels);
4029 : 0 : offsets_at = (poly_int64 (*)[NUM_ELIMINABLE_REGS])
4030 : 0 : xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (poly_int64));
4031 : :
4032 : : /* Look for REG_EQUIV notes; record what each pseudo is equivalent
4033 : : to. If DO_SUBREGS is true, also find all paradoxical subregs and
4034 : : find largest such for each pseudo. FIRST is the head of the insn
4035 : : list. */
4036 : :
4037 : 0 : for (insn = first; insn; insn = NEXT_INSN (insn))
4038 : : {
4039 : 0 : rtx set = single_set (insn);
4040 : :
4041 : : /* We may introduce USEs that we want to remove at the end, so
4042 : : we'll mark them with QImode. Make sure there are no
4043 : : previously-marked insns left by say regmove. */
4044 : 0 : if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
4045 : 0 : && GET_MODE (insn) != VOIDmode)
4046 : 0 : PUT_MODE (insn, VOIDmode);
4047 : :
4048 : 0 : if (do_subregs && NONDEBUG_INSN_P (insn))
4049 : 0 : scan_paradoxical_subregs (PATTERN (insn));
4050 : :
4051 : 0 : if (set != 0 && REG_P (SET_DEST (set)))
4052 : : {
4053 : 0 : rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
4054 : 0 : rtx x;
4055 : :
4056 : 0 : if (! note)
4057 : 0 : continue;
4058 : :
4059 : 0 : i = REGNO (SET_DEST (set));
4060 : 0 : x = XEXP (note, 0);
4061 : :
4062 : 0 : if (i <= LAST_VIRTUAL_REGISTER)
4063 : 0 : continue;
4064 : :
4065 : : /* If flag_pic and we have constant, verify it's legitimate. */
4066 : 0 : if (!CONSTANT_P (x)
4067 : 0 : || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
4068 : : {
4069 : : /* It can happen that a REG_EQUIV note contains a MEM
4070 : : that is not a legitimate memory operand. As later
4071 : : stages of reload assume that all addresses found
4072 : : in the reg_equiv_* arrays were originally legitimate,
4073 : : we ignore such REG_EQUIV notes. */
4074 : 0 : if (memory_operand (x, VOIDmode))
4075 : : {
4076 : : /* Always unshare the equivalence, so we can
4077 : : substitute into this insn without touching the
4078 : : equivalence. */
4079 : 0 : reg_equiv_memory_loc (i) = copy_rtx (x);
4080 : : }
4081 : 0 : else if (function_invariant_p (x))
4082 : : {
4083 : 0 : machine_mode mode;
4084 : :
4085 : 0 : mode = GET_MODE (SET_DEST (set));
4086 : 0 : if (GET_CODE (x) == PLUS)
4087 : : {
4088 : : /* This is PLUS of frame pointer and a constant,
4089 : : and might be shared. Unshare it. */
4090 : 0 : reg_equiv_invariant (i) = copy_rtx (x);
4091 : 0 : num_eliminable_invariants++;
4092 : : }
4093 : 0 : else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
4094 : : {
4095 : 0 : reg_equiv_invariant (i) = x;
4096 : 0 : num_eliminable_invariants++;
4097 : : }
4098 : 0 : else if (targetm.legitimate_constant_p (mode, x))
4099 : 0 : reg_equiv_constant (i) = x;
4100 : : else
4101 : : {
4102 : 0 : reg_equiv_memory_loc (i) = force_const_mem (mode, x);
4103 : 0 : if (! reg_equiv_memory_loc (i))
4104 : 0 : reg_equiv_init (i) = NULL;
4105 : : }
4106 : : }
4107 : : else
4108 : : {
4109 : 0 : reg_equiv_init (i) = NULL;
4110 : 0 : continue;
4111 : : }
4112 : : }
4113 : : else
4114 : 0 : reg_equiv_init (i) = NULL;
4115 : : }
4116 : : }
4117 : :
4118 : 0 : if (dump_file)
4119 : 0 : for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4120 : 0 : if (reg_equiv_init (i))
4121 : : {
4122 : 0 : fprintf (dump_file, "init_insns for %u: ", i);
4123 : 0 : print_inline_rtx (dump_file, reg_equiv_init (i), 20);
4124 : 0 : fprintf (dump_file, "\n");
4125 : : }
4126 : 0 : }
4127 : :
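/* Illustrative sketch (editorial example): a pseudo whose defining insn
   carries a REG_EQUIV note for a stack slot, e.g.

     (expr_list:REG_EQUIV (mem:SI (plus:SI (reg:SI ap) (const_int 12))) ...)

   has that MEM recorded in reg_equiv_memory_loc, while a note of the form
   (plus fp const) or a bare frame/arg pointer is recorded in
   reg_equiv_invariant and counted in num_eliminable_invariants, so later
   passes can substitute the equivalence instead of demanding a hard reg.  */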
4128 : : /* Indicate that we no longer have known memory locations or constants.
4129 : : Free all data involved in tracking these. */
4130 : :
4131 : : static void
4132 : 0 : free_reg_equiv (void)
4133 : : {
4134 : 0 : int i;
4135 : :
4136 : 0 : free (offsets_known_at);
4137 : 0 : free (offsets_at);
4138 : 0 : offsets_at = 0;
4139 : 0 : offsets_known_at = 0;
4140 : :
4141 : 0 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4142 : 0 : if (reg_equiv_alt_mem_list (i))
4143 : 0 : free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i));
4144 : 0 : vec_free (reg_equivs);
4145 : 0 : }
4146 : :
4147 : : /* Kick all pseudos out of hard register REGNO.
4148 : :
4149 : : If CANT_ELIMINATE is nonzero, it means that we are doing this spill
4150 : : because we found we can't eliminate some register. In that case, no pseudos
4151 : : are allowed to be in the register, even if they are only in a block that
4152 : : doesn't require spill registers, unlike the case when we are spilling this
4153 : : hard reg to produce another spill register.
4154 : :
4155 : : Any pseudos that needed to be kicked out are recorded in spilled_pseudos. */
4156 : :
4157 : : static void
4158 : 0 : spill_hard_reg (unsigned int regno, int cant_eliminate)
4159 : : {
4160 : 0 : int i;
4161 : :
4162 : 0 : if (cant_eliminate)
4163 : : {
4164 : 0 : SET_HARD_REG_BIT (bad_spill_regs_global, regno);
4165 : 0 : df_set_regs_ever_live (regno, true);
4166 : : }
4167 : :
4168 : : /* Spill every pseudo reg that was allocated to this reg
4169 : : or to something that overlaps this reg. */
4170 : :
4171 : 0 : for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4172 : 0 : if (reg_renumber[i] >= 0
4173 : 0 : && (unsigned int) reg_renumber[i] <= regno
4174 : 0 : && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
4175 : 0 : SET_REGNO_REG_SET (&spilled_pseudos, i);
4176 : 0 : }
4177 : :
4178 : : /* After spill_hard_reg was called and/or find_reload_regs was run for all
4179 : : insns that need reloads, this function is used to actually spill pseudo
4180 : : registers and try to reallocate them. It also sets up the spill_regs
4181 : : array for use by choose_reload_regs.
4182 : :
4183 : : GLOBAL nonzero means we should attempt to reallocate any pseudo registers
4184 : : that we displace from hard registers. */
4185 : :
4186 : : static int
4187 : 0 : finish_spills (int global)
4188 : : {
4189 : 0 : class insn_chain *chain;
4190 : 0 : int something_changed = 0;
4191 : 0 : unsigned i;
4192 : 0 : reg_set_iterator rsi;
4193 : :
4194 : : /* Build the spill_regs array for the function. */
4195 : : /* If there are some registers still to eliminate and one of the spill regs
4196 : : wasn't ever used before, additional stack space may have to be
4197 : : allocated to store this register. Thus, we may have changed the offset
4198 : : between the stack and frame pointers, so mark that something has changed.
4199 : :
4200 : : One might think that we need only set SOMETHING_CHANGED to 1 if this is a call-used
4201 : : register. However, the set of registers that must be saved by the
4202 : : prologue is not identical to the call-used set. For example, the
4203 : : register used by the call insn for the return PC is a call-used register,
4204 : : but must be saved by the prologue. */
4205 : :
4206 : 0 : n_spills = 0;
4207 : 0 : for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4208 : 0 : if (TEST_HARD_REG_BIT (used_spill_regs, i))
4209 : : {
4210 : 0 : spill_reg_order[i] = n_spills;
4211 : 0 : spill_regs[n_spills++] = i;
4212 : 0 : if (num_eliminable && ! df_regs_ever_live_p (i))
4213 : : something_changed = 1;
4214 : 0 : df_set_regs_ever_live (i, true);
4215 : : }
4216 : : else
4217 : 0 : spill_reg_order[i] = -1;
4218 : :
4219 : 0 : EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
4220 : 0 : if (reg_renumber[i] >= 0)
4221 : : {
4222 : 0 : SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
4223 : : /* Mark it as no longer having a hard register home. */
4224 : 0 : reg_renumber[i] = -1;
4225 : 0 : if (ira_conflicts_p)
4226 : : /* Inform IRA about the change. */
4227 : 0 : ira_mark_allocation_change (i);
4228 : : /* We will need to scan everything again. */
4229 : : something_changed = 1;
4230 : : }
4231 : :
4232 : : /* Retry global register allocation if possible. */
4233 : 0 : if (global && ira_conflicts_p)
4234 : : {
4235 : 0 : unsigned int n;
4236 : :
4237 : 0 : memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
4238 : : /* For every insn that needs reloads, set the registers used as spill
4239 : : regs in pseudo_forbidden_regs for every pseudo live across the
4240 : : insn. */
4241 : 0 : for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
4242 : : {
4243 : 0 : EXECUTE_IF_SET_IN_REG_SET
4244 : : (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
4245 : : {
4246 : 0 : pseudo_forbidden_regs[i] |= chain->used_spill_regs;
4247 : : }
4248 : 0 : EXECUTE_IF_SET_IN_REG_SET
4249 : : (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
4250 : : {
4251 : 0 : pseudo_forbidden_regs[i] |= chain->used_spill_regs;
4252 : : }
4253 : : }
4254 : :
4255 : : /* Retry allocating the pseudos spilled in IRA and the
4256 : : reload. For each reg, merge the various reg sets that
4257 : : indicate which hard regs can't be used, and call
4258 : : ira_reassign_pseudos. */
4259 : 0 : for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
4260 : 0 : if (reg_old_renumber[i] != reg_renumber[i])
4261 : : {
4262 : 0 : if (reg_renumber[i] < 0)
4263 : 0 : temp_pseudo_reg_arr[n++] = i;
4264 : : else
4265 : 0 : CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
4266 : : }
4267 : 0 : if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
4268 : : bad_spill_regs_global,
4269 : : pseudo_forbidden_regs, pseudo_previous_regs,
4270 : : &spilled_pseudos))
4271 : 0 : something_changed = 1;
4272 : : }
4273 : : /* Fix up the register information in the insn chain.
4274 : : This involves deleting those of the spilled pseudos which did not get
4275 : : a new hard register home from the live_{before,after} sets. */
4276 : 0 : for (chain = reload_insn_chain; chain; chain = chain->next)
4277 : : {
4278 : 0 : HARD_REG_SET used_by_pseudos;
4279 : 0 : HARD_REG_SET used_by_pseudos2;
4280 : :
4281 : 0 : if (! ira_conflicts_p)
4282 : : {
4283 : : /* Don't do it for IRA because IRA and the reload still can
4284 : : assign hard registers to the spilled pseudos on next
4285 : : reload iterations. */
4286 : 0 : AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
4287 : 0 : AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
4288 : : }
4289 : : /* Mark any unallocated hard regs as available for spills. That
4290 : : makes inheritance work somewhat better. */
4291 : 0 : if (chain->need_reload)
4292 : : {
4293 : 0 : REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
4294 : 0 : REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
4295 : 0 : used_by_pseudos |= used_by_pseudos2;
4296 : :
4297 : 0 : compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
4298 : 0 : compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
4299 : : /* The value of chain->used_spill_regs from the previous iteration
4300 : : may not be included in the value calculated here because
4301 : : caller-save insns may have been removed (see function
4302 : : delete_caller_save_insns). */
4303 : 0 : chain->used_spill_regs = ~used_by_pseudos & used_spill_regs;
4304 : : }
4305 : : }
4306 : :
4307 : 0 : CLEAR_REG_SET (&changed_allocation_pseudos);
4308 : : /* Let alter_reg modify the reg rtx's for the modified pseudos. */
4309 : 0 : for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
4310 : : {
4311 : 0 : int regno = reg_renumber[i];
4312 : 0 : if (reg_old_renumber[i] == regno)
4313 : 0 : continue;
4314 : :
4315 : 0 : SET_REGNO_REG_SET (&changed_allocation_pseudos, i);
4316 : :
4317 : 0 : alter_reg (i, reg_old_renumber[i], false);
4318 : 0 : reg_old_renumber[i] = regno;
4319 : 0 : if (dump_file)
4320 : : {
4321 : 0 : if (regno == -1)
4322 : 0 : fprintf (dump_file, " Register %d now on stack.\n\n", i);
4323 : : else
4324 : 0 : fprintf (dump_file, " Register %d now in %d.\n\n",
4325 : 0 : i, reg_renumber[i]);
4326 : : }
4327 : : }
4328 : :
4329 : 0 : return something_changed;
4330 : : }
4331 : :
4332 : : /* Find all paradoxical subregs within X and update reg_max_ref_mode. */
4333 : :
4334 : : static void
4335 : 0 : scan_paradoxical_subregs (rtx x)
4336 : : {
4337 : 0 : int i;
4338 : 0 : const char *fmt;
4339 : 0 : enum rtx_code code = GET_CODE (x);
4340 : :
4341 : 0 : switch (code)
4342 : : {
4343 : : case REG:
4344 : : case CONST:
4345 : : case SYMBOL_REF:
4346 : : case LABEL_REF:
4347 : : CASE_CONST_ANY:
4348 : : case PC:
4349 : : case USE:
4350 : : case CLOBBER:
4351 : : return;
4352 : :
4353 : 0 : case SUBREG:
4354 : 0 : if (REG_P (SUBREG_REG (x)))
4355 : : {
4356 : 0 : unsigned int regno = REGNO (SUBREG_REG (x));
4357 : 0 : if (partial_subreg_p (reg_max_ref_mode[regno], GET_MODE (x)))
4358 : : {
4359 : 0 : reg_max_ref_mode[regno] = GET_MODE (x);
4360 : 0 : mark_home_live_1 (regno, GET_MODE (x));
4361 : : }
4362 : : }
4363 : : return;
4364 : :
4365 : 0 : default:
4366 : 0 : break;
4367 : : }
4368 : :
4369 : 0 : fmt = GET_RTX_FORMAT (code);
4370 : 0 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4371 : : {
4372 : 0 : if (fmt[i] == 'e')
4373 : 0 : scan_paradoxical_subregs (XEXP (x, i));
4374 : 0 : else if (fmt[i] == 'E')
4375 : : {
4376 : 0 : int j;
4377 : 0 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4378 : 0 : scan_paradoxical_subregs (XVECEXP (x, i, j));
4379 : : }
4380 : : }
4381 : : }
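/* Illustrative sketch (editorial example; the pseudo number is made up):
   a paradoxical subreg is one whose outer mode is wider than the inner
   register, e.g.

     (subreg:DI (reg:SI 250) 0)

   For such a reference, scan_paradoxical_subregs widens reg_max_ref_mode[250]
   to DImode, so that alter_reg later allocates a stack slot big enough for
   the widest mode in which the pseudo is ever referenced.  */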
4382 : :
4383 : : /* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4384 : : If *OP_PTR is a paradoxical subreg, try to remove that subreg
4385 : : and apply the corresponding narrowing subreg to *OTHER_PTR.
4386 : : Return true if the operands were changed, false otherwise. */
4387 : :
4388 : : static bool
4389 : 0 : strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr)
4390 : : {
4391 : 0 : rtx op, inner, other, tem;
4392 : :
4393 : 0 : op = *op_ptr;
4394 : 0 : if (!paradoxical_subreg_p (op))
4395 : : return false;
4396 : 0 : inner = SUBREG_REG (op);
4397 : :
4398 : 0 : other = *other_ptr;
4399 : 0 : tem = gen_lowpart_common (GET_MODE (inner), other);
4400 : 0 : if (!tem)
4401 : : return false;
4402 : :
4403 : : /* If the lowpart operation turned a hard register into a subreg,
4404 : : rather than simplifying it to another hard register, then the
4405 : : mode change cannot be properly represented. For example, OTHER
4406 : : might be valid in its current mode, but not in the new one. */
4407 : 0 : if (GET_CODE (tem) == SUBREG
4408 : 0 : && REG_P (other)
4409 : 0 : && HARD_REGISTER_P (other))
4410 : : return false;
4411 : :
4412 : 0 : *op_ptr = inner;
4413 : 0 : *other_ptr = tem;
4414 : 0 : return true;
4415 : : }
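/* Illustrative sketch (editorial example; register numbers are made up):
   given a reload pairing

     *op_ptr    = (subreg:DI (reg:SI 300) 0)
     *other_ptr = (reg:DI 301)

   strip_paradoxical_subreg replaces *op_ptr with the inner (reg:SI 300) and
   *other_ptr with the SImode lowpart of (reg:DI 301), so the copy can be
   reloaded in the narrower inner mode rather than the paradoxical one.  */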
4416 : :
4417 : : /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4418 : : examine all of the reload insns between PREV and NEXT exclusive, and
4419 : : annotate all that may trap. */
4420 : :
4421 : : static void
4422 : 0 : fixup_eh_region_note (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4423 : : {
4424 : 0 : rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4425 : 0 : if (note == NULL)
4426 : : return;
4427 : 0 : if (!insn_could_throw_p (insn))
4428 : 0 : remove_note (insn, note);
4429 : 0 : copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4430 : : }
4431 : :
4432 : : /* Reload pseudo-registers into hard regs around each insn as needed.
4433 : : Additional register load insns are output before the insn that needs it
4434 : : and perhaps store insns after insns that modify the reloaded pseudo reg.
4435 : :
4436 : : reg_last_reload_reg and reg_reloaded_contents keep track of
4437 : : which registers are already available in reload registers.
4438 : : We update these for the reloads that we perform,
4439 : : as the insns are scanned. */
4440 : :
4441 : : static void
4442 : 0 : reload_as_needed (int live_known)
4443 : : {
4444 : 0 : class insn_chain *chain;
4445 : : #if AUTO_INC_DEC
4446 : : int i;
4447 : : #endif
4448 : 0 : rtx_note *marker;
4449 : :
4450 : 0 : memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4451 : 0 : memset (spill_reg_store, 0, sizeof spill_reg_store);
4452 : 0 : reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4453 : 0 : INIT_REG_SET (®_has_output_reload);
4454 : 0 : CLEAR_HARD_REG_SET (reg_reloaded_valid);
4455 : :
4456 : 0 : set_initial_elim_offsets ();
4457 : :
4458 : : /* Generate a marker insn that we will move around. */
4459 : 0 : marker = emit_note (NOTE_INSN_DELETED);
4460 : 0 : unlink_insn_chain (marker, marker);
4461 : :
4462 : 0 : for (chain = reload_insn_chain; chain; chain = chain->next)
4463 : : {
4464 : 0 : rtx_insn *prev = 0;
4465 : 0 : rtx_insn *insn = chain->insn;
4466 : 0 : rtx_insn *old_next = NEXT_INSN (insn);
4467 : : #if AUTO_INC_DEC
4468 : : rtx_insn *old_prev = PREV_INSN (insn);
4469 : : #endif
4470 : :
4471 : 0 : if (will_delete_init_insn_p (insn))
4472 : 0 : continue;
4473 : :
4474 : : /* If we pass a label, copy the offsets from the label information
4475 : : into the current offsets of each elimination. */
4476 : 0 : if (LABEL_P (insn))
4477 : 0 : set_offsets_for_label (insn);
4478 : :
4479 : 0 : else if (INSN_P (insn))
4480 : : {
4481 : 0 : regset_head regs_to_forget;
4482 : 0 : INIT_REG_SET (®s_to_forget);
4483 : 0 : note_stores (insn, forget_old_reloads_1, ®s_to_forget);
4484 : :
4485 : : /* If this is a USE and CLOBBER of a MEM, ensure that any
4486 : : references to eliminable registers have been removed. */
4487 : :
4488 : 0 : if ((GET_CODE (PATTERN (insn)) == USE
4489 : 0 : || GET_CODE (PATTERN (insn)) == CLOBBER)
4490 : 0 : && MEM_P (XEXP (PATTERN (insn), 0)))
4491 : 0 : XEXP (XEXP (PATTERN (insn), 0), 0)
4492 : 0 : = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4493 : 0 : GET_MODE (XEXP (PATTERN (insn), 0)),
4494 : : NULL_RTX);
4495 : :
4496 : : /* If we need to do register elimination processing, do so.
4497 : : This might delete the insn, in which case we are done. */
4498 : 0 : if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4499 : : {
4500 : 0 : eliminate_regs_in_insn (insn, 1);
4501 : 0 : if (NOTE_P (insn))
4502 : : {
4503 : 0 : update_eliminable_offsets ();
4504 : 0 : CLEAR_REG_SET (®s_to_forget);
4505 : 0 : continue;
4506 : : }
4507 : : }
4508 : :
4509 : : /* If need_elim is nonzero but need_reload is zero, one might think
4510 : : that we could simply set n_reloads to 0. However, find_reloads
4511 : : could have done some manipulation of the insn (such as swapping
4512 : : commutative operands), and these manipulations are lost during
4513 : : the first pass for every insn that needs register elimination.
4514 : : So the actions of find_reloads must be redone here. */
4515 : :
4516 : 0 : if (! chain->need_elim && ! chain->need_reload
4517 : 0 : && ! chain->need_operand_change)
4518 : 0 : n_reloads = 0;
4519 : : /* First find the pseudo regs that must be reloaded for this insn.
4520 : : This info is returned in the tables reload_... (see reload.h).
4521 : : Also modify the body of INSN by substituting RELOAD
4522 : : rtx's for those pseudo regs. */
4523 : : else
4524 : : {
4525 : 0 : CLEAR_REG_SET (®_has_output_reload);
4526 : 0 : CLEAR_HARD_REG_SET (reg_is_output_reload);
4527 : :
4528 : 0 : find_reloads (insn, 1, spill_indirect_levels, live_known,
4529 : : spill_reg_order);
4530 : : }
4531 : :
4532 : 0 : if (n_reloads > 0)
4533 : : {
4534 : 0 : rtx_insn *next = NEXT_INSN (insn);
4535 : :
4536 : : /* ??? PREV can get deleted by reload inheritance.
4537 : : Work around this by emitting a marker note. */
4538 : 0 : prev = PREV_INSN (insn);
4539 : 0 : reorder_insns_nobb (marker, marker, prev);
4540 : :
4541 : : /* Now compute which reload regs to reload them into. Perhaps
4542 : : reusing reload regs from previous insns, or else output
4543 : : load insns to reload them. Maybe output store insns too.
4544 : : Record the choices of reload reg in reload_reg_rtx. */
4545 : 0 : choose_reload_regs (chain);
4546 : :
4547 : : /* Generate the insns to reload operands into or out of
4548 : : their reload regs. */
4549 : 0 : emit_reload_insns (chain);
4550 : :
4551 : : /* Substitute the chosen reload regs from reload_reg_rtx
4552 : : into the insn's body (or perhaps into the bodies of other
4553 : : load and store insns that we just made for reloading
4554 : : and that we moved the structure into). */
4555 : 0 : subst_reloads (insn);
4556 : :
4557 : 0 : prev = PREV_INSN (marker);
4558 : 0 : unlink_insn_chain (marker, marker);
4559 : :
4560 : : /* Adjust the exception region notes for loads and stores. */
4561 : 0 : if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
4562 : 0 : fixup_eh_region_note (insn, prev, next);
4563 : :
4564 : : /* Adjust the location of REG_ARGS_SIZE. */
4565 : 0 : rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4566 : 0 : if (p)
4567 : : {
4568 : 0 : remove_note (insn, p);
4569 : 0 : fixup_args_size_notes (prev, PREV_INSN (next),
4570 : : get_args_size (p));
4571 : : }
4572 : :
4573 : : /* If this was an ASM, make sure that all the reload insns
4574 : : we have generated are valid. If not, give an error
4575 : : and delete them. */
4576 : 0 : if (asm_noperands (PATTERN (insn)) >= 0)
4577 : 0 : for (rtx_insn *p = NEXT_INSN (prev);
4578 : 0 : p != next;
4579 : 0 : p = NEXT_INSN (p))
4580 : 0 : if (p != insn && INSN_P (p)
4581 : 0 : && GET_CODE (PATTERN (p)) != USE
4582 : 0 : && (recog_memoized (p) < 0
4583 : 0 : || (extract_insn (p),
4584 : 0 : !(constrain_operands (1,
4585 : : get_enabled_alternatives (p))))))
4586 : : {
4587 : 0 : error_for_asm (insn,
4588 : : "%<asm%> operand requires "
4589 : : "impossible reload");
4590 : 0 : delete_insn (p);
4591 : : }
4592 : : }
4593 : :
4594 : 0 : if (num_eliminable && chain->need_elim)
4595 : 0 : update_eliminable_offsets ();
4596 : :
4597 : : /* Any previously reloaded spilled pseudo reg, stored in this insn,
4598 : : is no longer validly lying around to save a future reload.
4599 : : Note that this does not detect pseudos that were reloaded
4600 : : for this insn in order to be stored
4601 : : (obeying register constraints). That is correct; such reload
4602 : : registers ARE still valid. */
4603 : 0 : forget_marked_reloads (®s_to_forget);
4604 : 0 : CLEAR_REG_SET (®s_to_forget);
4605 : :
4606 : : /* There may have been CLOBBER insns placed after INSN. So scan
4607 : : between INSN and OLD_NEXT and use them to forget old reloads. */
4608 : 0 : for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4609 : 0 : if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4610 : 0 : note_stores (x, forget_old_reloads_1, NULL);
4611 : :
4612 : : #if AUTO_INC_DEC
4613 : : /* Likewise for regs altered by auto-increment in this insn.
4614 : : REG_INC notes have been changed by reloading:
4615 : : find_reloads_address_1 records substitutions for them,
4616 : : which have been performed by subst_reloads above. */
4617 : : for (i = n_reloads - 1; i >= 0; i--)
4618 : : {
4619 : : rtx in_reg = rld[i].in_reg;
4620 : : if (in_reg)
4621 : : {
4622 : : enum rtx_code code = GET_CODE (in_reg);
4623 : : /* PRE_INC / PRE_DEC will have the reload register ending up
4624 : : with the same value as the stack slot, but that doesn't
4625 : : hold true for POST_INC / POST_DEC. Either we have to
4626 : : convert the memory access to a true POST_INC / POST_DEC,
4627 : : or we can't use the reload register for inheritance. */
4628 : : if ((code == POST_INC || code == POST_DEC)
4629 : : && TEST_HARD_REG_BIT (reg_reloaded_valid,
4630 : : REGNO (rld[i].reg_rtx))
4631 : : /* Make sure it is the inc/dec pseudo, and not
4632 : : some other (e.g. output operand) pseudo. */
4633 : : && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4634 : : == REGNO (XEXP (in_reg, 0))))
4635 : :
4636 : : {
4637 : : rtx reload_reg = rld[i].reg_rtx;
4638 : : machine_mode mode = GET_MODE (reload_reg);
4639 : : int n = 0;
4640 : : rtx_insn *p;
4641 : :
4642 : : for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4643 : : {
4644 : : /* We really want to ignore REG_INC notes here, so
4645 : : use PATTERN (p) as argument to reg_set_p . */
4646 : : if (reg_set_p (reload_reg, PATTERN (p)))
4647 : : break;
4648 : : n = count_occurrences (PATTERN (p), reload_reg, 0);
4649 : : if (! n)
4650 : : continue;
4651 : : if (n == 1)
4652 : : {
4653 : : rtx replace_reg
4654 : : = gen_rtx_fmt_e (code, mode, reload_reg);
4655 : :
4656 : : validate_replace_rtx_group (reload_reg,
4657 : : replace_reg, p);
4658 : : n = verify_changes (0);
4659 : :
4660 : : /* We must also verify that the constraints
4661 : : are met after the replacement. Make sure
4662 : : extract_insn is only called for an insn
4663 : : where the replacements were found to be
4664 : : valid so far. */
4665 : : if (n)
4666 : : {
4667 : : extract_insn (p);
4668 : : n = constrain_operands (1,
4669 : : get_enabled_alternatives (p));
4670 : : }
4671 : :
4672 : : /* If the constraints were not met, then
4673 : : undo the replacement, else confirm it. */
4674 : : if (!n)
4675 : : cancel_changes (0);
4676 : : else
4677 : : confirm_change_group ();
4678 : : }
4679 : : break;
4680 : : }
4681 : : if (n == 1)
4682 : : {
4683 : : add_reg_note (p, REG_INC, reload_reg);
4684 : : /* Mark this as having an output reload so that the
4685 : : REG_INC processing code below won't invalidate
4686 : : the reload for inheritance. */
4687 : : SET_HARD_REG_BIT (reg_is_output_reload,
4688 : : REGNO (reload_reg));
4689 : : SET_REGNO_REG_SET (®_has_output_reload,
4690 : : REGNO (XEXP (in_reg, 0)));
4691 : : }
4692 : : else
4693 : : forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4694 : : NULL);
4695 : : }
4696 : : else if ((code == PRE_INC || code == PRE_DEC)
4697 : : && TEST_HARD_REG_BIT (reg_reloaded_valid,
4698 : : REGNO (rld[i].reg_rtx))
4699 : : /* Make sure it is the inc/dec pseudo, and not
4700 : : some other (e.g. output operand) pseudo. */
4701 : : && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4702 : : == REGNO (XEXP (in_reg, 0))))
4703 : : {
4704 : : SET_HARD_REG_BIT (reg_is_output_reload,
4705 : : REGNO (rld[i].reg_rtx));
4706 : : SET_REGNO_REG_SET (®_has_output_reload,
4707 : : REGNO (XEXP (in_reg, 0)));
4708 : : }
4709 : : else if (code == PRE_INC || code == PRE_DEC
4710 : : || code == POST_INC || code == POST_DEC)
4711 : : {
4712 : : int in_regno = REGNO (XEXP (in_reg, 0));
4713 : :
4714 : : if (reg_last_reload_reg[in_regno] != NULL_RTX)
4715 : : {
4716 : : int in_hard_regno;
4717 : : bool forget_p = true;
4718 : :
4719 : : in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
4720 : : if (TEST_HARD_REG_BIT (reg_reloaded_valid,
4721 : : in_hard_regno))
4722 : : {
4723 : : for (rtx_insn *x = (old_prev ?
4724 : : NEXT_INSN (old_prev) : insn);
4725 : : x != old_next;
4726 : : x = NEXT_INSN (x))
4727 : : if (x == reg_reloaded_insn[in_hard_regno])
4728 : : {
4729 : : forget_p = false;
4730 : : break;
4731 : : }
4732 : : }
4733 : : /* If for some reason we didn't set up
4734 : : reg_last_reload_reg in this insn,
4735 : : invalidate inheritance from previous
4736 : : insns for the incremented/decremented
4737 : : register. Such registers will not be
4738 : : in reg_has_output_reload. Also
4739 : : invalidate it if the corresponding
4740 : : element in reg_reloaded_insn has been
4741 : : invalidated. */
4742 : : if (forget_p)
4743 : : forget_old_reloads_1 (XEXP (in_reg, 0),
4744 : : NULL_RTX, NULL);
4745 : : }
4746 : : }
4747 : : }
4748 : : }
4749 : : /* If a pseudo that got a hard register is auto-incremented,
4750 : : we must purge records of copying it into pseudos without
4751 : : hard registers. */
4752 : : for (rtx x = REG_NOTES (insn); x; x = XEXP (x, 1))
4753 : : if (REG_NOTE_KIND (x) == REG_INC)
4754 : : {
4755 : : /* See if this pseudo reg was reloaded in this insn.
4756 : : If so, its last-reload info is still valid
4757 : : because it is based on this insn's reload. */
4758 : : for (i = 0; i < n_reloads; i++)
4759 : : if (rld[i].out == XEXP (x, 0))
4760 : : break;
4761 : :
4762 : : if (i == n_reloads)
4763 : : forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4764 : : }
4765 : : #endif
4766 : : }
4767 : : /* A reload reg's contents are unknown after a label. */
4768 : 0 : if (LABEL_P (insn))
4769 : 0 : CLEAR_HARD_REG_SET (reg_reloaded_valid);
4770 : :
4771 : : /* Don't assume a reload reg is still good after a call insn
4772 : : if it is a call-used reg, or if it contains a value that will
4773 : : be partially clobbered by the call. */
4774 : 0 : else if (CALL_P (insn))
4775 : : {
4776 : 0 : reg_reloaded_valid
4777 : 0 : &= ~insn_callee_abi (insn).full_and_partial_reg_clobbers ();
4778 : :
4779 : : /* If this is a call to a setjmp-type function, we must not
4780 : : reuse any reload reg contents across the call; that will
4781 : : just be clobbered by other uses of the register in later
4782 : : code, before the longjmp. */
4783 : 0 : if (find_reg_note (insn, REG_SETJMP, NULL_RTX))
4784 : 0 : CLEAR_HARD_REG_SET (reg_reloaded_valid);
4785 : : }
4786 : : }
4787 : :
4788 : : /* Clean up. */
4789 : 0 : free (reg_last_reload_reg);
4790 : 0 : CLEAR_REG_SET (®_has_output_reload);
4791 : 0 : }
4792 : :
4793 : : /* Discard all record of any value reloaded from X,
4794 : : or reloaded in X from someplace else;
4795 : : unless X is an output reload reg of the current insn.
4796 : :
4797 : : X may be a hard reg (the reload reg)
4798 : : or it may be a pseudo reg that was reloaded from.
4799 : :
4800 : : When DATA is non-NULL just mark the registers in regset
4801 : : to be forgotten later. */
4802 : :
4803 : : static void
4804 : 0 : forget_old_reloads_1 (rtx x, const_rtx, void *data)
4805 : : {
4806 : 0 : unsigned int regno;
4807 : 0 : unsigned int nr;
4808 : 0 : regset regs = (regset) data;
4809 : :
4810 : : /* note_stores does give us subregs of hard regs,
4811 : : but subreg_regno_offset requires a hard reg. */
4812 : 0 : while (GET_CODE (x) == SUBREG)
4813 : : {
4814 : : /* We ignore the subreg offset when calculating the regno,
4815 : : because we are using the entire underlying hard register
4816 : : below. */
4817 : 0 : x = SUBREG_REG (x);
4818 : : }
4819 : :
4820 : 0 : if (!REG_P (x))
4821 : : return;
4822 : :
4823 : 0 : regno = REGNO (x);
4824 : :
4825 : 0 : if (regno >= FIRST_PSEUDO_REGISTER)
4826 : : nr = 1;
4827 : : else
4828 : : {
4829 : 0 : unsigned int i;
4830 : :
4831 : 0 : nr = REG_NREGS (x);
4832 : : /* Storing into a spilled-reg invalidates its contents.
4833 : : This can happen if a block-local pseudo is allocated to that reg
4834 : : and it wasn't spilled because this block's total need is 0.
4835 : : Then some insn might have an optional reload and use this reg. */
4836 : 0 : if (!regs)
4837 : 0 : for (i = 0; i < nr; i++)
4838 : : /* But don't do this if the reg actually serves as an output
4839 : : reload reg in the current instruction. */
4840 : 0 : if (n_reloads == 0
4841 : 0 : || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4842 : : {
4843 : 0 : CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4844 : 0 : spill_reg_store[regno + i] = 0;
4845 : : }
4846 : : }
4847 : :
4848 : 0 : if (regs)
4849 : 0 : while (nr-- > 0)
4850 : 0 : SET_REGNO_REG_SET (regs, regno + nr);
4851 : : else
4852 : : {
4853 : : /* Since value of X has changed,
4854 : : forget any value previously copied from it. */
4855 : :
4856 : 0 : while (nr-- > 0)
4857 : : /* But don't forget a copy if this is the output reload
4858 : : that establishes the copy's validity. */
4859 : 0 : if (n_reloads == 0
4860 : 0 : || !REGNO_REG_SET_P (®_has_output_reload, regno + nr))
4861 : 0 : reg_last_reload_reg[regno + nr] = 0;
4862 : : }
4863 : : }
4864 : :
4865 : : /* Forget the reloads marked in regset REGS by the previous function. */
4866 : : static void
4867 : 0 : forget_marked_reloads (regset regs)
4868 : : {
4869 : 0 : unsigned int reg;
4870 : 0 : reg_set_iterator rsi;
4871 : 0 : EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4872 : : {
4873 : 0 : if (reg < FIRST_PSEUDO_REGISTER
4874 : : /* But don't do this if the reg actually serves as an output
4875 : : reload reg in the current instruction. */
4876 : 0 : && (n_reloads == 0
4877 : 0 : || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4878 : : {
4879 : 0 : CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4880 : 0 : spill_reg_store[reg] = 0;
4881 : : }
4882 : 0 : if (n_reloads == 0
4883 : 0 : || !REGNO_REG_SET_P (®_has_output_reload, reg))
4884 : 0 : reg_last_reload_reg[reg] = 0;
4885 : : }
4886 : 0 : }
4887 : :
4888 : : /* The following HARD_REG_SETs indicate when each hard register is
4889 : : used for a reload of various parts of the current insn. */
4890 : :
4891 : : /* If reg is unavailable for all reloads. */
4892 : : static HARD_REG_SET reload_reg_unavailable;
4893 : : /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4894 : : static HARD_REG_SET reload_reg_used;
4895 : : /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4896 : : static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4897 : : /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4898 : : static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4899 : : /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4900 : : static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4901 : : /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4902 : : static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4903 : : /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4904 : : static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4905 : : /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4906 : : static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4907 : : /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4908 : : static HARD_REG_SET reload_reg_used_in_op_addr;
4909 : : /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4910 : : static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4911 : : /* If reg is in use for a RELOAD_FOR_INSN reload. */
4912 : : static HARD_REG_SET reload_reg_used_in_insn;
4913 : : /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4914 : : static HARD_REG_SET reload_reg_used_in_other_addr;
4915 : :
4916 : : /* If reg is in use as a reload reg for any sort of reload. */
4917 : : static HARD_REG_SET reload_reg_used_at_all;
4918 : :
4919 : : /* If reg is used as an inherited reload. We just mark the first register
4920 : : in the group. */
4921 : : static HARD_REG_SET reload_reg_used_for_inherit;
4922 : :
4923 : : /* Records which hard regs are used in any way, either as explicit use or
4924 : : by being allocated to a pseudo during any point of the current insn. */
4925 : : static HARD_REG_SET reg_used_in_insn;
4926 : :
4927 : : /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4928 : : TYPE. MODE is used to indicate how many consecutive regs are
4929 : : actually used. */
4930 : :
4931 : : static void
4932 : 0 : mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4933 : : machine_mode mode)
4934 : : {
4935 : 0 : switch (type)
4936 : : {
4937 : 0 : case RELOAD_OTHER:
4938 : 0 : add_to_hard_reg_set (&reload_reg_used, mode, regno);
4939 : 0 : break;
4940 : :
4941 : 0 : case RELOAD_FOR_INPUT_ADDRESS:
4942 : 0 : add_to_hard_reg_set (&reload_reg_used_in_input_addr[opnum], mode, regno);
4943 : 0 : break;
4944 : :
4945 : 0 : case RELOAD_FOR_INPADDR_ADDRESS:
4946 : 0 : add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr[opnum], mode, regno);
4947 : 0 : break;
4948 : :
4949 : 0 : case RELOAD_FOR_OUTPUT_ADDRESS:
4950 : 0 : add_to_hard_reg_set (&reload_reg_used_in_output_addr[opnum], mode, regno);
4951 : 0 : break;
4952 : :
4953 : 0 : case RELOAD_FOR_OUTADDR_ADDRESS:
4954 : 0 : add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr[opnum], mode, regno);
4955 : 0 : break;
4956 : :
4957 : 0 : case RELOAD_FOR_OPERAND_ADDRESS:
4958 : 0 : add_to_hard_reg_set (&reload_reg_used_in_op_addr, mode, regno);
4959 : 0 : break;
4960 : :
4961 : 0 : case RELOAD_FOR_OPADDR_ADDR:
4962 : 0 : add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload, mode, regno);
4963 : 0 : break;
4964 : :
4965 : 0 : case RELOAD_FOR_OTHER_ADDRESS:
4966 : 0 : add_to_hard_reg_set (&reload_reg_used_in_other_addr, mode, regno);
4967 : 0 : break;
4968 : :
4969 : 0 : case RELOAD_FOR_INPUT:
4970 : 0 : add_to_hard_reg_set (&reload_reg_used_in_input[opnum], mode, regno);
4971 : 0 : break;
4972 : :
4973 : 0 : case RELOAD_FOR_OUTPUT:
4974 : 0 : add_to_hard_reg_set (&reload_reg_used_in_output[opnum], mode, regno);
4975 : 0 : break;
4976 : :
4977 : 0 : case RELOAD_FOR_INSN:
4978 : 0 : add_to_hard_reg_set (&reload_reg_used_in_insn, mode, regno);
4979 : 0 : break;
4980 : : }
4981 : :
4982 : 0 : add_to_hard_reg_set (&reload_reg_used_at_all, mode, regno);
4983 : 0 : }
4984 : :
4985 : : /* Similarly, but show REGNO is no longer in use for a reload. */
4986 : :
4987 : : static void
4988 : 0 : clear_reload_reg_in_use (unsigned int regno, int opnum,
4989 : : enum reload_type type, machine_mode mode)
4990 : : {
4991 : 0 : unsigned int nregs = hard_regno_nregs (regno, mode);
4992 : 0 : unsigned int start_regno, end_regno, r;
4993 : 0 : int i;
4994 : : /* A complication is that for some reload types, inheritance might
4995 : : allow multiple reloads of the same type to share a reload register.
4996 : : We set check_opnum if we have to check only reloads with the same
4997 : : operand number, and check_any if we have to check all reloads. */
4998 : 0 : int check_opnum = 0;
4999 : 0 : int check_any = 0;
5000 : 0 : HARD_REG_SET *used_in_set;
5001 : :
5002 : 0 : switch (type)
5003 : : {
5004 : : case RELOAD_OTHER:
5005 : : used_in_set = &reload_reg_used;
5006 : : break;
5007 : :
5008 : 0 : case RELOAD_FOR_INPUT_ADDRESS:
5009 : 0 : used_in_set = &reload_reg_used_in_input_addr[opnum];
5010 : 0 : break;
5011 : :
5012 : 0 : case RELOAD_FOR_INPADDR_ADDRESS:
5013 : 0 : check_opnum = 1;
5014 : 0 : used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
5015 : 0 : break;
5016 : :
5017 : 0 : case RELOAD_FOR_OUTPUT_ADDRESS:
5018 : 0 : used_in_set = &reload_reg_used_in_output_addr[opnum];
5019 : 0 : break;
5020 : :
5021 : 0 : case RELOAD_FOR_OUTADDR_ADDRESS:
5022 : 0 : check_opnum = 1;
5023 : 0 : used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
5024 : 0 : break;
5025 : :
5026 : 0 : case RELOAD_FOR_OPERAND_ADDRESS:
5027 : 0 : used_in_set = &reload_reg_used_in_op_addr;
5028 : 0 : break;
5029 : :
5030 : 0 : case RELOAD_FOR_OPADDR_ADDR:
5031 : 0 : check_any = 1;
5032 : 0 : used_in_set = &reload_reg_used_in_op_addr_reload;
5033 : 0 : break;
5034 : :
5035 : 0 : case RELOAD_FOR_OTHER_ADDRESS:
5036 : 0 : used_in_set = &reload_reg_used_in_other_addr;
5037 : 0 : check_any = 1;
5038 : 0 : break;
5039 : :
5040 : 0 : case RELOAD_FOR_INPUT:
5041 : 0 : used_in_set = &reload_reg_used_in_input[opnum];
5042 : 0 : break;
5043 : :
5044 : 0 : case RELOAD_FOR_OUTPUT:
5045 : 0 : used_in_set = &reload_reg_used_in_output[opnum];
5046 : 0 : break;
5047 : :
5048 : 0 : case RELOAD_FOR_INSN:
5049 : 0 : used_in_set = &reload_reg_used_in_insn;
5050 : 0 : break;
5051 : 0 : default:
5052 : 0 : gcc_unreachable ();
5053 : : }
5054 : : /* We resolve conflicts with remaining reloads of the same type by
5055 : : excluding the intervals of reload registers used by them from the
5056 : : interval of freed reload registers. Since we only keep track of
5057 : : one set of interval bounds, we might have to exclude somewhat
5058 : : more than what would be necessary if we used a HARD_REG_SET here.
5059 : : But this should only happen very infrequently, so there should
5060 : : be no reason to worry about it. */
5061 : :
5062 : 0 : start_regno = regno;
5063 : 0 : end_regno = regno + nregs;
5064 : 0 : if (check_opnum || check_any)
5065 : : {
5066 : 0 : for (i = n_reloads - 1; i >= 0; i--)
5067 : : {
5068 : 0 : if (rld[i].when_needed == type
5069 : 0 : && (check_any || rld[i].opnum == opnum)
5070 : 0 : && rld[i].reg_rtx)
5071 : : {
5072 : 0 : unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
5073 : 0 : unsigned int conflict_end
5074 : 0 : = end_hard_regno (rld[i].mode, conflict_start);
5075 : :
5076 : : /* If there is an overlap with the first to-be-freed register,
5077 : : adjust the interval start. */
5078 : 0 : if (conflict_start <= start_regno && conflict_end > start_regno)
5079 : 0 : start_regno = conflict_end;
5080 : : /* Otherwise, if there is a conflict with one of the other
5081 : : to-be-freed registers, adjust the interval end. */
5082 : 0 : if (conflict_start > start_regno && conflict_start < end_regno)
5083 : 0 : end_regno = conflict_start;
5084 : : }
5085 : : }
5086 : : }
5087 : :
5088 : 0 : for (r = start_regno; r < end_regno; r++)
5089 : 0 : CLEAR_HARD_REG_BIT (*used_in_set, r);
5090 : 0 : }
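: :
: : /* A worked example of the interval trimming above (register numbers
: : are illustrative): suppose we are freeing hard regs 3..6 while a
: : remaining reload of the same type still occupies regs 5..7. The
: : conflict lowers end_regno to 5, so only regs 3 and 4 are cleared;
: : regs 5 and 6 stay marked even though reg 6 does not truly conflict,
: : which is the conservative over-approximation described above. */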
5091 : :
5092 : : /* Return 1 if reg REGNO is free as a reload reg for a reload of the sort
5093 : : specified by OPNUM and TYPE. */
5094 : :
5095 : : static int
5096 : 0 : reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
5097 : : {
5098 : 0 : int i;
5099 : :
5100 : : /* In use for a RELOAD_OTHER means it's not available for anything. */
5101 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used, regno)
5102 : 0 : || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5103 : : return 0;
5104 : :
5105 : 0 : switch (type)
5106 : : {
5107 : 0 : case RELOAD_OTHER:
5108 : : /* In use for anything means we can't use it for RELOAD_OTHER. */
5109 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
5110 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5111 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
5112 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
5113 : : return 0;
5114 : :
5115 : 0 : for (i = 0; i < reload_n_operands; i++)
5116 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5117 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5118 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5119 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5120 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
5121 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5122 : : return 0;
5123 : :
5124 : : return 1;
5125 : :
5126 : 0 : case RELOAD_FOR_INPUT:
5127 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5128 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
5129 : : return 0;
5130 : :
5131 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5132 : : return 0;
5133 : :
5134 : : /* If it is used for some other input, can't use it. */
5135 : 0 : for (i = 0; i < reload_n_operands; i++)
5136 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5137 : : return 0;
5138 : :
5139 : : /* If it is used in a later operand's address, can't use it. */
5140 : 0 : for (i = opnum + 1; i < reload_n_operands; i++)
5141 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5142 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5143 : : return 0;
5144 : :
5145 : : return 1;
5146 : :
5147 : 0 : case RELOAD_FOR_INPUT_ADDRESS:
5148 : : /* Can't use a register if it is used for an input address for this
5149 : : operand or used as an input in an earlier one. */
5150 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
5151 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
5152 : : return 0;
5153 : :
5154 : 0 : for (i = 0; i < opnum; i++)
5155 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5156 : : return 0;
5157 : :
5158 : : return 1;
5159 : :
5160 : 0 : case RELOAD_FOR_INPADDR_ADDRESS:
5161 : : /* Can't use a register if it is used for an input address
5162 : : for this operand or used as an input in an earlier
5163 : : one. */
5164 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
5165 : : return 0;
5166 : :
5167 : 0 : for (i = 0; i < opnum; i++)
5168 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5169 : : return 0;
5170 : :
5171 : : return 1;
5172 : :
5173 : 0 : case RELOAD_FOR_OUTPUT_ADDRESS:
5174 : : /* Can't use a register if it is used for an output address for this
5175 : : operand or used as an output in this or a later operand. Note
5176 : : that multiple output operands are emitted in reverse order, so
5177 : : the conflicting ones are those with lower indices. */
5178 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
5179 : : return 0;
5180 : :
5181 : 0 : for (i = 0; i <= opnum; i++)
5182 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5183 : : return 0;
5184 : :
5185 : : return 1;
5186 : :
5187 : 0 : case RELOAD_FOR_OUTADDR_ADDRESS:
5188 : : /* Can't use a register if it is used for an output address
5189 : : for this operand or used as an output in this or a
5190 : : later operand. Note that multiple output operands are
5191 : : emitted in reverse order, so the conflicting ones are
5192 : : those with lower indices. */
5193 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
5194 : : return 0;
5195 : :
5196 : 0 : for (i = 0; i <= opnum; i++)
5197 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5198 : : return 0;
5199 : :
5200 : : return 1;
5201 : :
5202 : : case RELOAD_FOR_OPERAND_ADDRESS:
5203 : 0 : for (i = 0; i < reload_n_operands; i++)
5204 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5205 : : return 0;
5206 : :
5207 : 0 : return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5208 : 0 : && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
5209 : :
5210 : : case RELOAD_FOR_OPADDR_ADDR:
5211 : 0 : for (i = 0; i < reload_n_operands; i++)
5212 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5213 : : return 0;
5214 : :
5215 : 0 : return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
5216 : :
5217 : 0 : case RELOAD_FOR_OUTPUT:
5218 : : /* This cannot share a register with RELOAD_FOR_INSN reloads, other
5219 : : outputs, or an operand address for this or an earlier output.
5220 : : Note that multiple output operands are emitted in reverse order,
5221 : : so the conflicting ones are those with higher indices. */
5222 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
5223 : : return 0;
5224 : :
5225 : 0 : for (i = 0; i < reload_n_operands; i++)
5226 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5227 : : return 0;
5228 : :
5229 : 0 : for (i = opnum; i < reload_n_operands; i++)
5230 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5231 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5232 : : return 0;
5233 : :
5234 : : return 1;
5235 : :
5236 : : case RELOAD_FOR_INSN:
5237 : 0 : for (i = 0; i < reload_n_operands; i++)
5238 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
5239 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5240 : : return 0;
5241 : :
5242 : 0 : return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5243 : 0 : && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
5244 : :
5245 : 0 : case RELOAD_FOR_OTHER_ADDRESS:
5246 : 0 : return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
5247 : :
5248 : 0 : default:
5249 : 0 : gcc_unreachable ();
5250 : : }
5251 : : }
5252 : :
5253 : : /* Return 1 if the value in reload reg REGNO, as used by the reload with
5254 : : the number RELOADNUM, is still available in REGNO at the end of the insn.
5255 : :
5256 : : We can assume that the reload reg was already tested for availability
5257 : : at the time it is needed, and we should not check this again,
5258 : : in case the reg has already been marked in use. */
5259 : :
5260 : : static int
5261 : 0 : reload_reg_reaches_end_p (unsigned int regno, int reloadnum)
5262 : : {
5263 : 0 : int opnum = rld[reloadnum].opnum;
5264 : 0 : enum reload_type type = rld[reloadnum].when_needed;
5265 : 0 : int i;
5266 : :
5267 : : /* See if there is a reload with the same type for this operand, using
5268 : : the same register. This case is not handled by the code below. */
5269 : 0 : for (i = reloadnum + 1; i < n_reloads; i++)
5270 : : {
5271 : 0 : rtx reg;
5272 : :
5273 : 0 : if (rld[i].opnum != opnum || rld[i].when_needed != type)
5274 : 0 : continue;
5275 : 0 : reg = rld[i].reg_rtx;
5276 : 0 : if (reg == NULL_RTX)
5277 : 0 : continue;
5278 : 0 : if (regno >= REGNO (reg) && regno < END_REGNO (reg))
5279 : : return 0;
5280 : : }
5281 : :
5282 : 0 : switch (type)
5283 : : {
5284 : : case RELOAD_OTHER:
5285 : : /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
5286 : : its value must reach the end. */
5287 : : return 1;
5288 : :
5289 : : /* If this use is for part of the insn,
5290 : : its value reaches the end if no subsequent part uses the same register.
5291 : : Just like the above function, don't try to do this with lots
5292 : : of fallthroughs. */
5293 : :
5294 : : case RELOAD_FOR_OTHER_ADDRESS:
5295 : : /* Here we check for everything else, since these don't conflict
5296 : : with anything else and everything comes later. */
5297 : :
5298 : 0 : for (i = 0; i < reload_n_operands; i++)
5299 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5300 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5301 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
5302 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5303 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5304 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5305 : : return 0;
5306 : :
5307 : 0 : return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5308 : 0 : && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
5309 : 0 : && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5310 : 0 : && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
5311 : :
5312 : : case RELOAD_FOR_INPUT_ADDRESS:
5313 : : case RELOAD_FOR_INPADDR_ADDRESS:
5314 : : /* Similar, except that we check only for this and subsequent inputs
5315 : : and the address of only subsequent inputs and we do not need
5316 : : to check for RELOAD_OTHER objects since they are known not to
5317 : : conflict. */
5318 : :
5319 : 0 : for (i = opnum; i < reload_n_operands; i++)
5320 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5321 : : return 0;
5322 : :
5323 : : /* Reload register of reload with type RELOAD_FOR_INPADDR_ADDRESS
5324 : : could be killed if the register is also used by reload with type
5325 : : RELOAD_FOR_INPUT_ADDRESS, so check it. */
5326 : 0 : if (type == RELOAD_FOR_INPADDR_ADDRESS
5327 : 0 : && TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
5328 : : return 0;
5329 : :
5330 : 0 : for (i = opnum + 1; i < reload_n_operands; i++)
5331 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5332 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5333 : : return 0;
5334 : :
5335 : 0 : for (i = 0; i < reload_n_operands; i++)
5336 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5337 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5338 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5339 : : return 0;
5340 : :
5341 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5342 : : return 0;
5343 : :
5344 : 0 : return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5345 : 0 : && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5346 : 0 : && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5347 : :
5348 : 0 : case RELOAD_FOR_INPUT:
5349 : : /* Similar to input address, except we start at the next operand for
5350 : : both input and input address and we do not check for
5351 : : RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5352 : : would conflict. */
5353 : :
5354 : 0 : for (i = opnum + 1; i < reload_n_operands; i++)
5355 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5356 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5357 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5358 : : return 0;
5359 : :
5360 : : /* ... fall through ... */
5361 : :
5362 : : case RELOAD_FOR_OPERAND_ADDRESS:
5363 : : /* Check outputs and their addresses. */
5364 : :
5365 : 0 : for (i = 0; i < reload_n_operands; i++)
5366 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5367 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5368 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5369 : : return 0;
5370 : :
5371 : 0 : return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
5372 : :
5373 : : case RELOAD_FOR_OPADDR_ADDR:
5374 : 0 : for (i = 0; i < reload_n_operands; i++)
5375 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5376 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5377 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5378 : : return 0;
5379 : :
5380 : 0 : return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5381 : 0 : && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5382 : 0 : && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5383 : :
5384 : 0 : case RELOAD_FOR_INSN:
5385 : : /* These conflict with other outputs with RELOAD_OTHER. So
5386 : : we need only check for output addresses. */
5387 : :
5388 : 0 : opnum = reload_n_operands;
5389 : :
5390 : : /* fall through */
5391 : :
5392 : 0 : case RELOAD_FOR_OUTPUT:
5393 : 0 : case RELOAD_FOR_OUTPUT_ADDRESS:
5394 : 0 : case RELOAD_FOR_OUTADDR_ADDRESS:
5395 : : /* We already know these can't conflict with a later output. So the
5396 : : only thing to check are later output addresses.
5397 : : Note that multiple output operands are emitted in reverse order,
5398 : : so the conflicting ones are those with lower indices. */
5399 : 0 : for (i = 0; i < opnum; i++)
5400 : 0 : if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5401 : 0 : || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5402 : : return 0;
5403 : :
5404 : : /* Reload register of reload with type RELOAD_FOR_OUTADDR_ADDRESS
5405 : : could be killed if the register is also used by reload with type
5406 : : RELOAD_FOR_OUTPUT_ADDRESS, so check it. */
5407 : 0 : if (type == RELOAD_FOR_OUTADDR_ADDRESS
5408 : 0 : && TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
5409 : : return 0;
5410 : :
5411 : : return 1;
5412 : :
5413 : 0 : default:
5414 : 0 : gcc_unreachable ();
5415 : : }
5416 : : }
5417 : :
5418 : : /* Like reload_reg_reaches_end_p, but check that the condition holds for
5419 : : every register in REG. */
5420 : :
5421 : : static bool
5422 : 0 : reload_reg_rtx_reaches_end_p (rtx reg, int reloadnum)
5423 : : {
5424 : 0 : unsigned int i;
5425 : :
5426 : 0 : for (i = REGNO (reg); i < END_REGNO (reg); i++)
5427 : 0 : if (!reload_reg_reaches_end_p (i, reloadnum))
5428 : : return false;
5429 : : return true;
5430 : : }
5431 : :
5432 : :
5433 : : /* Returns whether R1 and R2 are uniquely chained: the value of one
5434 : : is used by the other, and that value is not used by any other
5435 : : reload for this insn. This is used to partially undo the decision
5436 : : made in find_reloads when in the case of multiple
5437 : : RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5438 : : RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5439 : : reloads. This code tries to avoid the conflict created by that
5440 : : change. It might be cleaner to explicitly keep track of which
5441 : : RELOAD_FOR_OPADDR_ADDR reload is associated with which
5442 : : RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5443 : : this after the fact. */
5444 : : static bool
5445 : 0 : reloads_unique_chain_p (int r1, int r2)
5446 : : {
5447 : 0 : int i;
5448 : :
5449 : : /* We only check input reloads. */
5450 : 0 : if (! rld[r1].in || ! rld[r2].in)
5451 : : return false;
5452 : :
5453 : : /* Avoid anything with output reloads. */
5454 : 0 : if (rld[r1].out || rld[r2].out)
5455 : : return false;
5456 : :
5457 : : /* "chained" means one reload is a component of the other reload,
5458 : : not the same as the other reload. */
5459 : 0 : if (rld[r1].opnum != rld[r2].opnum
5460 : 0 : || rtx_equal_p (rld[r1].in, rld[r2].in)
5461 : 0 : || rld[r1].optional || rld[r2].optional
5462 : 0 : || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5463 : 0 : || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5464 : 0 : return false;
5465 : :
5466 : : /* The following loop assumes that r1 is the reload that feeds r2. */
5467 : 0 : if (r1 > r2)
5468 : 0 : std::swap (r1, r2);
5469 : :
5470 : 0 : for (i = 0; i < n_reloads; i ++)
5471 : : /* Look for input reloads that aren't our two */
5472 : 0 : if (i != r1 && i != r2 && rld[i].in)
5473 : : {
5474 : : /* If our reload is mentioned at all, it isn't a simple chain. */
5475 : 0 : if (reg_mentioned_p (rld[r1].in, rld[i].in))
5476 : : return false;
5477 : : }
5478 : : return true;
5479 : : }
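: :
: : /* Illustrative case (names are not from any particular insn): if
: : reload R1 loads (reg A) and reload R2 loads
: : (plus (reg A) (const_int 4)) for the same operand, one value feeds
: : the other; the pair counts as uniquely chained only when no other
: : input reload of the insn mentions the chained value. */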
5480 : :
5481 : : /* The recursive function change all occurrences of WHAT in *WHERE
5482 : : to REPL. */
5483 : : static void
5484 : 0 : substitute (rtx *where, const_rtx what, rtx repl)
5485 : : {
5486 : 0 : const char *fmt;
5487 : 0 : int i;
5488 : 0 : enum rtx_code code;
5489 : :
5490 : 0 : if (*where == 0)
5491 : : return;
5492 : :
5493 : 0 : if (*where == what || rtx_equal_p (*where, what))
5494 : : {
5495 : : /* Record the location of the changed rtx. */
5496 : 0 : substitute_stack.safe_push (where);
5497 : 0 : *where = repl;
5498 : 0 : return;
5499 : : }
5500 : :
5501 : 0 : code = GET_CODE (*where);
5502 : 0 : fmt = GET_RTX_FORMAT (code);
5503 : 0 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5504 : : {
5505 : 0 : if (fmt[i] == 'E')
5506 : : {
5507 : 0 : int j;
5508 : :
5509 : 0 : for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5510 : 0 : substitute (&XVECEXP (*where, i, j), what, repl);
5511 : : }
5512 : 0 : else if (fmt[i] == 'e')
5513 : 0 : substitute (&XEXP (*where, i), what, repl);
5514 : : }
5515 : : }
5516 : :
5517 : : /* The function returns TRUE if chain of reload R1 and R2 (in any
5518 : : order) can be evaluated without usage of intermediate register for
5519 : : the reload containing another reload. It is important to see
5520 : : gen_reload to understand what the function is trying to do. As an
5521 : : example, let us have reload chain
5522 : :
5523 : : r2: const
5524 : : r1: <something> + const
5525 : :
5526 : : and reload R2 got reload reg HR. The function returns true if
5527 : : there is a correct insn HR = HR + <something>. Otherwise,
5528 : : gen_reload will use intermediate register (and this is the reload
5529 : : reg for R1) to reload <something>.
5530 : :
5531 : : We need this function to find a conflict for chain reloads. In our
5532 : : example, if HR = HR + <something> is incorrect insn, then we cannot
5533 : : use HR as a reload register for R2. If we do use it then we get a
5534 : : wrong code:
5535 : :
5536 : : HR = const
5537 : : HR = <something>
5538 : : HR = HR + HR
5539 : :
5540 : : */
5541 : : static bool
5542 : 0 : gen_reload_chain_without_interm_reg_p (int r1, int r2)
5543 : : {
5544 : : /* Assume other cases in gen_reload are not possible for
5545 : : chain reloads or do need intermediate hard registers. */
5546 : 0 : bool result = true;
5547 : 0 : int regno, code;
5548 : 0 : rtx out, in;
5549 : 0 : rtx_insn *insn;
5550 : 0 : rtx_insn *last = get_last_insn ();
5551 : :
5552 : : /* Make r2 a component of r1. */
5553 : 0 : if (reg_mentioned_p (rld[r1].in, rld[r2].in))
5554 : 0 : std::swap (r1, r2);
5555 : :
5556 : 0 : gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
5557 : 0 : regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
5558 : 0 : gcc_assert (regno >= 0);
5559 : 0 : out = gen_rtx_REG (rld[r1].mode, regno);
5560 : 0 : in = rld[r1].in;
5561 : 0 : substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));
5562 : :
5563 : : /* If IN is a paradoxical SUBREG, remove it and try to put the
5564 : : opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
5565 : 0 : strip_paradoxical_subreg (&in, &out);
5566 : :
5567 : 0 : if (GET_CODE (in) == PLUS
5568 : 0 : && (REG_P (XEXP (in, 0))
5569 : : || GET_CODE (XEXP (in, 0)) == SUBREG
5570 : : || MEM_P (XEXP (in, 0)))
5571 : 0 : && (REG_P (XEXP (in, 1))
5572 : 0 : || GET_CODE (XEXP (in, 1)) == SUBREG
5573 : 0 : || CONSTANT_P (XEXP (in, 1))
5574 : 0 : || MEM_P (XEXP (in, 1))))
5575 : : {
5576 : 0 : insn = emit_insn (gen_rtx_SET (out, in));
5577 : 0 : code = recog_memoized (insn);
5578 : 0 : result = false;
5579 : :
5580 : 0 : if (code >= 0)
5581 : : {
5582 : 0 : extract_insn (insn);
5583 : : /* We want constrain operands to treat this insn strictly in
5584 : : its validity determination, i.e., the way it would after
5585 : : reload has completed. */
5586 : 0 : result = constrain_operands (1, get_enabled_alternatives (insn));
5587 : : }
5588 : :
5589 : 0 : delete_insns_since (last);
5590 : : }
5591 : :
5592 : : /* Restore the original value at each changed address within R1. */
5593 : 0 : while (!substitute_stack.is_empty ())
5594 : : {
5595 : 0 : rtx *where = substitute_stack.pop ();
5596 : 0 : *where = rld[r2].in;
5597 : : }
5598 : :
5599 : 0 : return result;
5600 : : }
5601 : :
5602 : : /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5603 : : Return 0 otherwise.
5604 : :
5605 : : This function uses the same algorithm as reload_reg_free_p above. */
5606 : :
5607 : : static int
5608 : 0 : reloads_conflict (int r1, int r2)
5609 : : {
5610 : 0 : enum reload_type r1_type = rld[r1].when_needed;
5611 : 0 : enum reload_type r2_type = rld[r2].when_needed;
5612 : 0 : int r1_opnum = rld[r1].opnum;
5613 : 0 : int r2_opnum = rld[r2].opnum;
5614 : :
5615 : : /* RELOAD_OTHER conflicts with everything. */
5616 : 0 : if (r2_type == RELOAD_OTHER)
5617 : : return 1;
5618 : :
5619 : : /* Otherwise, check conflicts differently for each type. */
5620 : :
5621 : 0 : switch (r1_type)
5622 : : {
5623 : 0 : case RELOAD_FOR_INPUT:
5624 : 0 : return (r2_type == RELOAD_FOR_INSN
5625 : 0 : || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5626 : 0 : || r2_type == RELOAD_FOR_OPADDR_ADDR
5627 : 0 : || r2_type == RELOAD_FOR_INPUT
5628 : 0 : || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5629 : 0 : || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5630 : 0 : && r2_opnum > r1_opnum));
5631 : :
5632 : 0 : case RELOAD_FOR_INPUT_ADDRESS:
5633 : 0 : return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5634 : 0 : || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5635 : :
5636 : 0 : case RELOAD_FOR_INPADDR_ADDRESS:
5637 : 0 : return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5638 : 0 : || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5639 : :
5640 : 0 : case RELOAD_FOR_OUTPUT_ADDRESS:
5641 : 0 : return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5642 : 0 : || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5643 : :
5644 : 0 : case RELOAD_FOR_OUTADDR_ADDRESS:
5645 : 0 : return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5646 : 0 : || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5647 : :
5648 : 0 : case RELOAD_FOR_OPERAND_ADDRESS:
5649 : 0 : return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5650 : 0 : || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5651 : 0 : && (!reloads_unique_chain_p (r1, r2)
5652 : 0 : || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5653 : :
5654 : 0 : case RELOAD_FOR_OPADDR_ADDR:
5655 : 0 : return (r2_type == RELOAD_FOR_INPUT
5656 : 0 : || r2_type == RELOAD_FOR_OPADDR_ADDR);
5657 : :
5658 : 0 : case RELOAD_FOR_OUTPUT:
5659 : 0 : return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5660 : 0 : || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5661 : 0 : || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5662 : 0 : && r2_opnum >= r1_opnum));
5663 : :
5664 : 0 : case RELOAD_FOR_INSN:
5665 : 0 : return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5666 : 0 : || r2_type == RELOAD_FOR_INSN
5667 : 0 : || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5668 : :
5669 : 0 : case RELOAD_FOR_OTHER_ADDRESS:
5670 : 0 : return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5671 : :
5672 : : case RELOAD_OTHER:
5673 : : return 1;
5674 : :
5675 : 0 : default:
5676 : 0 : gcc_unreachable ();
5677 : : }
5678 : : }
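: :
: : /* For instance, a RELOAD_FOR_INPUT reload for operand 1 conflicts
: : with a RELOAD_FOR_INPUT_ADDRESS reload for operand 2: the address
: : of a later input is computed while the earlier input's reload
: : register is still live. This mirrors the ordering encoded in
: : reload_reg_free_p above. */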
5679 : :
5680 : : /* Indexed by reload number, 1 if the incoming value is
5681 : : inherited from previous insns. */
5682 : : static char reload_inherited[MAX_RELOADS];
5683 : :
5684 : : /* For an inherited reload, this is the insn the reload was inherited from,
5685 : : if we know it. Otherwise, this is 0. */
5686 : : static rtx_insn *reload_inheritance_insn[MAX_RELOADS];
5687 : :
5688 : : /* If nonzero, this is a place to get the value of the reload,
5689 : : rather than using reload_in. */
5690 : : static rtx reload_override_in[MAX_RELOADS];
5691 : :
5692 : : /* For each reload, the hard register number of the register used,
5693 : : or -1 if we did not need a register for this reload. */
5694 : : static int reload_spill_index[MAX_RELOADS];
5695 : :
5696 : : /* Index X is the value of rld[X].reg_rtx, adjusted for the input mode. */
5697 : : static rtx reload_reg_rtx_for_input[MAX_RELOADS];
5698 : :
5699 : : /* Index X is the value of rld[X].reg_rtx, adjusted for the output mode. */
5700 : : static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5701 : :
5702 : : /* Subroutine of free_for_value_p, used to check a single register.
5703 : : START_REGNO is the starting regno of the full reload register
5704 : : (possibly comprising multiple hard registers) that we are considering. */
5705 : :
5706 : : static int
5707 : 0 : reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
5708 : : enum reload_type type, rtx value, rtx out,
5709 : : int reloadnum, int ignore_address_reloads)
5710 : : {
5711 : 0 : int time1;
5712 : : /* Set if we see an input reload that must not share its reload register
5713 : : with any new earlyclobber, but might otherwise share the reload
5714 : : register with an output or input-output reload. */
5715 : 0 : int check_earlyclobber = 0;
5716 : 0 : int i;
5717 : 0 : int copy = 0;
5718 : :
5719 : 0 : if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5720 : : return 0;
5721 : :
5722 : 0 : if (out == const0_rtx)
5723 : : {
5724 : 0 : copy = 1;
5725 : 0 : out = NULL_RTX;
5726 : : }
5727 : :
5728 : : /* We use some pseudo 'time' value to check if the lifetimes of the
5729 : : new register use would overlap with the one of a previous reload
5730 : : that is not read-only or uses a different value.
5731 : : The 'time' used doesn't have to be linear in any shape or form, just
5732 : : monotonic.
5733 : : Some reload types use different 'buckets' for each operand.
5734 : : So there are MAX_RECOG_OPERANDS different time values for each
5735 : : such reload type.
5736 : : We compute TIME1 as the time when the register for the prospective
5737 : : new reload ceases to be live, and TIME2 for each existing
5738 : : reload as the time when the reload register of that reload
5739 : : becomes live.
5740 : : Where there is little to be gained by exact lifetime calculations,
5741 : : we just make conservative assumptions, i.e. a longer lifetime;
5742 : : this is done in the 'default:' cases. */
5743 : 0 : switch (type)
5744 : : {
5745 : 0 : case RELOAD_FOR_OTHER_ADDRESS:
5746 : : /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
5747 : 0 : time1 = copy ? 0 : 1;
5748 : 0 : break;
5749 : 0 : case RELOAD_OTHER:
5750 : 0 : time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5751 : : break;
5752 : : /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5753 : : RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2 ,
5754 : : respectively, to the time values for these, we get distinct time
5755 : : values. To get distinct time values for each operand, we have to
5756 : : multiply opnum by at least three. We round that up to four because
5757 : : multiply by four is often cheaper. */
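: : /* Concretely, for opnum == 2 the values below are 10 (2*4 + 2) for
: : RELOAD_FOR_INPADDR_ADDRESS, 11 for RELOAD_FOR_INPUT_ADDRESS and 12
: : for a RELOAD_FOR_INPUT that is treated as a copy, while operand 3's
: : sequence starts at 14, so reloads for different operands never
: : share a time value. */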
5758 : 0 : case RELOAD_FOR_INPADDR_ADDRESS:
5759 : 0 : time1 = opnum * 4 + 2;
5760 : 0 : break;
5761 : 0 : case RELOAD_FOR_INPUT_ADDRESS:
5762 : 0 : time1 = opnum * 4 + 3;
5763 : 0 : break;
5764 : 0 : case RELOAD_FOR_INPUT:
5765 : : /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5766 : : executes (inclusive). */
5767 : 0 : time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
5768 : : break;
5769 : : case RELOAD_FOR_OPADDR_ADDR:
5770 : : /* opnum * 4 + 4
5771 : : <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5772 : : time1 = MAX_RECOG_OPERANDS * 4 + 1;
5773 : : break;
5774 : 0 : case RELOAD_FOR_OPERAND_ADDRESS:
5775 : : /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5776 : : is executed. */
5777 : 0 : time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5778 : : break;
5779 : 0 : case RELOAD_FOR_OUTADDR_ADDRESS:
5780 : 0 : time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
5781 : 0 : break;
5782 : 0 : case RELOAD_FOR_OUTPUT_ADDRESS:
5783 : 0 : time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
5784 : 0 : break;
5785 : : default:
5786 : 0 : time1 = MAX_RECOG_OPERANDS * 5 + 5;
5787 : : }
5788 : :
5789 : 0 : for (i = 0; i < n_reloads; i++)
5790 : : {
5791 : 0 : rtx reg = rld[i].reg_rtx;
5792 : 0 : if (reg && REG_P (reg)
5793 : 0 : && (unsigned) regno - true_regnum (reg) < REG_NREGS (reg)
5794 : 0 : && i != reloadnum)
5795 : : {
5796 : 0 : rtx other_input = rld[i].in;
5797 : :
5798 : : /* If the other reload loads the same input value, that
5799 : : avoids a conflict only if it is loading it into
5800 : : the same register. */
5801 : 0 : if (true_regnum (reg) != start_regno)
5802 : : other_input = NULL_RTX;
5803 : 0 : if (! other_input || ! rtx_equal_p (other_input, value)
5804 : 0 : || rld[i].out || out)
5805 : : {
5806 : 0 : int time2;
5807 : 0 : switch (rld[i].when_needed)
5808 : : {
5809 : : case RELOAD_FOR_OTHER_ADDRESS:
5810 : : time2 = 0;
5811 : : break;
5812 : 0 : case RELOAD_FOR_INPADDR_ADDRESS:
5813 : : /* find_reloads makes sure that a
5814 : : RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is used
5815 : : by at most one - the first -
5816 : : RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS. If the
5817 : : address reload is inherited, the address address reload
5818 : : goes away, so we can ignore this conflict. */
5819 : 0 : if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5820 : 0 : && ignore_address_reloads
5821 : : /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5822 : : Then the address address is still needed to store
5823 : : back the new address. */
5824 : 0 : && ! rld[reloadnum].out)
5825 : 0 : continue;
5826 : : /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5827 : : RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5828 : : reloads go away. */
5829 : 0 : if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5830 : 0 : && ignore_address_reloads
5831 : : /* Unless we are reloading an auto_inc expression. */
5832 : 0 : && ! rld[reloadnum].out)
5833 : 0 : continue;
5834 : 0 : time2 = rld[i].opnum * 4 + 2;
5835 : 0 : break;
5836 : 0 : case RELOAD_FOR_INPUT_ADDRESS:
5837 : 0 : if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
5838 : 0 : && ignore_address_reloads
5839 : 0 : && ! rld[reloadnum].out)
5840 : 0 : continue;
5841 : 0 : time2 = rld[i].opnum * 4 + 3;
5842 : 0 : break;
5843 : 0 : case RELOAD_FOR_INPUT:
5844 : 0 : time2 = rld[i].opnum * 4 + 4;
5845 : 0 : check_earlyclobber = 1;
5846 : 0 : break;
5847 : : /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
5848 : : == MAX_RECOG_OPERANDS * 4 */
5849 : 0 : case RELOAD_FOR_OPADDR_ADDR:
5850 : 0 : if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5851 : 0 : && ignore_address_reloads
5852 : 0 : && ! rld[reloadnum].out)
5853 : 0 : continue;
5854 : : time2 = MAX_RECOG_OPERANDS * 4 + 1;
5855 : : break;
5856 : 0 : case RELOAD_FOR_OPERAND_ADDRESS:
5857 : 0 : time2 = MAX_RECOG_OPERANDS * 4 + 2;
5858 : 0 : check_earlyclobber = 1;
5859 : 0 : break;
5860 : 0 : case RELOAD_FOR_INSN:
5861 : 0 : time2 = MAX_RECOG_OPERANDS * 4 + 3;
5862 : 0 : break;
5863 : 0 : case RELOAD_FOR_OUTPUT:
5864 : : /* All RELOAD_FOR_OUTPUT reloads become live just after the
5865 : : instruction is executed. */
5866 : 0 : time2 = MAX_RECOG_OPERANDS * 4 + 4;
5867 : 0 : break;
5868 : : /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5869 : : the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5870 : : value. */
5871 : 0 : case RELOAD_FOR_OUTADDR_ADDRESS:
5872 : 0 : if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5873 : 0 : && ignore_address_reloads
5874 : 0 : && ! rld[reloadnum].out)
5875 : 0 : continue;
5876 : 0 : time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
5877 : 0 : break;
5878 : 0 : case RELOAD_FOR_OUTPUT_ADDRESS:
5879 : 0 : time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
5880 : 0 : break;
5881 : 0 : case RELOAD_OTHER:
5882 : : /* If there is no conflict in the input part, handle this
5883 : : like an output reload. */
5884 : 0 : if (! rld[i].in || rtx_equal_p (other_input, value))
5885 : : {
5886 : 0 : time2 = MAX_RECOG_OPERANDS * 4 + 4;
5887 : : /* Earlyclobbered outputs must conflict with inputs. */
5888 : 0 : if (earlyclobber_operand_p (rld[i].out))
5889 : 0 : time2 = MAX_RECOG_OPERANDS * 4 + 3;
5890 : :
5891 : : break;
5892 : : }
5893 : 0 : time2 = 1;
5894 : : /* RELOAD_OTHER might be live beyond instruction execution,
5895 : : but this is not obvious when we set time2 = 1. So check
5896 : : here if there might be a problem with the new reload
5897 : : clobbering the register used by the RELOAD_OTHER. */
5898 : 0 : if (out)
5899 : : return 0;
5900 : : break;
5901 : : default:
5902 : : return 0;
5903 : : }
5904 : 0 : if ((time1 >= time2
5905 : 0 : && (! rld[i].in || rld[i].out
5906 : 0 : || ! rtx_equal_p (other_input, value)))
5907 : 0 : || (out && rld[reloadnum].out_reg
5908 : 0 : && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
5909 : 0 : return 0;
5910 : : }
5911 : : }
5912 : : }
5913 : :
5914 : : /* Earlyclobbered outputs must conflict with inputs. */
5915 : 0 : if (check_earlyclobber && out && earlyclobber_operand_p (out))
5916 : : return 0;
5917 : :
5918 : : return 1;
5919 : : }
5920 : :
5921 : : /* Return 1 if the value in reload reg REGNO, as used by a reload
5922 : : needed for the part of the insn specified by OPNUM and TYPE,
5923 : : may be used to load VALUE into it.
5924 : :
5925 : : MODE is the mode in which the register is used, this is needed to
5926 : : determine how many hard regs to test.
5927 : :
5928 : : Other read-only reloads with the same value do not conflict
5929 : : unless OUT is nonzero and these other reloads have to live while
5930 : : output reloads live.
5931 : : If OUT is CONST0_RTX, this is a special case: it means that the
5932 : : test should not be for using register REGNO as reload register, but
5933 : : for copying from register REGNO into the reload register.
5934 : :
5935 : : RELOADNUM is the number of the reload we want to load this value for;
5936 : : a reload does not conflict with itself.
5937 : :
5938 : : When IGNORE_ADDRESS_RELOADS is set, we cannot have conflicts with
5939 : : reloads that load an address for the very reload we are considering.
5940 : :
5941 : : The caller has to make sure that there is no conflict with the return
5942 : : register. */
5943 : :
5944 : : static int
5945 : 0 : free_for_value_p (int regno, machine_mode mode, int opnum,
5946 : : enum reload_type type, rtx value, rtx out, int reloadnum,
5947 : : int ignore_address_reloads)
5948 : : {
5949 : 0 : int nregs = hard_regno_nregs (regno, mode);
5950 : 0 : while (nregs-- > 0)
5951 : 0 : if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
5952 : : value, out, reloadnum,
5953 : : ignore_address_reloads))
5954 : : return 0;
5955 : : return 1;
5956 : : }
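: : /* Callers use the CONST0_RTX convention for OUT when they only want
: : to know whether the value may be copied out of REGNO; see for
: : instance the inheritance test in choose_reload_regs below, which
: : calls free_for_value_p (i, rld[r].mode, rld[r].opnum,
: : rld[r].when_needed, rld[r].in, const0_rtx, r, 1). */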
5957 : :
5958 : : /* Return true if the rtx X is invariant over the current function. */
5959 : : /* ??? Actually, the places where we use this expect exactly what is
5960 : : tested here, and not everything that is function invariant. In
5961 : : particular, the frame pointer and arg pointer are special cased;
5962 : : pic_offset_table_rtx is not, and we must not spill these things to
5963 : : memory. */
5964 : :
5965 : : bool
5966 : 21511509 : function_invariant_p (const_rtx x)
5967 : : {
5968 : 21511509 : if (CONSTANT_P (x))
5969 : : return 1;
5970 : 16402939 : if (x == frame_pointer_rtx || x == arg_pointer_rtx)
5971 : : return 1;
5972 : 16395561 : if (GET_CODE (x) == PLUS
5973 : 4093963 : && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
5974 : 2333837 : && GET_CODE (XEXP (x, 1)) == CONST_INT)
5975 : 2333739 : return 1;
5976 : : return 0;
5977 : : }
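: : /* For example, (const_int 4), frame_pointer_rtx and
: : (plus frame_pointer_rtx (const_int 8)) all count as function
: : invariant here, while pic_offset_table_rtx and
: : (plus frame_pointer_rtx (reg 100)) do not. */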
5978 : :
5979 : : /* Determine whether the reload reg X overlaps any rtx'es used for
5980 : : overriding inheritance. Return nonzero if so. */
5981 : :
5982 : : static int
5983 : 0 : conflicts_with_override (rtx x)
5984 : : {
5985 : 0 : int i;
5986 : 0 : for (i = 0; i < n_reloads; i++)
5987 : 0 : if (reload_override_in[i]
5988 : 0 : && reg_overlap_mentioned_p (x, reload_override_in[i]))
5989 : : return 1;
5990 : : return 0;
5991 : : }
5992 : :
5993 : : /* Give an error message saying we failed to find a reload for INSN,
5994 : : and clear out reload R. */
5995 : : static void
5996 : 0 : failed_reload (rtx_insn *insn, int r)
5997 : : {
5998 : 0 : if (asm_noperands (PATTERN (insn)) < 0)
5999 : : /* It's the compiler's fault. */
6000 : 0 : fatal_insn ("could not find a spill register", insn);
6001 : :
6002 : : /* It's the user's fault; the operand's mode and constraint
6003 : : don't match. Disable this reload so we don't crash in final. */
6004 : 0 : error_for_asm (insn,
6005 : : "%<asm%> operand constraint incompatible with operand size");
6006 : 0 : rld[r].in = 0;
6007 : 0 : rld[r].out = 0;
6008 : 0 : rld[r].reg_rtx = 0;
6009 : 0 : rld[r].optional = 1;
6010 : 0 : rld[r].secondary_p = 1;
6011 : 0 : }
6012 : :
6013 : : /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6014 : : for reload R. If it's valid, get an rtx for it. Return nonzero if
6015 : : successful. */
6016 : : static int
6017 : 0 : set_reload_reg (int i, int r)
6018 : : {
6019 : 0 : int regno;
6020 : 0 : rtx reg = spill_reg_rtx[i];
6021 : :
6022 : 0 : if (reg == 0 || GET_MODE (reg) != rld[r].mode)
6023 : 0 : spill_reg_rtx[i] = reg
6024 : 0 : = gen_rtx_REG (rld[r].mode, spill_regs[i]);
6025 : :
6026 : 0 : regno = true_regnum (reg);
6027 : :
6028 : : /* Detect when the reload reg can't hold the reload mode.
6029 : : This used to be one `if', but the Sequent compiler couldn't handle that. */
6030 : 0 : if (targetm.hard_regno_mode_ok (regno, rld[r].mode))
6031 : : {
6032 : 0 : machine_mode test_mode = VOIDmode;
6033 : 0 : if (rld[r].in)
6034 : 0 : test_mode = GET_MODE (rld[r].in);
6035 : : /* If rld[r].in has VOIDmode, it means we will load it
6036 : : in whatever mode the reload reg has: to wit, rld[r].mode.
6037 : : We have already tested that for validity. */
6038 : : /* Aside from that, we need to test that the expressions
6039 : : to reload from or into have modes which are valid for this
6040 : : reload register. Otherwise the reload insns would be invalid. */
6041 : 0 : if (! (rld[r].in != 0 && test_mode != VOIDmode
6042 : 0 : && !targetm.hard_regno_mode_ok (regno, test_mode)))
6043 : 0 : if (! (rld[r].out != 0
6044 : 0 : && !targetm.hard_regno_mode_ok (regno, GET_MODE (rld[r].out))))
6045 : : {
6046 : : /* The reg is OK. */
6047 : 0 : last_spill_reg = i;
6048 : :
6049 : : /* Mark as in use for this insn the reload regs we use
6050 : : for this. */
6051 : 0 : mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
6052 : : rld[r].when_needed, rld[r].mode);
6053 : :
6054 : 0 : rld[r].reg_rtx = reg;
6055 : 0 : reload_spill_index[r] = spill_regs[i];
6056 : 0 : return 1;
6057 : : }
6058 : : }
6059 : : return 0;
6060 : : }
6061 : :
6062 : : /* Find a spill register to use as a reload register for reload R.
6063 : : LAST_RELOAD is nonzero if this is the last reload for the insn being
6064 : : processed.
6065 : :
6066 : : Set rld[R].reg_rtx to the register allocated.
6067 : :
6068 : : We return 1 if successful, or 0 if we couldn't find a spill reg and
6069 : : we didn't change anything. */
6070 : :
6071 : : static int
6072 : 0 : allocate_reload_reg (class insn_chain *chain ATTRIBUTE_UNUSED, int r,
6073 : : int last_reload)
6074 : : {
6075 : 0 : int i, pass, count;
6076 : :
6077 : : /* If we put this reload ahead, thinking it is a group,
6078 : : then insist on finding a group. Otherwise we can grab a
6079 : : reg that some other reload needs.
6080 : : (That can happen when we have a 68000 DATA_OR_FP_REG
6081 : : which is a group of data regs or one fp reg.)
6082 : : We need not be so restrictive if there are no more reloads
6083 : : for this insn.
6084 : :
6085 : : ??? Really it would be nicer to have smarter handling
6086 : : for that kind of reg class, where a problem like this is normal.
6087 : : Perhaps those classes should be avoided for reloading
6088 : : by use of more alternatives. */
6089 : :
6090 : 0 : int force_group = rld[r].nregs > 1 && ! last_reload;
6091 : :
6092 : : /* If we want a single register and haven't yet found one,
6093 : : take any reg in the right class and not in use.
6094 : : If we want a consecutive group, here is where we look for it.
6095 : :
6096 : : We use three passes so we can first look for reload regs to
6097 : : reuse, which are already in use for other reloads in this insn,
6098 : : and only then use additional registers which are not "bad", then
6099 : : finally any register.
6100 : :
6101 : : I think that maximizing reuse is needed to make sure we don't
6102 : : run out of reload regs. Suppose we have three reloads, and
6103 : : reloads A and B can share regs. These need two regs.
6104 : : Suppose A and B are given different regs.
6105 : : That leaves none for C. */
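: : /* Concretely: pass 0 accepts only registers that are already in use
: : for another reload of this insn and not reserved for inheritance;
: : pass 1 also considers registers not yet used, skipping any that
: : ira_bad_reload_regno flags as bad for this reload; pass 2 accepts
: : any otherwise suitable spill register. */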
6106 : 0 : for (pass = 0; pass < 3; pass++)
6107 : : {
6108 : : /* I is the index in spill_regs.
6109 : : We advance it round-robin between insns to use all spill regs
6110 : : equally, so that inherited reloads have a chance
6111 : : of leapfrogging each other. */
6112 : :
6113 : 0 : i = last_spill_reg;
6114 : :
6115 : 0 : for (count = 0; count < n_spills; count++)
6116 : : {
6117 : 0 : int rclass = (int) rld[r].rclass;
6118 : 0 : int regnum;
6119 : :
6120 : 0 : i++;
6121 : 0 : if (i >= n_spills)
6122 : 0 : i -= n_spills;
6123 : 0 : regnum = spill_regs[i];
6124 : :
6125 : 0 : if ((reload_reg_free_p (regnum, rld[r].opnum,
6126 : : rld[r].when_needed)
6127 : 0 : || (rld[r].in
6128 : : /* We check reload_reg_used to make sure we
6129 : : don't clobber the return register. */
6130 : 0 : && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
6131 : 0 : && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
6132 : : rld[r].when_needed, rld[r].in,
6133 : : rld[r].out, r, 1)))
6134 : 0 : && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
6135 : 0 : && targetm.hard_regno_mode_ok (regnum, rld[r].mode)
6136 : : /* Look first for regs to share, then for unshared. But
6137 : : don't share regs used for inherited reloads; they are
6138 : : the ones we want to preserve. */
6139 : 0 : && (pass
6140 : 0 : || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
6141 : : regnum)
6142 : 0 : && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
6143 : : regnum))))
6144 : : {
6145 : 0 : int nr = hard_regno_nregs (regnum, rld[r].mode);
6146 : :
6147 : : /* During the second pass we want to avoid reload registers
6148 : : which are "bad" for this reload. */
6149 : 0 : if (pass == 1
6150 : 0 : && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
6151 : 0 : continue;
6152 : :
6153 : : /* Avoid the problem where spilling a GENERAL_OR_FP_REG
6154 : : (on 68000) got us two FP regs. If NR is 1,
6155 : : we would reject both of them. */
6156 : 0 : if (force_group)
6157 : 0 : nr = rld[r].nregs;
6158 : : /* If we need only one reg, we have already won. */
6159 : 0 : if (nr == 1)
6160 : : {
6161 : : /* But reject a single reg if we demand a group. */
6162 : 0 : if (force_group)
6163 : 0 : continue;
6164 : : break;
6165 : : }
6166 : : /* Otherwise check that as many consecutive regs as we need
6167 : : are available here. */
6168 : 0 : while (nr > 1)
6169 : : {
6170 : 0 : int regno = regnum + nr - 1;
6171 : 0 : if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
6172 : 0 : && spill_reg_order[regno] >= 0
6173 : 0 : && reload_reg_free_p (regno, rld[r].opnum,
6174 : : rld[r].when_needed)))
6175 : : break;
6176 : 0 : nr--;
6177 : : }
6178 : 0 : if (nr == 1)
6179 : : break;
6180 : : }
6181 : : }
6182 : :
6183 : : /* If we found something on the current pass, omit later passes. */
6184 : 0 : if (count < n_spills)
6185 : : break;
6186 : : }
6187 : :
6188 : : /* We should have found a spill register by now. */
6189 : 0 : if (count >= n_spills)
6190 : : return 0;
6191 : :
6192 : : /* I is the index in SPILL_REG_RTX of the reload register we are to
6193 : : allocate. Get an rtx for it and find its register number. */
6194 : :
6195 : 0 : return set_reload_reg (i, r);
6196 : : }
6197 : :
6198 : : /* Initialize all the tables needed to allocate reload registers.
6199 : : CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
6200 : : is the array we use to restore the reg_rtx field for every reload. */
6201 : :
6202 : : static void
6203 : 0 : choose_reload_regs_init (class insn_chain *chain, rtx *save_reload_reg_rtx)
6204 : : {
6205 : 0 : int i;
6206 : :
6207 : 0 : for (i = 0; i < n_reloads; i++)
6208 : 0 : rld[i].reg_rtx = save_reload_reg_rtx[i];
6209 : :
6210 : 0 : memset (reload_inherited, 0, MAX_RELOADS);
6211 : 0 : memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
6212 : 0 : memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
6213 : :
6214 : 0 : CLEAR_HARD_REG_SET (reload_reg_used);
6215 : 0 : CLEAR_HARD_REG_SET (reload_reg_used_at_all);
6216 : 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
6217 : 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
6218 : 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
6219 : 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
6220 : :
6221 : 0 : CLEAR_HARD_REG_SET (reg_used_in_insn);
6222 : : {
6223 : : HARD_REG_SET tmp;
6224 : 0 : REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
6225 : 0 : reg_used_in_insn |= tmp;
6226 : 0 : REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
6227 : 0 : reg_used_in_insn |= tmp;
6228 : 0 : compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
6229 : 0 : compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
6230 : : }
6231 : :
6232 : 0 : for (i = 0; i < reload_n_operands; i++)
6233 : : {
6234 : 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
6235 : 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
6236 : 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
6237 : 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
6238 : 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
6239 : 0 : CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
6240 : : }
6241 : :
6242 : 0 : reload_reg_unavailable = ~chain->used_spill_regs;
6243 : :
6244 : 0 : CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
6245 : :
6246 : 0 : for (i = 0; i < n_reloads; i++)
6247 : : /* If we have already decided to use a certain register,
6248 : : don't use it in another way. */
6249 : 0 : if (rld[i].reg_rtx)
6250 : 0 : mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
6251 : : rld[i].when_needed, rld[i].mode);
6252 : 0 : }
6253 : :
6254 : : /* If X is not a subreg, return it unmodified. If it is a subreg,
6255 : : look up whether we made a replacement for the SUBREG_REG. Return
6256 : : either the replacement or the SUBREG_REG. */
6257 : :
6258 : : static rtx
6259 : 0 : replaced_subreg (rtx x)
6260 : : {
6261 : 0 : if (GET_CODE (x) == SUBREG)
6262 : 0 : return find_replacement (&SUBREG_REG (x));
6263 : : return x;
6264 : : }
6265 : :
6266 : : /* Compute the offset to pass to subreg_regno_offset, for a pseudo of
6267 : : mode OUTERMODE that is available in a hard reg of mode INNERMODE.
6268 : : SUBREG is non-NULL if the pseudo is a subreg whose reg is a pseudo,
6269 : : otherwise it is NULL. */
6270 : :
6271 : : static poly_int64
6272 : 0 : compute_reload_subreg_offset (machine_mode outermode,
6273 : : rtx subreg,
6274 : : machine_mode innermode)
6275 : : {
6276 : 0 : poly_int64 outer_offset;
6277 : 0 : machine_mode middlemode;
6278 : :
6279 : 0 : if (!subreg)
6280 : 0 : return subreg_lowpart_offset (outermode, innermode);
6281 : :
6282 : 0 : outer_offset = SUBREG_BYTE (subreg);
6283 : 0 : middlemode = GET_MODE (SUBREG_REG (subreg));
6284 : :
6285 : : /* If SUBREG is paradoxical then return the normal lowpart offset
6286 : : for OUTERMODE and INNERMODE. Our caller has already checked
6287 : : that OUTERMODE fits in INNERMODE. */
6288 : 0 : if (paradoxical_subreg_p (outermode, middlemode))
6289 : 0 : return subreg_lowpart_offset (outermode, innermode);
6290 : :
6291 : : /* SUBREG is normal, but may not be lowpart; return OUTER_OFFSET
6292 : : plus the normal lowpart offset for MIDDLEMODE and INNERMODE. */
6293 : 0 : return outer_offset + subreg_lowpart_offset (middlemode, innermode);
6294 : : }
6295 : :
6296 : : /* Assign hard reg targets for the pseudo-registers we must reload
6297 : : into hard regs for this insn.
6298 : : Also output the instructions to copy them in and out of the hard regs.
6299 : :
6300 : : For machines with register classes, we are responsible for
6301 : : finding a reload reg in the proper class. */
6302 : :
6303 : : static void
6304 : 0 : choose_reload_regs (class insn_chain *chain)
6305 : : {
6306 : 0 : rtx_insn *insn = chain->insn;
6307 : 0 : int i, j;
6308 : 0 : unsigned int max_group_size = 1;
6309 : 0 : enum reg_class group_class = NO_REGS;
6310 : 0 : int pass, win, inheritance;
6311 : :
6312 : 0 : rtx save_reload_reg_rtx[MAX_RELOADS];
6313 : :
6314 : : /* In order to be certain of getting the registers we need,
6315 : : we must sort the reloads into order of increasing register class.
6316 : : Then our grabbing of reload registers will parallel the process
6317 : : that provided the reload registers.
6318 : :
6319 : : Also note whether any of the reloads wants a consecutive group of regs.
6320 : : If so, record the maximum size of the group desired and what
6321 : : register class contains all the groups needed by this insn. */
6322 : :
6323 : 0 : for (j = 0; j < n_reloads; j++)
6324 : : {
6325 : 0 : reload_order[j] = j;
6326 : 0 : if (rld[j].reg_rtx != NULL_RTX)
6327 : : {
6328 : 0 : gcc_assert (REG_P (rld[j].reg_rtx)
6329 : : && HARD_REGISTER_P (rld[j].reg_rtx));
6330 : 0 : reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6331 : : }
6332 : : else
6333 : 0 : reload_spill_index[j] = -1;
6334 : :
6335 : 0 : if (rld[j].nregs > 1)
6336 : : {
6337 : 0 : max_group_size = MAX (rld[j].nregs, max_group_size);
6338 : 0 : group_class
6339 : 0 : = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
6340 : : }
6341 : :
6342 : 0 : save_reload_reg_rtx[j] = rld[j].reg_rtx;
6343 : : }
6344 : :
6345 : 0 : if (n_reloads > 1)
6346 : 0 : qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6347 : :
6348 : : /* If -O, try first with inheritance, then turning it off.
6349 : : If not -O, don't do inheritance.
6350 : : Using inheritance when not optimizing leads to paradoxes
6351 : : with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6352 : : because one side of the comparison might be inherited. */
6353 : 0 : win = 0;
6354 : 0 : for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6355 : : {
6356 : 0 : choose_reload_regs_init (chain, save_reload_reg_rtx);
6357 : :
6358 : : /* Process the reloads in order of preference just found.
6359 : : Beyond this point, subregs can be found in reload_reg_rtx.
6360 : :
6361 : : This used to look for an existing reloaded home for all of the
6362 : : reloads, and only then perform any new reloads. But that could lose
6363 : : if the reloads were done out of reg-class order because a later
6364 : : reload with a looser constraint might have an old home in a register
6365 : : needed by an earlier reload with a tighter constraint.
6366 : :
6367 : : To solve this, we make two passes over the reloads, in the order
6368 : : described above. In the first pass we try to inherit a reload
6369 : : from a previous insn. If there is a later reload that needs a
6370 : : class that is a proper subset of the class being processed, we must
6371 : : also allocate a spill register during the first pass.
6372 : :
6373 : : Then make a second pass over the reloads to allocate any reloads
6374 : : that haven't been given registers yet. */
6375 : :
6376 : 0 : for (j = 0; j < n_reloads; j++)
6377 : : {
6378 : 0 : int r = reload_order[j];
6379 : 0 : rtx search_equiv = NULL_RTX;
6380 : :
6381 : : /* Ignore reloads that got marked inoperative. */
6382 : 0 : if (rld[r].out == 0 && rld[r].in == 0
6383 : 0 : && ! rld[r].secondary_p)
6384 : 0 : continue;
6385 : :
6386 : : /* If find_reloads chose to use reload_in or reload_out as a reload
6387 : : register, we don't need to choose one. Otherwise, try even if it
6388 : : found one since we might save an insn if we find the value lying
6389 : : around.
6390 : : Try also when reload_in is a pseudo without a hard reg. */
6391 : 0 : if (rld[r].in != 0 && rld[r].reg_rtx != 0
6392 : 0 : && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6393 : 0 : || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6394 : 0 : && !MEM_P (rld[r].in)
6395 : 0 : && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6396 : 0 : continue;
6397 : :
6398 : : #if 0 /* No longer needed for correct operation.
6399 : : It might give better code, or might not; worth an experiment? */
6400 : : /* If this is an optional reload, we can't inherit from earlier insns
6401 : : until we are sure that any non-optional reloads have been allocated.
6402 : : The following code takes advantage of the fact that optional reloads
6403 : : are at the end of reload_order. */
6404 : : if (rld[r].optional != 0)
6405 : : for (i = 0; i < j; i++)
6406 : : if ((rld[reload_order[i]].out != 0
6407 : : || rld[reload_order[i]].in != 0
6408 : : || rld[reload_order[i]].secondary_p)
6409 : : && ! rld[reload_order[i]].optional
6410 : : && rld[reload_order[i]].reg_rtx == 0)
6411 : : allocate_reload_reg (chain, reload_order[i], 0);
6412 : : #endif
6413 : :
6414 : : /* First see if this pseudo is already available as reloaded
6415 : : for a previous insn. We cannot try to inherit for reloads
6416 : : that are smaller than the maximum number of registers needed
6417 : : for groups unless the register we would allocate cannot be used
6418 : : for the groups.
6419 : :
6420 : : We could check here to see if this is a secondary reload for
6421 : : an object that is already in a register of the desired class.
6422 : : This would avoid the need for the secondary reload register.
6423 : : But this is complex because we can't easily determine what
6424 : : objects might want to be loaded via this reload. So let a
6425 : : register be allocated here. In `emit_reload_insns' we suppress
6426 : : one of the loads in the case described above. */
6427 : :
6428 : 0 : if (inheritance)
6429 : : {
6430 : 0 : poly_int64 byte = 0;
6431 : 0 : int regno = -1;
6432 : 0 : machine_mode mode = VOIDmode;
6433 : 0 : rtx subreg = NULL_RTX;
6434 : :
6435 : 0 : if (rld[r].in == 0)
6436 : : ;
6437 : 0 : else if (REG_P (rld[r].in))
6438 : : {
6439 : 0 : regno = REGNO (rld[r].in);
6440 : 0 : mode = GET_MODE (rld[r].in);
6441 : : }
6442 : 0 : else if (REG_P (rld[r].in_reg))
6443 : : {
6444 : 0 : regno = REGNO (rld[r].in_reg);
6445 : 0 : mode = GET_MODE (rld[r].in_reg);
6446 : : }
6447 : 0 : else if (GET_CODE (rld[r].in_reg) == SUBREG
6448 : 0 : && REG_P (SUBREG_REG (rld[r].in_reg)))
6449 : : {
6450 : 0 : regno = REGNO (SUBREG_REG (rld[r].in_reg));
6451 : 0 : if (regno < FIRST_PSEUDO_REGISTER)
6452 : 0 : regno = subreg_regno (rld[r].in_reg);
6453 : : else
6454 : : {
6455 : 0 : subreg = rld[r].in_reg;
6456 : 0 : byte = SUBREG_BYTE (subreg);
6457 : : }
6458 : 0 : mode = GET_MODE (rld[r].in_reg);
6459 : : }
6460 : : #if AUTO_INC_DEC
6461 : : else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6462 : : && REG_P (XEXP (rld[r].in_reg, 0)))
6463 : : {
6464 : : regno = REGNO (XEXP (rld[r].in_reg, 0));
6465 : : mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6466 : : rld[r].out = rld[r].in;
6467 : : }
6468 : : #endif
6469 : : #if 0
6470 : : /* This won't work, since REGNO can be a pseudo reg number.
6471 : : Also, it takes much more hair to keep track of all the things
6472 : : that can invalidate an inherited reload of part of a pseudoreg. */
6473 : : else if (GET_CODE (rld[r].in) == SUBREG
6474 : : && REG_P (SUBREG_REG (rld[r].in)))
6475 : : regno = subreg_regno (rld[r].in);
6476 : : #endif
6477 : :
6478 : 0 : if (regno >= 0
6479 : 0 : && reg_last_reload_reg[regno] != 0
6480 : 0 : && (known_ge
6481 : : (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno])),
6482 : : GET_MODE_SIZE (mode) + byte))
6483 : : /* Verify that the register it's in can be used in
6484 : : mode MODE. */
6485 : 0 : && (REG_CAN_CHANGE_MODE_P
6486 : : (REGNO (reg_last_reload_reg[regno]),
6487 : : GET_MODE (reg_last_reload_reg[regno]),
6488 : : mode)))
6489 : : {
6490 : 0 : enum reg_class rclass = rld[r].rclass, last_class;
6491 : 0 : rtx last_reg = reg_last_reload_reg[regno];
6492 : :
6493 : 0 : i = REGNO (last_reg);
6494 : 0 : byte = compute_reload_subreg_offset (mode,
6495 : : subreg,
6496 : 0 : GET_MODE (last_reg));
6497 : 0 : i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6498 : 0 : last_class = REGNO_REG_CLASS (i);
6499 : :
6500 : 0 : if (reg_reloaded_contents[i] == regno
6501 : 0 : && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6502 : 0 : && targetm.hard_regno_mode_ok (i, rld[r].mode)
6503 : 0 : && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6504 : : /* Even if we can't use this register as a reload
6505 : : register, we might use it for reload_override_in,
6506 : : if copying it to the desired class is cheap
6507 : : enough. */
6508 : 0 : || ((register_move_cost (mode, last_class, rclass)
6509 : 0 : < memory_move_cost (mode, rclass, true))
6510 : 0 : && (secondary_reload_class (1, rclass, mode,
6511 : : last_reg)
6512 : : == NO_REGS)
6513 : 0 : && !(targetm.secondary_memory_needed
6514 : 0 : (mode, last_class, rclass))))
6515 : 0 : && (rld[r].nregs == max_group_size
6516 : 0 : || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6517 : : i))
6518 : 0 : && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6519 : : rld[r].when_needed, rld[r].in,
6520 : : const0_rtx, r, 1))
6521 : : {
6522 : : /* If a group is needed, verify that all the subsequent
6523 : : registers still have their values intact. */
6524 : 0 : int nr = hard_regno_nregs (i, rld[r].mode);
6525 : 0 : int k;
6526 : :
6527 : 0 : for (k = 1; k < nr; k++)
6528 : 0 : if (reg_reloaded_contents[i + k] != regno
6529 : 0 : || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6530 : : break;
6531 : :
6532 : 0 : if (k == nr)
6533 : : {
6534 : 0 : int i1;
6535 : 0 : int bad_for_class;
6536 : :
6537 : 0 : last_reg = (GET_MODE (last_reg) == mode
6538 : 0 : ? last_reg : gen_rtx_REG (mode, i));
6539 : :
6540 : 0 : bad_for_class = 0;
6541 : 0 : for (k = 0; k < nr; k++)
6542 : 0 : bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6543 : 0 : i+k);
6544 : :
6545 : : /* We found a register that contains the
6546 : : value we need. If this register is the
6547 : : same as an `earlyclobber' operand of the
6548 : : current insn, just mark it as a place to
6549 : : reload from since we can't use it as the
6550 : : reload register itself. */
6551 : :
6552 : 0 : for (i1 = 0; i1 < n_earlyclobbers; i1++)
6553 : 0 : if (reg_overlap_mentioned_for_reload_p
6554 : 0 : (reg_last_reload_reg[regno],
6555 : : reload_earlyclobbers[i1]))
6556 : : break;
6557 : :
6558 : 0 : if (i1 != n_earlyclobbers
6559 : 0 : || ! (free_for_value_p (i, rld[r].mode,
6560 : : rld[r].opnum,
6561 : : rld[r].when_needed, rld[r].in,
6562 : : rld[r].out, r, 1))
6563 : : /* Don't use it if we'd clobber a pseudo reg. */
6564 : 0 : || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6565 : 0 : && rld[r].out
6566 : 0 : && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6567 : : /* Don't clobber the frame pointer. */
6568 : 0 : || (i == HARD_FRAME_POINTER_REGNUM
6569 : 0 : && frame_pointer_needed
6570 : 0 : && rld[r].out)
6571 : : /* Don't really use the inherited spill reg
6572 : : if we need it wider than we've got it. */
6573 : 0 : || paradoxical_subreg_p (rld[r].mode, mode)
6574 : 0 : || bad_for_class
6575 : :
6576 : : /* If find_reloads chose reload_out as reload
6577 : : register, stay with it - that leaves the
6578 : : inherited register for subsequent reloads. */
6579 : 0 : || (rld[r].out && rld[r].reg_rtx
6580 : 0 : && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6581 : : {
6582 : 0 : if (! rld[r].optional)
6583 : : {
6584 : 0 : reload_override_in[r] = last_reg;
6585 : 0 : reload_inheritance_insn[r]
6586 : 0 : = reg_reloaded_insn[i];
6587 : : }
6588 : : }
6589 : : else
6590 : : {
6591 : 0 : int k;
6592 : : /* We can use this as a reload reg. */
6593 : : /* Mark the register as in use for this part of
6594 : : the insn. */
6595 : 0 : mark_reload_reg_in_use (i,
6596 : : rld[r].opnum,
6597 : : rld[r].when_needed,
6598 : : rld[r].mode);
6599 : 0 : rld[r].reg_rtx = last_reg;
6600 : 0 : reload_inherited[r] = 1;
6601 : 0 : reload_inheritance_insn[r]
6602 : 0 : = reg_reloaded_insn[i];
6603 : 0 : reload_spill_index[r] = i;
6604 : 0 : for (k = 0; k < nr; k++)
6605 : 0 : SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6606 : 0 : i + k);
6607 : : }
6608 : : }
6609 : : }
6610 : : }
6611 : : }
6612 : :
6613 : : /* Here's another way to see if the value is already lying around. */
6614 : 0 : if (inheritance
6615 : 0 : && rld[r].in != 0
6616 : 0 : && ! reload_inherited[r]
6617 : 0 : && rld[r].out == 0
6618 : 0 : && (CONSTANT_P (rld[r].in)
6619 : : || GET_CODE (rld[r].in) == PLUS
6620 : : || REG_P (rld[r].in)
6621 : : || MEM_P (rld[r].in))
6622 : 0 : && (rld[r].nregs == max_group_size
6623 : 0 : || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6624 : 0 : search_equiv = rld[r].in;
6625 : :
6626 : 0 : if (search_equiv)
6627 : : {
6628 : 0 : rtx equiv
6629 : 0 : = find_equiv_reg (search_equiv, insn, rld[r].rclass,
6630 : : -1, NULL, 0, rld[r].mode);
6631 : 0 : int regno = 0;
6632 : :
6633 : 0 : if (equiv != 0)
6634 : : {
6635 : 0 : if (REG_P (equiv))
6636 : 0 : regno = REGNO (equiv);
6637 : : else
6638 : : {
6639 : : /* This must be a SUBREG of a hard register.
6640 : : Make a new REG since this might be used in an
6641 : : address and not all machines support SUBREGs
6642 : : there. */
6643 : 0 : gcc_assert (GET_CODE (equiv) == SUBREG);
6644 : 0 : regno = subreg_regno (equiv);
6645 : 0 : equiv = gen_rtx_REG (rld[r].mode, regno);
6646 : : /* If we choose EQUIV as the reload register, but the
6647 : : loop below decides to cancel the inheritance, we'll
6648 : : end up reloading EQUIV in rld[r].mode, not the mode
6649 : : it had originally. That isn't safe when EQUIV isn't
6650 : : available as a spill register since its value might
6651 : : still be live at this point. */
6652 : 0 : for (i = regno; i < regno + (int) rld[r].nregs; i++)
6653 : 0 : if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6654 : 0 : equiv = 0;
6655 : : }
6656 : : }
6657 : :
6658 : : /* If we found a spill reg, reject it unless it is free
6659 : : and of the desired class. */
6660 : 0 : if (equiv != 0)
6661 : : {
6662 : 0 : int regs_used = 0;
6663 : 0 : int bad_for_class = 0;
6664 : 0 : int max_regno = regno + rld[r].nregs;
6665 : :
6666 : 0 : for (i = regno; i < max_regno; i++)
6667 : : {
6668 : 0 : regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6669 : : i);
6670 : 0 : bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6671 : : i);
6672 : : }
6673 : :
6674 : 0 : if ((regs_used
6675 : 0 : && ! free_for_value_p (regno, rld[r].mode,
6676 : : rld[r].opnum, rld[r].when_needed,
6677 : : rld[r].in, rld[r].out, r, 1))
6678 : 0 : || bad_for_class)
6679 : : equiv = 0;
6680 : : }
6681 : :
6682 : 0 : if (equiv != 0
6683 : 0 : && !targetm.hard_regno_mode_ok (regno, rld[r].mode))
6684 : : equiv = 0;
6685 : :
6686 : : /* We found a register that contains the value we need.
6687 : : If this register is the same as an `earlyclobber' operand
6688 : : of the current insn, just mark it as a place to reload from
6689 : : since we can't use it as the reload register itself. */
6690 : :
6691 : 0 : if (equiv != 0)
6692 : 0 : for (i = 0; i < n_earlyclobbers; i++)
6693 : 0 : if (reg_overlap_mentioned_for_reload_p (equiv,
6694 : : reload_earlyclobbers[i]))
6695 : : {
6696 : 0 : if (! rld[r].optional)
6697 : 0 : reload_override_in[r] = equiv;
6698 : : equiv = 0;
6699 : : break;
6700 : : }
6701 : :
6702 : : /* If the equiv register we have found is explicitly clobbered
6703 : : in the current insn, the reload type determines whether we
6704 : : can use it, use it only for reload_override_in, or not at all.
6705 : : In particular, we then can't use EQUIV for a
6706 : : RELOAD_FOR_OUTPUT_ADDRESS reload. */
6707 : :
6708 : 0 : if (equiv != 0)
6709 : : {
6710 : 0 : if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6711 : 0 : switch (rld[r].when_needed)
6712 : : {
6713 : : case RELOAD_FOR_OTHER_ADDRESS:
6714 : : case RELOAD_FOR_INPADDR_ADDRESS:
6715 : : case RELOAD_FOR_INPUT_ADDRESS:
6716 : : case RELOAD_FOR_OPADDR_ADDR:
6717 : : break;
6718 : 0 : case RELOAD_OTHER:
6719 : 0 : case RELOAD_FOR_INPUT:
6720 : 0 : case RELOAD_FOR_OPERAND_ADDRESS:
6721 : 0 : if (! rld[r].optional)
6722 : 0 : reload_override_in[r] = equiv;
6723 : : /* Fall through. */
6724 : : default:
6725 : : equiv = 0;
6726 : : break;
6727 : : }
6728 : 0 : else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6729 : 0 : switch (rld[r].when_needed)
6730 : : {
6731 : : case RELOAD_FOR_OTHER_ADDRESS:
6732 : : case RELOAD_FOR_INPADDR_ADDRESS:
6733 : : case RELOAD_FOR_INPUT_ADDRESS:
6734 : : case RELOAD_FOR_OPADDR_ADDR:
6735 : : case RELOAD_FOR_OPERAND_ADDRESS:
6736 : : case RELOAD_FOR_INPUT:
6737 : : break;
6738 : 0 : case RELOAD_OTHER:
6739 : 0 : if (! rld[r].optional)
6740 : 0 : reload_override_in[r] = equiv;
6741 : : /* Fall through. */
6742 : : default:
6743 : : equiv = 0;
6744 : : break;
6745 : : }
6746 : : }
6747 : :
6748 : : /* If we found an equivalent reg, say no code need be generated
6749 : : to load it, and use it as our reload reg. */
6750 : 0 : if (equiv != 0
6751 : 0 : && (regno != HARD_FRAME_POINTER_REGNUM
6752 : 0 : || !frame_pointer_needed))
6753 : : {
6754 : 0 : int nr = hard_regno_nregs (regno, rld[r].mode);
6755 : 0 : int k;
6756 : 0 : rld[r].reg_rtx = equiv;
6757 : 0 : reload_spill_index[r] = regno;
6758 : 0 : reload_inherited[r] = 1;
6759 : :
6760 : : /* If reg_reloaded_valid is not set for this register,
6761 : : there might be a stale spill_reg_store lying around.
6762 : : We must clear it, since otherwise emit_reload_insns
6763 : : might delete the store. */
6764 : 0 : if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6765 : 0 : spill_reg_store[regno] = NULL;
6766 : : /* If any of the hard registers in EQUIV are spill
6767 : : registers, mark them as in use for this insn. */
6768 : 0 : for (k = 0; k < nr; k++)
6769 : : {
6770 : 0 : i = spill_reg_order[regno + k];
6771 : 0 : if (i >= 0)
6772 : : {
6773 : 0 : mark_reload_reg_in_use (regno, rld[r].opnum,
6774 : : rld[r].when_needed,
6775 : : rld[r].mode);
6776 : 0 : SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6777 : : regno + k);
6778 : : }
6779 : : }
6780 : : }
6781 : : }
6782 : :
6783 : : /* If we found a register to use already, or if this is an optional
6784 : : reload, we are done. */
6785 : 0 : if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6786 : 0 : continue;
6787 : :
6788 : : #if 0
6789 : : /* No longer needed for correct operation. Might or might
6790 : : not give better code on the average. Want to experiment? */
6791 : :
6792 : : /* See if there is a later reload that has a class different from our
6793 : : class that intersects our class or that requires less register
6794 : : than our reload. If so, we must allocate a register to this
6795 : : reload now, since that reload might inherit a previous reload
6796 : : and take the only available register in our class. Don't do this
6797 : : for optional reloads since they will force all previous reloads
6798 : : to be allocated. Also don't do this for reloads that have been
6799 : : turned off. */
6800 : :
6801 : : for (i = j + 1; i < n_reloads; i++)
6802 : : {
6803 : : int s = reload_order[i];
6804 : :
6805 : : if ((rld[s].in == 0 && rld[s].out == 0
6806 : : && ! rld[s].secondary_p)
6807 : : || rld[s].optional)
6808 : : continue;
6809 : :
6810 : : if ((rld[s].rclass != rld[r].rclass
6811 : : && reg_classes_intersect_p (rld[r].rclass,
6812 : : rld[s].rclass))
6813 : : || rld[s].nregs < rld[r].nregs)
6814 : : break;
6815 : : }
6816 : :
6817 : : if (i == n_reloads)
6818 : : continue;
6819 : :
6820 : : allocate_reload_reg (chain, r, j == n_reloads - 1);
6821 : : #endif
6822 : : }
6823 : :
6824 : : /* Now allocate reload registers for anything non-optional that
6825 : : didn't get one yet. */
6826 : 0 : for (j = 0; j < n_reloads; j++)
6827 : : {
6828 : 0 : int r = reload_order[j];
6829 : :
6830 : : /* Ignore reloads that got marked inoperative. */
6831 : 0 : if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6832 : 0 : continue;
6833 : :
6834 : : /* Skip reloads that already have a register allocated or are
6835 : : optional. */
6836 : 0 : if (rld[r].reg_rtx != 0 || rld[r].optional)
6837 : 0 : continue;
6838 : :
6839 : 0 : if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6840 : : break;
6841 : : }
6842 : :
6843 : : /* If that loop got all the way, we have won. */
6844 : 0 : if (j == n_reloads)
6845 : : {
6846 : : win = 1;
6847 : : break;
6848 : : }
6849 : :
6850 : : /* Loop around and try without any inheritance. */
6851 : : }
6852 : :
6853 : 0 : if (! win)
6854 : : {
6855 : : /* First undo everything done by the failed attempt
6856 : : to allocate with inheritance. */
6857 : 0 : choose_reload_regs_init (chain, save_reload_reg_rtx);
6858 : :
6859 : : /* Some sanity tests to verify that the reloads found in the first
6860 : : pass are identical to the ones we have now. */
6861 : 0 : gcc_assert (chain->n_reloads == n_reloads);
6862 : :
6863 : 0 : for (i = 0; i < n_reloads; i++)
6864 : : {
6865 : 0 : if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
6866 : 0 : continue;
6867 : 0 : gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
6868 : 0 : for (j = 0; j < n_spills; j++)
6869 : 0 : if (spill_regs[j] == chain->rld[i].regno)
6870 : 0 : if (! set_reload_reg (j, i))
6871 : 0 : failed_reload (chain->insn, i);
6872 : : }
6873 : : }
6874 : :
6875 : : /* If we thought we could inherit a reload, because it seemed that
6876 : : nothing else wanted the same reload register earlier in the insn,
6877 : : verify that assumption, now that all reloads have been assigned.
6878 : : Likewise for reloads where reload_override_in has been set. */
6879 : :
6880 : : /* If doing expensive optimizations, do one preliminary pass that doesn't
6881 : : cancel any inheritance, but removes reloads that have been needed only
6882 : : for reloads that we know can be inherited. */
6883 : 0 : for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6884 : : {
6885 : 0 : for (j = 0; j < n_reloads; j++)
6886 : : {
6887 : 0 : int r = reload_order[j];
6888 : 0 : rtx check_reg;
6889 : 0 : rtx tem;
6890 : 0 : if (reload_inherited[r] && rld[r].reg_rtx)
6891 : : check_reg = rld[r].reg_rtx;
6892 : 0 : else if (reload_override_in[r]
6893 : 0 : && (REG_P (reload_override_in[r])
6894 : 0 : || GET_CODE (reload_override_in[r]) == SUBREG))
6895 : : check_reg = reload_override_in[r];
6896 : : else
6897 : 0 : continue;
6898 : 0 : if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
6899 : : rld[r].opnum, rld[r].when_needed, rld[r].in,
6900 : : (reload_inherited[r]
6901 : : ? rld[r].out : const0_rtx),
6902 : : r, 1))
6903 : : {
6904 : 0 : if (pass)
6905 : 0 : continue;
6906 : 0 : reload_inherited[r] = 0;
6907 : 0 : reload_override_in[r] = 0;
6908 : : }
6909 : : /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6910 : : reload_override_in, then we do not need its related
6911 : : RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6912 : : likewise for other reload types.
6913 : : We handle this by removing a reload when its only replacement
6914 : : is mentioned in reload_in of the reload we are going to inherit.
6915 : : A special case are auto_inc expressions; even if the input is
6916 : : inherited, we still need the address for the output. We can
6917 : : recognize them because they have RELOAD_OUT set to RELOAD_IN.
6918 : : If we succeeded removing some reload and we are doing a preliminary
6919 : : pass just to remove such reloads, make another pass, since the
6920 : : removal of one reload might allow us to inherit another one. */
6921 : 0 : else if (rld[r].in
6922 : 0 : && rld[r].out != rld[r].in
6923 : 0 : && remove_address_replacements (rld[r].in))
6924 : : {
6925 : 0 : if (pass)
6926 : 0 : pass = 2;
6927 : : }
6928 : : /* If we needed a memory location for the reload, we also have to
6929 : : remove its related reloads. */
6930 : 0 : else if (rld[r].in
6931 : 0 : && rld[r].out != rld[r].in
6932 : 0 : && (tem = replaced_subreg (rld[r].in), REG_P (tem))
6933 : 0 : && REGNO (tem) < FIRST_PSEUDO_REGISTER
6934 : 0 : && (targetm.secondary_memory_needed
6935 : 0 : (rld[r].inmode, REGNO_REG_CLASS (REGNO (tem)),
6936 : 0 : rld[r].rclass))
6937 : 0 : && remove_address_replacements
6938 : 0 : (get_secondary_mem (tem, rld[r].inmode, rld[r].opnum,
6939 : : rld[r].when_needed)))
6940 : : {
6941 : 0 : if (pass)
6942 : 0 : pass = 2;
6943 : : }
6944 : : }
6945 : : }
6946 : :
6947 : : /* Now that reload_override_in is known valid,
6948 : : actually override reload_in. */
6949 : 0 : for (j = 0; j < n_reloads; j++)
6950 : 0 : if (reload_override_in[j])
6951 : 0 : rld[j].in = reload_override_in[j];
6952 : :
6953 : : /* If this reload won't be done because it has been canceled or is
6954 : : optional and not inherited, clear reload_reg_rtx so other
6955 : : routines (such as subst_reloads) don't get confused. */
6956 : 0 : for (j = 0; j < n_reloads; j++)
6957 : 0 : if (rld[j].reg_rtx != 0
6958 : 0 : && ((rld[j].optional && ! reload_inherited[j])
6959 : 0 : || (rld[j].in == 0 && rld[j].out == 0
6960 : 0 : && ! rld[j].secondary_p)))
6961 : : {
6962 : 0 : int regno = true_regnum (rld[j].reg_rtx);
6963 : :
6964 : 0 : if (spill_reg_order[regno] >= 0)
6965 : 0 : clear_reload_reg_in_use (regno, rld[j].opnum,
6966 : : rld[j].when_needed, rld[j].mode);
6967 : 0 : rld[j].reg_rtx = 0;
6968 : 0 : reload_spill_index[j] = -1;
6969 : : }
6970 : :
6971 : : /* Record which pseudos and which spill regs have output reloads. */
6972 : 0 : for (j = 0; j < n_reloads; j++)
6973 : : {
6974 : 0 : int r = reload_order[j];
6975 : :
6976 : 0 : i = reload_spill_index[r];
6977 : :
6978 : : /* I is nonneg if this reload uses a register.
6979 : : If rld[r].reg_rtx is 0, this is an optional reload
6980 : : that we opted to ignore. */
6981 : 0 : if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
6982 : 0 : && rld[r].reg_rtx != 0)
6983 : : {
6984 : 0 : int nregno = REGNO (rld[r].out_reg);
6985 : 0 : int nr = 1;
6986 : :
6987 : 0 : if (nregno < FIRST_PSEUDO_REGISTER)
6988 : 0 : nr = hard_regno_nregs (nregno, rld[r].mode);
6989 : :
6990 : 0 : while (--nr >= 0)
6991 : 0 : SET_REGNO_REG_SET (&reg_has_output_reload,
6992 : : nregno + nr);
6993 : :
6994 : 0 : if (i >= 0)
6995 : 0 : add_to_hard_reg_set (&reg_is_output_reload, rld[r].mode, i);
6996 : :
6997 : 0 : gcc_assert (rld[r].when_needed == RELOAD_OTHER
6998 : : || rld[r].when_needed == RELOAD_FOR_OUTPUT
6999 : : || rld[r].when_needed == RELOAD_FOR_INSN);
7000 : : }
7001 : : }
7002 : 0 : }
7003 : :
7004 : : /* Deallocate the reload register for reload R. This is called from
7005 : : remove_address_replacements. */
7006 : :
7007 : : void
7008 : 0 : deallocate_reload_reg (int r)
7009 : : {
7010 : 0 : int regno;
7011 : :
7012 : 0 : if (! rld[r].reg_rtx)
7013 : : return;
7014 : 0 : regno = true_regnum (rld[r].reg_rtx);
7015 : 0 : rld[r].reg_rtx = 0;
7016 : 0 : if (spill_reg_order[regno] >= 0)
7017 : 0 : clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7018 : : rld[r].mode);
7019 : 0 : reload_spill_index[r] = -1;
7020 : : }
7021 : :
7022 : : /* These arrays are filled by emit_reload_insns and its subroutines. */
7023 : : static rtx_insn *input_reload_insns[MAX_RECOG_OPERANDS];
7024 : : static rtx_insn *other_input_address_reload_insns = 0;
7025 : : static rtx_insn *other_input_reload_insns = 0;
7026 : : static rtx_insn *input_address_reload_insns[MAX_RECOG_OPERANDS];
7027 : : static rtx_insn *inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
7028 : : static rtx_insn *output_reload_insns[MAX_RECOG_OPERANDS];
7029 : : static rtx_insn *output_address_reload_insns[MAX_RECOG_OPERANDS];
7030 : : static rtx_insn *outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
7031 : : static rtx_insn *operand_reload_insns = 0;
7032 : : static rtx_insn *other_operand_reload_insns = 0;
7033 : : static rtx_insn *other_output_reload_insns[MAX_RECOG_OPERANDS];
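: : /* There is one sequence for each reload type, indexed by operand
: : number where the type is operand-specific. The switch on
: : rl->when_needed in emit_input_reload_insns below selects which of
: : these sequences the reload insns are appended to. */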
7034 : :
7035 : : /* Values to be put in spill_reg_store are put here first. Instructions
7036 : : must only be placed here if the associated reload register reaches
7037 : : the end of the instruction's reload sequence. */
7038 : : static rtx_insn *new_spill_reg_store[FIRST_PSEUDO_REGISTER];
7039 : : static HARD_REG_SET reg_reloaded_died;
7040 : :
7041 : : /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7042 : : of class NEW_CLASS with mode NEW_MODE. Or alternatively, if alt_reload_reg
7043 : : is nonzero, if that is suitable. On success, change *RELOAD_REG to the
7044 : : adjusted register, and return true. Otherwise, return false. */
7045 : : static bool
7046 : 0 : reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7047 : : enum reg_class new_class,
7048 : : machine_mode new_mode)
7049 : :
7050 : : {
7051 : 0 : rtx reg;
7052 : :
7053 : 0 : for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7054 : : {
7055 : 0 : unsigned regno = REGNO (reg);
7056 : :
7057 : 0 : if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7058 : 0 : continue;
7059 : 0 : if (GET_MODE (reg) != new_mode)
7060 : : {
7061 : 0 : if (!targetm.hard_regno_mode_ok (regno, new_mode))
7062 : 0 : continue;
7063 : 0 : if (hard_regno_nregs (regno, new_mode) > REG_NREGS (reg))
7064 : 0 : continue;
7065 : 0 : reg = reload_adjust_reg_for_mode (reg, new_mode);
7066 : : }
7067 : 0 : *reload_reg = reg;
7068 : 0 : return true;
7069 : : }
7070 : : return false;
7071 : : }
7072 : :
7073 : : /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7074 : : pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7075 : : nonzero, if that is suitable. On success, change *RELOAD_REG to the
7076 : : adjusted register, and return true. Otherwise, return false. */
7077 : : static bool
7078 : 0 : reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7079 : : enum insn_code icode)
7080 : :
7081 : : {
7082 : 0 : enum reg_class new_class = scratch_reload_class (icode);
7083 : 0 : machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7084 : :
7085 : 0 : return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7086 : 0 : new_class, new_mode);
7087 : : }
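: : /* This helper just derives the class and mode required for the
: : scratch operand (operand 2) of the secondary reload pattern ICODE
: : and defers to reload_adjust_reg_for_temp; emit_input_reload_insns
: : below uses it when a secondary input reload needs a scratch. */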
7088 : :
7089 : : /* Generate insns to perform reload RL, which is for the insn in CHAIN and
7090 : : has the number J. OLD contains the value to be used as input. */
7091 : :
7092 : : static void
7093 : 0 : emit_input_reload_insns (class insn_chain *chain, struct reload *rl,
7094 : : rtx old, int j)
7095 : : {
7096 : 0 : rtx_insn *insn = chain->insn;
7097 : 0 : rtx reloadreg;
7098 : 0 : rtx oldequiv_reg = 0;
7099 : 0 : rtx oldequiv = 0;
7100 : 0 : int special = 0;
7101 : 0 : machine_mode mode;
7102 : 0 : rtx_insn **where;
7103 : :
7104 : : /* delete_output_reload is only invoked properly if old contains
7105 : : the original pseudo register. Since this is replaced with a
7106 : : hard reg when RELOAD_OVERRIDE_IN is set, see if we can
7107 : : find the pseudo in RELOAD_IN_REG. This is also used to
7108 : : determine whether a secondary reload is needed. */
7109 : 0 : if (reload_override_in[j]
7110 : 0 : && (REG_P (rl->in_reg)
7111 : 0 : || (GET_CODE (rl->in_reg) == SUBREG
7112 : 0 : && REG_P (SUBREG_REG (rl->in_reg)))))
7113 : : {
7114 : 0 : oldequiv = old;
7115 : 0 : old = rl->in_reg;
7116 : : }
7117 : 0 : if (oldequiv == 0)
7118 : : oldequiv = old;
7119 : 0 : else if (REG_P (oldequiv))
7120 : : oldequiv_reg = oldequiv;
7121 : 0 : else if (GET_CODE (oldequiv) == SUBREG)
7122 : 0 : oldequiv_reg = SUBREG_REG (oldequiv);
7123 : :
7124 : 0 : reloadreg = reload_reg_rtx_for_input[j];
7125 : 0 : mode = GET_MODE (reloadreg);
7126 : :
7127 : : /* If we are reloading from a register that was recently stored in
7128 : : with an output-reload, see if we can prove there was
7129 : : actually no need to store the old value in it. */
7130 : :
7131 : 0 : if (optimize && REG_P (oldequiv)
7132 : 0 : && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
7133 : 0 : && spill_reg_store[REGNO (oldequiv)]
7134 : 0 : && REG_P (old)
7135 : 0 : && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
7136 : 0 : || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
7137 : 0 : rl->out_reg)))
7138 : 0 : delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
7139 : :
7140 : : /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
7141 : : OLDEQUIV. */
7142 : :
7143 : 0 : while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
7144 : 0 : oldequiv = SUBREG_REG (oldequiv);
7145 : 0 : if (GET_MODE (oldequiv) != VOIDmode
7146 : 0 : && mode != GET_MODE (oldequiv))
7147 : 0 : oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
7148 : :
7149 : : /* Switch to the right place to emit the reload insns. */
7150 : 0 : switch (rl->when_needed)
7151 : : {
7152 : : case RELOAD_OTHER:
7153 : 0 : where = &other_input_reload_insns;
7154 : : break;
7155 : 0 : case RELOAD_FOR_INPUT:
7156 : 0 : where = &input_reload_insns[rl->opnum];
7157 : 0 : break;
7158 : 0 : case RELOAD_FOR_INPUT_ADDRESS:
7159 : 0 : where = &input_address_reload_insns[rl->opnum];
7160 : 0 : break;
7161 : 0 : case RELOAD_FOR_INPADDR_ADDRESS:
7162 : 0 : where = &inpaddr_address_reload_insns[rl->opnum];
7163 : 0 : break;
7164 : 0 : case RELOAD_FOR_OUTPUT_ADDRESS:
7165 : 0 : where = &output_address_reload_insns[rl->opnum];
7166 : 0 : break;
7167 : 0 : case RELOAD_FOR_OUTADDR_ADDRESS:
7168 : 0 : where = &outaddr_address_reload_insns[rl->opnum];
7169 : 0 : break;
7170 : 0 : case RELOAD_FOR_OPERAND_ADDRESS:
7171 : 0 : where = &operand_reload_insns;
7172 : 0 : break;
7173 : 0 : case RELOAD_FOR_OPADDR_ADDR:
7174 : 0 : where = &other_operand_reload_insns;
7175 : 0 : break;
7176 : 0 : case RELOAD_FOR_OTHER_ADDRESS:
7177 : 0 : where = &other_input_address_reload_insns;
7178 : 0 : break;
7179 : 0 : default:
7180 : 0 : gcc_unreachable ();
7181 : : }
7182 : :
7183 : 0 : push_to_sequence (*where);
7184 : :
7185 : : /* Auto-increment addresses must be reloaded in a special way. */
7186 : 0 : if (rl->out && ! rl->out_reg)
7187 : : {
7188 : : /* We are not going to bother supporting the case where an
7189 : : incremented register can't be copied directly from
7190 : : OLDEQUIV since this seems highly unlikely. */
7191 : 0 : gcc_assert (rl->secondary_in_reload < 0);
7192 : :
7193 : 0 : if (reload_inherited[j])
7194 : 0 : oldequiv = reloadreg;
7195 : :
7196 : 0 : old = XEXP (rl->in_reg, 0);
7197 : :
7198 : : /* Prevent normal processing of this reload. */
7199 : 0 : special = 1;
7200 : : /* Output a special code sequence for this case. */
7201 : 0 : inc_for_reload (reloadreg, oldequiv, rl->out, rl->inc);
7202 : : }
7203 : :
7204 : : /* If we are reloading a pseudo-register that was set by the previous
7205 : : insn, see if we can get rid of that pseudo-register entirely
7206 : : by redirecting the previous insn into our reload register. */
7207 : :
7208 : 0 : else if (optimize && REG_P (old)
7209 : 0 : && REGNO (old) >= FIRST_PSEUDO_REGISTER
7210 : 0 : && dead_or_set_p (insn, old)
7211 : : /* This is unsafe if some other reload
7212 : : uses the same reg first. */
7213 : 0 : && ! conflicts_with_override (reloadreg)
7214 : 0 : && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
7215 : : rl->when_needed, old, rl->out, j, 0))
7216 : : {
7217 : 0 : rtx_insn *temp = PREV_INSN (insn);
7218 : 0 : while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
7219 : 0 : temp = PREV_INSN (temp);
7220 : 0 : if (temp
7221 : 0 : && NONJUMP_INSN_P (temp)
7222 : 0 : && GET_CODE (PATTERN (temp)) == SET
7223 : 0 : && SET_DEST (PATTERN (temp)) == old
7224 : : /* Make sure we can access insn_operand_constraint. */
7225 : 0 : && asm_noperands (PATTERN (temp)) < 0
7226 : : /* This is unsafe if the operand occurs more than once in the current
7227 : : insn. Perhaps some occurrences aren't reloaded. */
7228 : 0 : && count_occurrences (PATTERN (insn), old, 0) == 1)
7229 : : {
7230 : 0 : rtx old = SET_DEST (PATTERN (temp));
7231 : : /* Store into the reload register instead of the pseudo. */
7232 : 0 : SET_DEST (PATTERN (temp)) = reloadreg;
7233 : :
7234 : : /* Verify that resulting insn is valid.
7235 : :
7236 : : Note that we have replaced the destination of TEMP with
7237 : : RELOADREG. If TEMP references RELOADREG within an
7238 : : autoincrement addressing mode, then the resulting insn
7239 : : is ill-formed and we must reject this optimization. */
7240 : 0 : extract_insn (temp);
7241 : 0 : if (constrain_operands (1, get_enabled_alternatives (temp))
7242 : 0 : && (!AUTO_INC_DEC || ! find_reg_note (temp, REG_INC, reloadreg)))
7243 : : {
7244 : : /* If the previous insn is an output reload, the source is
7245 : : a reload register, and its spill_reg_store entry will
7246 : : contain the previous destination. This is now
7247 : : invalid. */
7248 : 0 : if (REG_P (SET_SRC (PATTERN (temp)))
7249 : 0 : && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
7250 : : {
7251 : 0 : spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7252 : 0 : spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7253 : : }
7254 : :
7255 : : /* If these are the only uses of the pseudo reg,
7256 : : pretend for GDB it lives in the reload reg we used. */
7257 : 0 : if (REG_N_DEATHS (REGNO (old)) == 1
7258 : 0 : && REG_N_SETS (REGNO (old)) == 1)
7259 : : {
7260 : 0 : reg_renumber[REGNO (old)] = REGNO (reloadreg);
7261 : 0 : if (ira_conflicts_p)
7262 : : /* Inform IRA about the change. */
7263 : 0 : ira_mark_allocation_change (REGNO (old));
7264 : 0 : alter_reg (REGNO (old), -1, false);
7265 : : }
7266 : 0 : special = 1;
7267 : :
7268 : : /* Adjust any debug insns between temp and insn. */
7269 : 0 : while ((temp = NEXT_INSN (temp)) != insn)
7270 : 0 : if (DEBUG_BIND_INSN_P (temp))
7271 : 0 : INSN_VAR_LOCATION_LOC (temp)
7272 : 0 : = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (temp),
7273 : : old, reloadreg);
7274 : : else
7275 : 0 : gcc_assert (DEBUG_INSN_P (temp) || NOTE_P (temp));
7276 : : }
7277 : : else
7278 : : {
7279 : 0 : SET_DEST (PATTERN (temp)) = old;
7280 : : }
7281 : : }
7282 : : }
7283 : :
7284 : : /* We can't do that, so output an insn to load RELOADREG. */
7285 : :
7286 : : /* If we have a secondary reload, pick up the secondary register
7287 : : and icode, if any. If OLDEQUIV and OLD are different or
7288 : : if this is an in-out reload, recompute whether or not we
7289 : : still need a secondary register and what the icode should
7290 : : be. If we still need a secondary register and the class or
7291 : : icode is different, go back to reloading from OLD if using
7292 : : OLDEQUIV means that we got the wrong type of register. We
7293 : : cannot have different class or icode due to an in-out reload
7294 : : because we don't make such reloads when both the input and
7295 : : output need secondary reload registers. */
7296 : :
7297 : 0 : if (! special && rl->secondary_in_reload >= 0)
7298 : : {
7299 : 0 : rtx second_reload_reg = 0;
7300 : 0 : rtx third_reload_reg = 0;
7301 : 0 : int secondary_reload = rl->secondary_in_reload;
7302 : 0 : rtx real_oldequiv = oldequiv;
7303 : 0 : rtx real_old = old;
7304 : 0 : rtx tmp;
7305 : 0 : enum insn_code icode;
7306 : 0 : enum insn_code tertiary_icode = CODE_FOR_nothing;
7307 : :
7308 : : /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7309 : : and similarly for OLD.
7310 : : See comments in get_secondary_reload in reload.cc. */
7311 : : /* If it is a pseudo that cannot be replaced with its
7312 : : equivalent MEM, we must fall back to reload_in, which
7313 : : will have all the necessary substitutions registered.
7314 : : Likewise for a pseudo that can't be replaced with its
7315 : : equivalent constant.
7316 : :
7317 : : Take extra care for subregs of such pseudos. Note that
7318 : : we cannot use reg_equiv_mem in this case because it is
7319 : : not in the right mode. */
7320 : :
7321 : 0 : tmp = oldequiv;
7322 : 0 : if (GET_CODE (tmp) == SUBREG)
7323 : 0 : tmp = SUBREG_REG (tmp);
7324 : 0 : if (REG_P (tmp)
7325 : 0 : && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7326 : 0 : && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7327 : 0 : || reg_equiv_constant (REGNO (tmp)) != 0))
7328 : : {
7329 : 0 : if (! reg_equiv_mem (REGNO (tmp))
7330 : 0 : || num_not_at_initial_offset
7331 : 0 : || GET_CODE (oldequiv) == SUBREG)
7332 : 0 : real_oldequiv = rl->in;
7333 : : else
7334 : : real_oldequiv = reg_equiv_mem (REGNO (tmp));
7335 : : }
7336 : :
7337 : 0 : tmp = old;
7338 : 0 : if (GET_CODE (tmp) == SUBREG)
7339 : 0 : tmp = SUBREG_REG (tmp);
7340 : 0 : if (REG_P (tmp)
7341 : 0 : && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7342 : 0 : && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7343 : 0 : || reg_equiv_constant (REGNO (tmp)) != 0))
7344 : : {
7345 : 0 : if (! reg_equiv_mem (REGNO (tmp))
7346 : 0 : || num_not_at_initial_offset
7347 : 0 : || GET_CODE (old) == SUBREG)
7348 : 0 : real_old = rl->in;
7349 : : else
7350 : : real_old = reg_equiv_mem (REGNO (tmp));
7351 : : }
7352 : :
7353 : 0 : second_reload_reg = rld[secondary_reload].reg_rtx;
7354 : 0 : if (rld[secondary_reload].secondary_in_reload >= 0)
7355 : : {
7356 : 0 : int tertiary_reload = rld[secondary_reload].secondary_in_reload;
7357 : :
7358 : 0 : third_reload_reg = rld[tertiary_reload].reg_rtx;
7359 : 0 : tertiary_icode = rld[secondary_reload].secondary_in_icode;
7360 : : /* We'd have to add more code for quartary reloads. */
7361 : 0 : gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
7362 : : }
7363 : 0 : icode = rl->secondary_in_icode;
7364 : :
7365 : 0 : if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
7366 : 0 : || (rl->in != 0 && rl->out != 0))
7367 : : {
7368 : 0 : secondary_reload_info sri, sri2;
7369 : 0 : enum reg_class new_class, new_t_class;
7370 : :
7371 : 0 : sri.icode = CODE_FOR_nothing;
7372 : 0 : sri.prev_sri = NULL;
7373 : 0 : new_class
7374 : 0 : = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7375 : 0 : rl->rclass, mode,
7376 : : &sri);
7377 : :
7378 : 0 : if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7379 : 0 : second_reload_reg = 0;
7380 : 0 : else if (new_class == NO_REGS)
7381 : : {
7382 : 0 : if (reload_adjust_reg_for_icode (&second_reload_reg,
7383 : : third_reload_reg,
7384 : : (enum insn_code) sri.icode))
7385 : : {
7386 : 0 : icode = (enum insn_code) sri.icode;
7387 : 0 : third_reload_reg = 0;
7388 : : }
7389 : : else
7390 : : {
7391 : : oldequiv = old;
7392 : : real_oldequiv = real_old;
7393 : : }
7394 : : }
7395 : 0 : else if (sri.icode != CODE_FOR_nothing)
7396 : : /* We currently lack a way to express this in reloads. */
7397 : 0 : gcc_unreachable ();
7398 : : else
7399 : : {
7400 : 0 : sri2.icode = CODE_FOR_nothing;
7401 : 0 : sri2.prev_sri = &sri;
7402 : 0 : new_t_class
7403 : 0 : = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7404 : : new_class, mode,
7405 : : &sri);
7406 : 0 : if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7407 : : {
7408 : 0 : if (reload_adjust_reg_for_temp (&second_reload_reg,
7409 : : third_reload_reg,
7410 : : new_class, mode))
7411 : : {
7412 : 0 : third_reload_reg = 0;
7413 : 0 : tertiary_icode = (enum insn_code) sri2.icode;
7414 : : }
7415 : : else
7416 : : {
7417 : : oldequiv = old;
7418 : : real_oldequiv = real_old;
7419 : : }
7420 : : }
7421 : : else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7422 : : {
7423 : : rtx intermediate = second_reload_reg;
7424 : :
7425 : : if (reload_adjust_reg_for_temp (&intermediate, NULL,
7426 : : new_class, mode)
7427 : : && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7428 : : ((enum insn_code)
7429 : : sri2.icode)))
7430 : : {
7431 : : second_reload_reg = intermediate;
7432 : : tertiary_icode = (enum insn_code) sri2.icode;
7433 : : }
7434 : : else
7435 : : {
7436 : : oldequiv = old;
7437 : : real_oldequiv = real_old;
7438 : : }
7439 : : }
7440 : : else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7441 : : {
7442 : 0 : rtx intermediate = second_reload_reg;
7443 : :
7444 : 0 : if (reload_adjust_reg_for_temp (&intermediate, NULL,
7445 : : new_class, mode)
7446 : 0 : && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7447 : : new_t_class, mode))
7448 : : {
7449 : 0 : second_reload_reg = intermediate;
7450 : 0 : tertiary_icode = (enum insn_code) sri2.icode;
7451 : : }
7452 : : else
7453 : : {
7454 : : oldequiv = old;
7455 : : real_oldequiv = real_old;
7456 : : }
7457 : 0 : }
7458 : : else
7459 : : {
7460 : : /* This could be handled more intelligently too. */
7461 : : oldequiv = old;
7462 : : real_oldequiv = real_old;
7463 : : }
7464 : : }
7465 : : }
7466 : :
7467 : : /* If we still need a secondary reload register, check
7468 : : to see if it is being used as a scratch or intermediate
7469 : : register and generate code appropriately. If we need
7470 : : a scratch register, use REAL_OLDEQUIV since the form of
7471 : : the insn may depend on the actual address if it is
7472 : : a MEM. */
7473 : :
7474 : 0 : if (second_reload_reg)
7475 : : {
7476 : 0 : if (icode != CODE_FOR_nothing)
7477 : : {
7478 : : /* We'd have to add extra code to handle this case. */
7479 : 0 : gcc_assert (!third_reload_reg);
7480 : :
7481 : 0 : emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7482 : : second_reload_reg));
7483 : 0 : special = 1;
7484 : : }
7485 : : else
7486 : : {
7487 : : /* See if we need a scratch register to load the
7488 : : intermediate register (a tertiary reload). */
7489 : 0 : if (tertiary_icode != CODE_FOR_nothing)
7490 : : {
7491 : 0 : emit_insn ((GEN_FCN (tertiary_icode)
7492 : 0 : (second_reload_reg, real_oldequiv,
7493 : : third_reload_reg)));
7494 : : }
7495 : 0 : else if (third_reload_reg)
7496 : : {
7497 : 0 : gen_reload (third_reload_reg, real_oldequiv,
7498 : : rl->opnum,
7499 : : rl->when_needed);
7500 : 0 : gen_reload (second_reload_reg, third_reload_reg,
7501 : : rl->opnum,
7502 : : rl->when_needed);
7503 : : }
7504 : : else
7505 : 0 : gen_reload (second_reload_reg, real_oldequiv,
7506 : : rl->opnum,
7507 : : rl->when_needed);
7508 : :
7509 : : oldequiv = second_reload_reg;
7510 : : }
7511 : : }
7512 : : }
7513 : :
7514 : 0 : if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7515 : : {
7516 : 0 : rtx real_oldequiv = oldequiv;
7517 : :
7518 : 0 : if ((REG_P (oldequiv)
7519 : 0 : && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7520 : 0 : && (reg_equiv_memory_loc (REGNO (oldequiv)) != 0
7521 : 0 : || reg_equiv_constant (REGNO (oldequiv)) != 0))
7522 : 0 : || (GET_CODE (oldequiv) == SUBREG
7523 : 0 : && REG_P (SUBREG_REG (oldequiv))
7524 : 0 : && (REGNO (SUBREG_REG (oldequiv))
7525 : : >= FIRST_PSEUDO_REGISTER)
7526 : 0 : && ((reg_equiv_memory_loc (REGNO (SUBREG_REG (oldequiv))) != 0)
7527 : 0 : || (reg_equiv_constant (REGNO (SUBREG_REG (oldequiv))) != 0)))
7528 : 0 : || (CONSTANT_P (oldequiv)
7529 : 0 : && (targetm.preferred_reload_class (oldequiv,
7530 : 0 : REGNO_REG_CLASS (REGNO (reloadreg)))
7531 : : == NO_REGS)))
7532 : 0 : real_oldequiv = rl->in;
7533 : 0 : gen_reload (reloadreg, real_oldequiv, rl->opnum,
7534 : : rl->when_needed);
7535 : : }
7536 : :
7537 : 0 : if (cfun->can_throw_non_call_exceptions)
7538 : 0 : copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7539 : :
7540 : : /* End this sequence. */
7541 : 0 : *where = get_insns ();
7542 : 0 : end_sequence ();
7543 : :
7544 : : /* Update reload_override_in so that delete_address_reloads_1
7545 : : can see the actual register usage. */
7546 : 0 : if (oldequiv_reg)
7547 : 0 : reload_override_in[j] = oldequiv;
7548 : 0 : }
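 : :
 : : /* Illustrative sketch only, not part of the pass (the target details are
 : :    hypothetical): on a machine whose floating-point registers cannot be
 : :    loaded directly from memory, an input reload of (mem:SF (reg:SI sp))
 : :    into (reg:SF f0) needs an intermediate general register, and the code
 : :    above emits roughly
 : :
 : :    (set (reg:SF r4) (mem:SF (reg:SI sp)))   ;; secondary reload reg
 : :    (set (reg:SF f0) (reg:SF r4))            ;; the reload reg itself
 : :
 : :    with a further tertiary register, or a single secondary-reload icode
 : :    taking a scratch operand, when the target's secondary_reload hook
 : :    asks for one. */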
7549 : :
7550 : : /* Generate insns for the output reload RL, which is for the insn described
7551 : : by CHAIN and has the number J. */
7552 : : static void
7553 : 0 : emit_output_reload_insns (class insn_chain *chain, struct reload *rl,
7554 : : int j)
7555 : : {
7556 : 0 : rtx reloadreg;
7557 : 0 : rtx_insn *insn = chain->insn;
7558 : 0 : int special = 0;
7559 : 0 : rtx old = rl->out;
7560 : 0 : machine_mode mode;
7561 : 0 : rtx_insn *p;
7562 : 0 : rtx rl_reg_rtx;
7563 : :
7564 : 0 : if (rl->when_needed == RELOAD_OTHER)
7565 : 0 : start_sequence ();
7566 : : else
7567 : 0 : push_to_sequence (output_reload_insns[rl->opnum]);
7568 : :
7569 : 0 : rl_reg_rtx = reload_reg_rtx_for_output[j];
7570 : 0 : mode = GET_MODE (rl_reg_rtx);
7571 : :
7572 : 0 : reloadreg = rl_reg_rtx;
7573 : :
7574 : : /* If we need two reload regs, set RELOADREG to the intermediate
7575 : : one, since it will be stored into OLD. We might need a secondary
7576 : : register only for an input reload, so check again here. */
7577 : :
7578 : 0 : if (rl->secondary_out_reload >= 0)
7579 : : {
7580 : 0 : rtx real_old = old;
7581 : 0 : int secondary_reload = rl->secondary_out_reload;
7582 : 0 : int tertiary_reload = rld[secondary_reload].secondary_out_reload;
7583 : :
7584 : 0 : if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
7585 : 0 : && reg_equiv_mem (REGNO (old)) != 0)
7586 : 0 : real_old = reg_equiv_mem (REGNO (old));
7587 : :
7588 : 0 : if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
7589 : : {
7590 : 0 : rtx second_reloadreg = reloadreg;
7591 : 0 : reloadreg = rld[secondary_reload].reg_rtx;
7592 : :
7593 : : /* See if RELOADREG is to be used as a scratch register
7594 : : or as an intermediate register. */
7595 : 0 : if (rl->secondary_out_icode != CODE_FOR_nothing)
7596 : : {
7597 : : /* We'd have to add extra code to handle this case. */
7598 : 0 : gcc_assert (tertiary_reload < 0);
7599 : :
7600 : 0 : emit_insn ((GEN_FCN (rl->secondary_out_icode)
7601 : 0 : (real_old, second_reloadreg, reloadreg)));
7602 : 0 : special = 1;
7603 : : }
7604 : : else
7605 : : {
7606 : : /* See if we need both a scratch and intermediate reload
7607 : : register. */
7608 : :
7609 : 0 : enum insn_code tertiary_icode
7610 : : = rld[secondary_reload].secondary_out_icode;
7611 : :
7612 : : /* We'd have to add more code for quaternary reloads. */
7613 : 0 : gcc_assert (tertiary_reload < 0
7614 : : || rld[tertiary_reload].secondary_out_reload < 0);
7615 : :
7616 : 0 : if (GET_MODE (reloadreg) != mode)
7617 : 0 : reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
7618 : :
7619 : 0 : if (tertiary_icode != CODE_FOR_nothing)
7620 : : {
7621 : 0 : rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7622 : :
7623 : : /* Copy the primary reload reg to the secondary reload reg, then
7624 : : copy the secondary reload reg to OLD using our insn. (Note that
7625 : : the two reload regs have been swapped above.) */
7626 : :
7627 : : /* If REAL_OLD is a paradoxical SUBREG, remove it
7628 : : and try to put the opposite SUBREG on
7629 : : RELOADREG. */
7630 : 0 : strip_paradoxical_subreg (&real_old, &reloadreg);
7631 : :
7632 : 0 : gen_reload (reloadreg, second_reloadreg,
7633 : : rl->opnum, rl->when_needed);
7634 : 0 : emit_insn ((GEN_FCN (tertiary_icode)
7635 : 0 : (real_old, reloadreg, third_reloadreg)));
7636 : 0 : special = 1;
7637 : : }
7638 : :
7639 : : else
7640 : : {
7641 : : /* Copy between the reload regs here and then to
7642 : : OUT later. */
7643 : :
7644 : 0 : gen_reload (reloadreg, second_reloadreg,
7645 : : rl->opnum, rl->when_needed);
7646 : 0 : if (tertiary_reload >= 0)
7647 : : {
7648 : 0 : rtx third_reloadreg = rld[tertiary_reload].reg_rtx;
7649 : :
7650 : 0 : gen_reload (third_reloadreg, reloadreg,
7651 : : rl->opnum, rl->when_needed);
7652 : 0 : reloadreg = third_reloadreg;
7653 : : }
7654 : : }
7655 : : }
7656 : : }
7657 : : }
7658 : :
7659 : : /* Output the last reload insn. */
7660 : 0 : if (! special)
7661 : : {
7662 : 0 : rtx set;
7663 : :
7664 : : /* Don't output the last reload if OLD is not the destination of
7665 : : INSN but does appear in its source and is clobbered by INSN. */
7666 : 0 : if (! flag_expensive_optimizations
7667 : 0 : || !REG_P (old)
7668 : 0 : || !(set = single_set (insn))
7669 : 0 : || rtx_equal_p (old, SET_DEST (set))
7670 : 0 : || !reg_mentioned_p (old, SET_SRC (set))
7671 : 0 : || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
7672 : 0 : && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
7673 : 0 : gen_reload (old, reloadreg, rl->opnum,
7674 : : rl->when_needed);
7675 : : }
7676 : :
7677 : : /* Look at all insns we emitted, just to be safe. */
7678 : 0 : for (p = get_insns (); p; p = NEXT_INSN (p))
7679 : 0 : if (INSN_P (p))
7680 : : {
7681 : 0 : rtx pat = PATTERN (p);
7682 : :
7683 : : /* If this output reload doesn't come from a spill reg,
7684 : : clear any memory of reloaded copies of the pseudo reg.
7685 : : If this output reload comes from a spill reg,
7686 : : reg_has_output_reload will make this do nothing. */
7687 : 0 : note_stores (p, forget_old_reloads_1, NULL);
7688 : :
7689 : 0 : if (reg_mentioned_p (rl_reg_rtx, pat))
7690 : : {
7691 : 0 : rtx set = single_set (insn);
7692 : 0 : if (reload_spill_index[j] < 0
7693 : 0 : && set
7694 : 0 : && SET_SRC (set) == rl_reg_rtx)
7695 : : {
7696 : 0 : int src = REGNO (SET_SRC (set));
7697 : :
7698 : 0 : reload_spill_index[j] = src;
7699 : 0 : SET_HARD_REG_BIT (reg_is_output_reload, src);
7700 : 0 : if (find_regno_note (insn, REG_DEAD, src))
7701 : 0 : SET_HARD_REG_BIT (reg_reloaded_died, src);
7702 : : }
7703 : 0 : if (HARD_REGISTER_P (rl_reg_rtx))
7704 : : {
7705 : 0 : int s = rl->secondary_out_reload;
7706 : 0 : set = single_set (p);
7707 : : /* If this reload copies only to the secondary reload
7708 : : register, the secondary reload does the actual
7709 : : store. */
7710 : 0 : if (s >= 0 && set == NULL_RTX)
7711 : : /* We can't tell what function the secondary reload
7712 : : has and where the actual store to the pseudo is
7713 : : made; leave new_spill_reg_store alone. */
7714 : : ;
7715 : 0 : else if (s >= 0
7716 : 0 : && SET_SRC (set) == rl_reg_rtx
7717 : 0 : && SET_DEST (set) == rld[s].reg_rtx)
7718 : : {
7719 : : /* Usually the next instruction will be the
7720 : : secondary reload insn; if we can confirm
7721 : : that it is, setting new_spill_reg_store to
7722 : : that insn will allow an extra optimization. */
7723 : 0 : rtx s_reg = rld[s].reg_rtx;
7724 : 0 : rtx_insn *next = NEXT_INSN (p);
7725 : 0 : rld[s].out = rl->out;
7726 : 0 : rld[s].out_reg = rl->out_reg;
7727 : 0 : set = single_set (next);
7728 : 0 : if (set && SET_SRC (set) == s_reg
7729 : 0 : && reload_reg_rtx_reaches_end_p (s_reg, s))
7730 : : {
7731 : 0 : SET_HARD_REG_BIT (reg_is_output_reload,
7732 : : REGNO (s_reg));
7733 : 0 : new_spill_reg_store[REGNO (s_reg)] = next;
7734 : : }
7735 : : }
7736 : 0 : else if (reload_reg_rtx_reaches_end_p (rl_reg_rtx, j))
7737 : 0 : new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
7738 : : }
7739 : : }
7740 : : }
7741 : :
7742 : 0 : if (rl->when_needed == RELOAD_OTHER)
7743 : : {
7744 : 0 : emit_insn (other_output_reload_insns[rl->opnum]);
7745 : 0 : other_output_reload_insns[rl->opnum] = get_insns ();
7746 : : }
7747 : : else
7748 : 0 : output_reload_insns[rl->opnum] = get_insns ();
7749 : :
7750 : 0 : if (cfun->can_throw_non_call_exceptions)
7751 : 0 : copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7752 : :
7753 : 0 : end_sequence ();
7754 : 0 : }
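 : :
 : : /* Illustrative sketch (hypothetical target): when the value being stored
 : :    lives in a register class that cannot reach the pseudo's memory
 : :    location directly, the output reload above goes through the secondary
 : :    reload register first, e.g.
 : :
 : :    (set (reg:SF r4) (reg:SF f0))                       ;; reload reg copy
 : :    (set (mem:SF (plus:SI (reg:SI fp) (const_int -8)))
 : :         (reg:SF r4))                                   ;; store to OLD
 : :
 : :    whereas a target that supplies secondary_out_icode instead gets a
 : :    single insn generated from that icode, with the secondary register
 : :    used purely as a scratch. */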
7755 : :
7756 : : /* Do input reloading for reload RL, which is for the insn described by CHAIN
7757 : : and has the number J. */
7758 : : static void
7759 : 0 : do_input_reload (class insn_chain *chain, struct reload *rl, int j)
7760 : : {
7761 : 0 : rtx_insn *insn = chain->insn;
7762 : 0 : rtx old = (rl->in && MEM_P (rl->in)
7763 : 0 : ? rl->in_reg : rl->in);
7764 : 0 : rtx reg_rtx = rl->reg_rtx;
7765 : :
7766 : 0 : if (old && reg_rtx)
7767 : : {
7768 : 0 : machine_mode mode;
7769 : :
7770 : : /* Determine the mode to reload in.
7771 : : This is very tricky because we have three to choose from.
7772 : : There is the mode the insn operand wants (rl->inmode).
7773 : : There is the mode of the reload register RELOADREG.
7774 : : There is the intrinsic mode of the operand, which we could find
7775 : : by stripping some SUBREGs.
7776 : : It turns out that RELOADREG's mode is irrelevant:
7777 : : we can change that arbitrarily.
7778 : :
7779 : : Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
7780 : : then the reload reg may not support QImode moves, so use SImode.
7781 : : If foo is in memory due to spilling a pseudo reg, this is safe,
7782 : : because the QImode value is in the least significant part of a
7783 : : slot big enough for a SImode. If foo is some other sort of
7784 : : memory reference, then it is impossible to reload this case,
7785 : : so previous passes had better make sure this never happens.
7786 : :
7787 : : Then consider a one-word union which has SImode and one of its
7788 : : members is a float, being fetched as (SUBREG:SF union:SI).
7789 : : We must fetch that as SFmode because we could be loading into
7790 : : a float-only register. In this case OLD's mode is correct.
7791 : :
7792 : : Consider an immediate integer: it has VOIDmode. Here we need
7793 : : to get a mode from something else.
7794 : :
7795 : : In some cases, there is a fourth mode, the operand's
7796 : : containing mode. If the insn specifies a containing mode for
7797 : : this operand, it overrides all others.
7798 : :
7799 : : I am not sure whether the algorithm here is always right,
7800 : : but it does the right things in those cases. */
7801 : :
7802 : 0 : mode = GET_MODE (old);
7803 : 0 : if (mode == VOIDmode)
7804 : 0 : mode = rl->inmode;
7805 : :
7806 : : /* We cannot use gen_lowpart_common since it can do the wrong thing
7807 : : when REG_RTX has a multi-word mode. Note that REG_RTX must
7808 : : always be a REG here. */
7809 : 0 : if (GET_MODE (reg_rtx) != mode)
7810 : 0 : reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7811 : : }
7812 : 0 : reload_reg_rtx_for_input[j] = reg_rtx;
7813 : :
7814 : 0 : if (old != 0
7815 : : /* AUTO_INC reloads need to be handled even if inherited. We got an
7816 : : AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
7817 : 0 : && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
7818 : 0 : && ! rtx_equal_p (reg_rtx, old)
7819 : 0 : && reg_rtx != 0)
7820 : 0 : emit_input_reload_insns (chain, rld + j, old, j);
7821 : :
7822 : : /* When inheriting a wider reload, we have a MEM in rl->in,
7823 : : e.g. inheriting a SImode output reload for
7824 : : (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7825 : 0 : if (optimize && reload_inherited[j] && rl->in
7826 : 0 : && MEM_P (rl->in)
7827 : 0 : && MEM_P (rl->in_reg)
7828 : 0 : && reload_spill_index[j] >= 0
7829 : 0 : && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7830 : 0 : rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7831 : :
7832 : : /* If we are reloading a register that was recently stored in with an
7833 : : output-reload, see if we can prove there was
7834 : : actually no need to store the old value in it. */
7835 : :
7836 : 0 : if (optimize
7837 : 0 : && (reload_inherited[j] || reload_override_in[j])
7838 : 0 : && reg_rtx
7839 : 0 : && REG_P (reg_rtx)
7840 : 0 : && spill_reg_store[REGNO (reg_rtx)] != 0
7841 : : #if 0
7842 : : /* There doesn't seem to be any reason to restrict this to pseudos
7843 : : and doing so loses in the case where we are copying from a
7844 : : register of the wrong class. */
7845 : : && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
7846 : : #endif
7847 : : /* The insn might have already some references to stackslots
7848 : : replaced by MEMs, while reload_out_reg still names the
7849 : : original pseudo. */
7850 : 0 : && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
7851 : 0 : || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
7852 : 0 : delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
7853 : 0 : }
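 : :
 : : /* Worked example of the mode selection above (a sketch): for an
 : :    immediate input such as (const_int 42), GET_MODE returns VOIDmode, so
 : :    the mode falls back to rl->inmode; the chosen reload register is then
 : :    rewritten in that mode via reload_adjust_reg_for_mode before any
 : :    reload insns are generated for it. */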
7854 : :
7855 : : /* Do output reloading for reload RL, which is for the insn described by
7856 : : CHAIN and has the number J.
7857 : : ??? At some point we need to support handling output reloads of
7858 : : JUMP_INSNs. */
7859 : : static void
7860 : 0 : do_output_reload (class insn_chain *chain, struct reload *rl, int j)
7861 : : {
7862 : 0 : rtx note, old;
7863 : 0 : rtx_insn *insn = chain->insn;
7864 : : /* If this is an output reload that stores something that is
7865 : : not loaded in this same reload, see if we can eliminate a previous
7866 : : store. */
7867 : 0 : rtx pseudo = rl->out_reg;
7868 : 0 : rtx reg_rtx = rl->reg_rtx;
7869 : :
7870 : 0 : if (rl->out && reg_rtx)
7871 : : {
7872 : 0 : machine_mode mode;
7873 : :
7874 : : /* Determine the mode to reload in.
7875 : : See comments above (for input reloading). */
7876 : 0 : mode = GET_MODE (rl->out);
7877 : 0 : if (mode == VOIDmode)
7878 : : {
7879 : : /* VOIDmode should never happen for an output. */
7880 : 0 : if (asm_noperands (PATTERN (insn)) < 0)
7881 : : /* It's the compiler's fault. */
7882 : 0 : fatal_insn ("VOIDmode on an output", insn);
7883 : 0 : error_for_asm (insn, "output operand is constant in %<asm%>");
7884 : : /* Prevent crash--use something we know is valid. */
7885 : 0 : mode = word_mode;
7886 : 0 : rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
7887 : : }
7888 : 0 : if (GET_MODE (reg_rtx) != mode)
7889 : 0 : reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
7890 : : }
7891 : 0 : reload_reg_rtx_for_output[j] = reg_rtx;
7892 : :
7893 : 0 : if (pseudo
7894 : 0 : && optimize
7895 : 0 : && REG_P (pseudo)
7896 : 0 : && ! rtx_equal_p (rl->in_reg, pseudo)
7897 : 0 : && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7898 : 0 : && reg_last_reload_reg[REGNO (pseudo)])
7899 : : {
7900 : 0 : int pseudo_no = REGNO (pseudo);
7901 : 0 : int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7902 : :
7903 : : /* We don't need to test full validity of last_regno for
7904 : : inherit here; we only want to know if the store actually
7905 : : matches the pseudo. */
7906 : 0 : if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
7907 : 0 : && reg_reloaded_contents[last_regno] == pseudo_no
7908 : 0 : && spill_reg_store[last_regno]
7909 : 0 : && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7910 : 0 : delete_output_reload (insn, j, last_regno, reg_rtx);
7911 : : }
7912 : :
7913 : 0 : old = rl->out_reg;
7914 : 0 : if (old == 0
7915 : 0 : || reg_rtx == 0
7916 : 0 : || rtx_equal_p (old, reg_rtx))
7917 : 0 : return;
7918 : :
7919 : : /* An output operand that dies right away still needs a reload
7920 : : register, but its value need not be copied back from that register.
7921 : : Show the new location in the REG_UNUSED note. */
7922 : 0 : if ((REG_P (old) || GET_CODE (old) == SCRATCH)
7923 : 0 : && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7924 : : {
7925 : 0 : XEXP (note, 0) = reg_rtx;
7926 : 0 : return;
7927 : : }
7928 : : /* Likewise for a SUBREG of an operand that dies. */
7929 : 0 : else if (GET_CODE (old) == SUBREG
7930 : 0 : && REG_P (SUBREG_REG (old))
7931 : 0 : && (note = find_reg_note (insn, REG_UNUSED,
7932 : : SUBREG_REG (old))) != 0)
7933 : : {
7934 : 0 : XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
7935 : 0 : return;
7936 : : }
7937 : 0 : else if (GET_CODE (old) == SCRATCH)
7938 : : /* If we aren't optimizing, there won't be a REG_UNUSED note,
7939 : : but we don't want to make an output reload. */
7940 : : return;
7941 : :
7942 : : /* If INSN is a JUMP_INSN, we can't support output reloads yet. */
7943 : 0 : gcc_assert (NONJUMP_INSN_P (insn));
7944 : :
7945 : 0 : emit_output_reload_insns (chain, rld + j, j);
7946 : : }
7947 : :
7948 : : /* A reload copies values of MODE from register SRC to register DEST.
7949 : : Return true if it can be treated for inheritance purposes like a
7950 : : group of reloads, each one reloading a single hard register. The
7951 : : caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
7952 : : occupy the same number of hard registers. */
7953 : :
7954 : : static bool
7955 : 0 : inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
7956 : : int src ATTRIBUTE_UNUSED,
7957 : : machine_mode mode ATTRIBUTE_UNUSED)
7958 : : {
7959 : 0 : return (REG_CAN_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
7960 : 0 : && REG_CAN_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
7961 : : }
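 : :
 : : /* Sketch of the piecemeal case: on a 32-bit target a DImode reload
 : :    copying (reg:DI 2) to (reg:DI 6) spans hard regs 2-3 and 6-7. If
 : :    both registers can change mode between DImode and their word-sized
 : :    raw modes, the copy is treated, for inheritance, as two independent
 : :    single-word copies, so a later insn can inherit either half on its
 : :    own instead of all-or-nothing. */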
7962 : :
7963 : : /* Output insns to reload values in and out of the chosen reload regs. */
7964 : :
7965 : : static void
7966 : 0 : emit_reload_insns (class insn_chain *chain)
7967 : : {
7968 : 0 : rtx_insn *insn = chain->insn;
7969 : :
7970 : 0 : int j;
7971 : :
7972 : 0 : CLEAR_HARD_REG_SET (reg_reloaded_died);
7973 : :
7974 : 0 : for (j = 0; j < reload_n_operands; j++)
7975 : 0 : input_reload_insns[j] = input_address_reload_insns[j]
7976 : 0 : = inpaddr_address_reload_insns[j]
7977 : 0 : = output_reload_insns[j] = output_address_reload_insns[j]
7978 : 0 : = outaddr_address_reload_insns[j]
7979 : 0 : = other_output_reload_insns[j] = 0;
7980 : 0 : other_input_address_reload_insns = 0;
7981 : 0 : other_input_reload_insns = 0;
7982 : 0 : operand_reload_insns = 0;
7983 : 0 : other_operand_reload_insns = 0;
7984 : :
7985 : : /* Dump reloads into the dump file. */
7986 : 0 : if (dump_file)
7987 : : {
7988 : 0 : fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
7989 : 0 : debug_reload_to_stream (dump_file);
7990 : : }
7991 : :
7992 : 0 : for (j = 0; j < n_reloads; j++)
7993 : 0 : if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
7994 : : {
7995 : : unsigned int i;
7996 : :
7997 : 0 : for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
7998 : 0 : new_spill_reg_store[i] = 0;
7999 : : }
8000 : :
8001 : : /* Now output the instructions to copy the data into and out of the
8002 : : reload registers. Do these in the order that the reloads were reported,
8003 : : since reloads of base and index registers precede reloads of operands
8004 : : and the operands may need the base and index registers reloaded. */
8005 : :
8006 : 0 : for (j = 0; j < n_reloads; j++)
8007 : : {
8008 : 0 : do_input_reload (chain, rld + j, j);
8009 : 0 : do_output_reload (chain, rld + j, j);
8010 : : }
8011 : :
8012 : : /* Now write all the insns we made for reloads in the order expected by
8013 : : the allocation functions. Prior to the insn being reloaded, we write
8014 : : the following reloads:
8015 : :
8016 : : RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
8017 : :
8018 : : RELOAD_OTHER reloads.
8019 : :
8020 : : For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
8021 : : by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
8022 : : RELOAD_FOR_INPUT reload for the operand.
8023 : :
8024 : : RELOAD_FOR_OPADDR_ADDRS reloads.
8025 : :
8026 : : RELOAD_FOR_OPERAND_ADDRESS reloads.
8027 : :
8028 : : After the insn being reloaded, we write the following:
8029 : :
8030 : : For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
8031 : : by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
8032 : : RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
8033 : : reloads for the operand. The RELOAD_OTHER output reloads are
8034 : : output in descending order by reload number. */
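 : :
 : : /* Concretely (an illustrative sketch), for an insn I with one reloaded
 : : input operand and one reloaded output operand the stream becomes:
 : :
 : : input-address reloads for the operand
 : : the RELOAD_FOR_INPUT copy into the reload register
 : : I itself
 : : output-address reloads for the operand
 : : the RELOAD_FOR_OUTPUT copy out of the reload register
 : :
 : : which is exactly the order of the emit_insn_before and emit_insn_after
 : : calls below. */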
8035 : :
8036 : 0 : emit_insn_before (other_input_address_reload_insns, insn);
8037 : 0 : emit_insn_before (other_input_reload_insns, insn);
8038 : :
8039 : 0 : for (j = 0; j < reload_n_operands; j++)
8040 : : {
8041 : 0 : emit_insn_before (inpaddr_address_reload_insns[j], insn);
8042 : 0 : emit_insn_before (input_address_reload_insns[j], insn);
8043 : 0 : emit_insn_before (input_reload_insns[j], insn);
8044 : : }
8045 : :
8046 : 0 : emit_insn_before (other_operand_reload_insns, insn);
8047 : 0 : emit_insn_before (operand_reload_insns, insn);
8048 : :
8049 : 0 : for (j = 0; j < reload_n_operands; j++)
8050 : : {
8051 : 0 : rtx_insn *x = emit_insn_after (outaddr_address_reload_insns[j], insn);
8052 : 0 : x = emit_insn_after (output_address_reload_insns[j], x);
8053 : 0 : x = emit_insn_after (output_reload_insns[j], x);
8054 : 0 : emit_insn_after (other_output_reload_insns[j], x);
8055 : : }
8056 : :
8057 : : /* For all the spill regs newly reloaded in this instruction,
8058 : : record what they were reloaded from, so subsequent instructions
8059 : : can inherit the reloads.
8060 : :
8061 : : Update spill_reg_store for the reloads of this insn.
8062 : : Copy the elements that were updated in the loop above. */
8063 : :
8064 : 0 : for (j = 0; j < n_reloads; j++)
8065 : : {
8066 : 0 : int r = reload_order[j];
8067 : 0 : int i = reload_spill_index[r];
8068 : :
8069 : : /* If this is a non-inherited input reload from a pseudo, we must
8070 : : clear any memory of a previous store to the same pseudo. Only do
8071 : : something if there will not be an output reload for the pseudo
8072 : : being reloaded. */
8073 : 0 : if (rld[r].in_reg != 0
8074 : 0 : && ! (reload_inherited[r] || reload_override_in[r]))
8075 : : {
8076 : 0 : rtx reg = rld[r].in_reg;
8077 : :
8078 : 0 : if (GET_CODE (reg) == SUBREG)
8079 : 0 : reg = SUBREG_REG (reg);
8080 : :
8081 : 0 : if (REG_P (reg)
8082 : 0 : && REGNO (reg) >= FIRST_PSEUDO_REGISTER
8083 : 0 : && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
8084 : : {
8085 : 0 : int nregno = REGNO (reg);
8086 : :
8087 : 0 : if (reg_last_reload_reg[nregno])
8088 : : {
8089 : 0 : int last_regno = REGNO (reg_last_reload_reg[nregno]);
8090 : :
8091 : 0 : if (reg_reloaded_contents[last_regno] == nregno)
8092 : 0 : spill_reg_store[last_regno] = 0;
8093 : : }
8094 : : }
8095 : : }
8096 : :
8097 : : /* I is nonneg if this reload used a register.
8098 : : If rld[r].reg_rtx is 0, this is an optional reload
8099 : : that we opted to ignore. */
8100 : :
8101 : 0 : if (i >= 0 && rld[r].reg_rtx != 0)
8102 : : {
8103 : 0 : int nr = hard_regno_nregs (i, GET_MODE (rld[r].reg_rtx));
8104 : 0 : int k;
8105 : :
8106 : : /* For a multi register reload, we need to check if all or part
8107 : : of the value lives to the end. */
8108 : 0 : for (k = 0; k < nr; k++)
8109 : 0 : if (reload_reg_reaches_end_p (i + k, r))
8110 : 0 : CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
8111 : :
8112 : : /* Maybe the spill reg contains a copy of reload_out. */
8113 : 0 : if (rld[r].out != 0
8114 : 0 : && (REG_P (rld[r].out)
8115 : 0 : || (rld[r].out_reg
8116 : 0 : ? REG_P (rld[r].out_reg)
8117 : : /* The reload value is an auto-modification of
8118 : : some kind. For PRE_INC, POST_INC, PRE_DEC
8119 : : and POST_DEC, we record an equivalence
8120 : : between the reload register and the operand
8121 : : on the optimistic assumption that we can make
8122 : : the equivalence hold. reload_as_needed must
8123 : : then either make it hold or invalidate the
8124 : : equivalence.
8125 : :
8126 : : PRE_MODIFY and POST_MODIFY addresses are reloaded
8127 : : somewhat differently, and allowing them here leads
8128 : : to problems. */
8129 : : : (GET_CODE (rld[r].out) != POST_MODIFY
8130 : 0 : && GET_CODE (rld[r].out) != PRE_MODIFY))))
8131 : : {
8132 : 0 : rtx reg;
8133 : :
8134 : 0 : reg = reload_reg_rtx_for_output[r];
8135 : 0 : if (reload_reg_rtx_reaches_end_p (reg, r))
8136 : : {
8137 : 0 : machine_mode mode = GET_MODE (reg);
8138 : 0 : int regno = REGNO (reg);
8139 : 0 : int nregs = REG_NREGS (reg);
8140 : 0 : rtx out = (REG_P (rld[r].out)
8141 : 0 : ? rld[r].out
8142 : 0 : : rld[r].out_reg
8143 : 0 : ? rld[r].out_reg
8144 : 0 : /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
8145 : 0 : int out_regno = REGNO (out);
8146 : 0 : int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
8147 : 0 : : hard_regno_nregs (out_regno, mode));
8148 : 0 : bool piecemeal;
8149 : :
8150 : 0 : spill_reg_store[regno] = new_spill_reg_store[regno];
8151 : 0 : spill_reg_stored_to[regno] = out;
8152 : 0 : reg_last_reload_reg[out_regno] = reg;
8153 : :
8154 : 0 : piecemeal = (HARD_REGISTER_NUM_P (out_regno)
8155 : 0 : && nregs == out_nregs
8156 : 0 : && inherit_piecemeal_p (out_regno, regno, mode));
8157 : :
8158 : : /* If OUT_REGNO is a hard register, it may occupy more than
8159 : : one register. If it does, say what is in the
8160 : : rest of the registers assuming that both registers
8161 : : agree on how many words the object takes. If not,
8162 : : invalidate the subsequent registers. */
8163 : :
8164 : 0 : if (HARD_REGISTER_NUM_P (out_regno))
8165 : 0 : for (k = 1; k < out_nregs; k++)
8166 : 0 : reg_last_reload_reg[out_regno + k]
8167 : 0 : = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8168 : :
8169 : : /* Now do the inverse operation. */
8170 : 0 : for (k = 0; k < nregs; k++)
8171 : : {
8172 : 0 : CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8173 : 0 : reg_reloaded_contents[regno + k]
8174 : 0 : = (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
8175 : 0 : ? out_regno
8176 : : : out_regno + k);
8177 : 0 : reg_reloaded_insn[regno + k] = insn;
8178 : 0 : SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8179 : : }
8180 : : }
8181 : : }
8182 : : /* Maybe the spill reg contains a copy of reload_in. Only do
8183 : : something if there will not be an output reload for
8184 : : the register being reloaded. */
8185 : 0 : else if (rld[r].out_reg == 0
8186 : 0 : && rld[r].in != 0
8187 : 0 : && ((REG_P (rld[r].in)
8188 : 0 : && !HARD_REGISTER_P (rld[r].in)
8189 : 0 : && !REGNO_REG_SET_P (&reg_has_output_reload,
8190 : : REGNO (rld[r].in)))
8191 : 0 : || (REG_P (rld[r].in_reg)
8192 : 0 : && !REGNO_REG_SET_P (&reg_has_output_reload,
8193 : : REGNO (rld[r].in_reg))))
8194 : 0 : && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
8195 : : {
8196 : 0 : rtx reg;
8197 : :
8198 : 0 : reg = reload_reg_rtx_for_input[r];
8199 : 0 : if (reload_reg_rtx_reaches_end_p (reg, r))
8200 : : {
8201 : 0 : machine_mode mode;
8202 : 0 : int regno;
8203 : 0 : int nregs;
8204 : 0 : int in_regno;
8205 : 0 : int in_nregs;
8206 : 0 : rtx in;
8207 : 0 : bool piecemeal;
8208 : :
8209 : 0 : mode = GET_MODE (reg);
8210 : 0 : regno = REGNO (reg);
8211 : 0 : nregs = REG_NREGS (reg);
8212 : 0 : if (REG_P (rld[r].in)
8213 : 0 : && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
8214 : : in = rld[r].in;
8215 : 0 : else if (REG_P (rld[r].in_reg))
8216 : : in = rld[r].in_reg;
8217 : : else
8218 : 0 : in = XEXP (rld[r].in_reg, 0);
8219 : 0 : in_regno = REGNO (in);
8220 : :
8221 : 0 : in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
8222 : 0 : : hard_regno_nregs (in_regno, mode));
8223 : :
8224 : 0 : reg_last_reload_reg[in_regno] = reg;
8225 : :
8226 : 0 : piecemeal = (HARD_REGISTER_NUM_P (in_regno)
8227 : 0 : && nregs == in_nregs
8228 : 0 : && inherit_piecemeal_p (regno, in_regno, mode));
8229 : :
8230 : 0 : if (HARD_REGISTER_NUM_P (in_regno))
8231 : 0 : for (k = 1; k < in_nregs; k++)
8232 : 0 : reg_last_reload_reg[in_regno + k]
8233 : 0 : = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8234 : :
8235 : : /* Unless we inherited this reload, show we haven't
8236 : : recently done a store.
8237 : : Previous stores of inherited auto_inc expressions
8238 : : also have to be discarded. */
8239 : 0 : if (! reload_inherited[r]
8240 : 0 : || (rld[r].out && ! rld[r].out_reg))
8241 : 0 : spill_reg_store[regno] = 0;
8242 : :
8243 : 0 : for (k = 0; k < nregs; k++)
8244 : : {
8245 : 0 : CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8246 : 0 : reg_reloaded_contents[regno + k]
8247 : 0 : = (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
8248 : 0 : ? in_regno
8249 : : : in_regno + k);
8250 : 0 : reg_reloaded_insn[regno + k] = insn;
8251 : 0 : SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8252 : : }
8253 : : }
8254 : : }
8255 : : }
8256 : :
8257 : : /* The following if-statement was #if 0'd in 1.34 (or before...).
8258 : : It's reenabled in 1.35 because supposedly nothing else
8259 : : deals with this problem. */
8260 : :
8261 : : /* If a register gets output-reloaded from a non-spill register,
8262 : : that invalidates any previous reloaded copy of it.
8263 : : But forget_old_reloads_1 won't get to see it, because
8264 : : it thinks only about the original insn. So invalidate it here.
8265 : : Also do the same thing for RELOAD_OTHER constraints where the
8266 : : output is discarded. */
8267 : 0 : if (i < 0
8268 : 0 : && ((rld[r].out != 0
8269 : 0 : && (REG_P (rld[r].out)
8270 : 0 : || (MEM_P (rld[r].out)
8271 : 0 : && REG_P (rld[r].out_reg))))
8272 : 0 : || (rld[r].out == 0 && rld[r].out_reg
8273 : 0 : && REG_P (rld[r].out_reg))))
8274 : : {
8275 : 0 : rtx out = ((rld[r].out && REG_P (rld[r].out))
8276 : 0 : ? rld[r].out : rld[r].out_reg);
8277 : 0 : int out_regno = REGNO (out);
8278 : 0 : machine_mode mode = GET_MODE (out);
8279 : :
8280 : : /* REG_RTX is now set or clobbered by the main instruction.
8281 : : As the comment above explains, forget_old_reloads_1 only
8282 : : sees the original instruction, and there is no guarantee
8283 : : that the original instruction also clobbered REG_RTX.
8284 : : For example, if find_reloads sees that the input side of
8285 : : a matched operand pair dies in this instruction, it may
8286 : : use the input register as the reload register.
8287 : :
8288 : : Calling forget_old_reloads_1 is a waste of effort if
8289 : : REG_RTX is also the output register.
8290 : :
8291 : : If we know that REG_RTX holds the value of a pseudo
8292 : : register, the code after the call will record that fact. */
8293 : 0 : if (rld[r].reg_rtx && rld[r].reg_rtx != out)
8294 : 0 : forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
8295 : :
8296 : 0 : if (!HARD_REGISTER_NUM_P (out_regno))
8297 : : {
8298 : 0 : rtx src_reg;
8299 : 0 : rtx_insn *store_insn = NULL;
8300 : :
8301 : 0 : reg_last_reload_reg[out_regno] = 0;
8302 : :
8303 : : /* If we can find a hard register that is stored, record
8304 : : the storing insn so that we may delete this insn with
8305 : : delete_output_reload. */
8306 : 0 : src_reg = reload_reg_rtx_for_output[r];
8307 : :
8308 : 0 : if (src_reg)
8309 : : {
8310 : 0 : if (reload_reg_rtx_reaches_end_p (src_reg, r))
8311 : 0 : store_insn = new_spill_reg_store[REGNO (src_reg)];
8312 : : else
8313 : : src_reg = NULL_RTX;
8314 : : }
8315 : : else
8316 : : {
8317 : : /* If this is an optional reload, try to find the
8318 : : source reg from an input reload. */
8319 : 0 : rtx set = single_set (insn);
8320 : 0 : if (set && SET_DEST (set) == rld[r].out)
8321 : : {
8322 : 0 : int k;
8323 : :
8324 : 0 : src_reg = SET_SRC (set);
8325 : 0 : store_insn = insn;
8326 : 0 : for (k = 0; k < n_reloads; k++)
8327 : : {
8328 : 0 : if (rld[k].in == src_reg)
8329 : : {
8330 : 0 : src_reg = reload_reg_rtx_for_input[k];
8331 : 0 : break;
8332 : : }
8333 : : }
8334 : : }
8335 : : }
8336 : 0 : if (src_reg && REG_P (src_reg)
8337 : 0 : && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
8338 : : {
8339 : 0 : int src_regno, src_nregs, k;
8340 : 0 : rtx note;
8341 : :
8342 : 0 : gcc_assert (GET_MODE (src_reg) == mode);
8343 : 0 : src_regno = REGNO (src_reg);
8344 : 0 : src_nregs = hard_regno_nregs (src_regno, mode);
8345 : : /* Where a death note is found varies with
8346 : : PRESERVE_DEATH_INFO_REGNO_P. The condition is not
8347 : : necessarily checked exactly in the code that moves
8348 : : notes, so just check both locations. */
8349 : 0 : note = find_regno_note (insn, REG_DEAD, src_regno);
8350 : 0 : if (! note && store_insn)
8351 : 0 : note = find_regno_note (store_insn, REG_DEAD, src_regno);
8352 : 0 : for (k = 0; k < src_nregs; k++)
8353 : : {
8354 : 0 : spill_reg_store[src_regno + k] = store_insn;
8355 : 0 : spill_reg_stored_to[src_regno + k] = out;
8356 : 0 : reg_reloaded_contents[src_regno + k] = out_regno;
8357 : 0 : reg_reloaded_insn[src_regno + k] = store_insn;
8358 : 0 : CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8359 : 0 : SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8360 : 0 : SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
8361 : 0 : if (note)
8362 : 0 : SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
8363 : : else
8364 : 0 : CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
8365 : : }
8366 : 0 : reg_last_reload_reg[out_regno] = src_reg;
8367 : : /* We have to set reg_has_output_reload here, or else
8368 : : forget_old_reloads_1 will clear reg_last_reload_reg
8369 : : right away. */
8370 : 0 : SET_REGNO_REG_SET (&reg_has_output_reload,
8371 : : out_regno);
8372 : : }
8373 : : }
8374 : : else
8375 : : {
8376 : 0 : int k, out_nregs = hard_regno_nregs (out_regno, mode);
8377 : :
8378 : 0 : for (k = 0; k < out_nregs; k++)
8379 : 0 : reg_last_reload_reg[out_regno + k] = 0;
8380 : : }
8381 : : }
8382 : : }
8383 : 0 : reg_reloaded_dead |= reg_reloaded_died;
8384 : 0 : }
8385 : :
8386 : :
8387 : : /* Helper for emit_insn_if_valid_for_reload. */
8388 : :
8389 : : static rtx_insn *
8390 : 0 : emit_insn_if_valid_for_reload_1 (rtx pat)
8391 : : {
8392 : 0 : rtx_insn *last = get_last_insn ();
8393 : 0 : int code;
8394 : :
8395 : 0 : rtx_insn *insn = emit_insn (pat);
8396 : 0 : code = recog_memoized (insn);
8397 : :
8398 : 0 : if (code >= 0)
8399 : : {
8400 : 0 : extract_insn (insn);
8401 : : /* We want constrain operands to treat this insn strictly in its
8402 : : validity determination, i.e., the way it would after reload has
8403 : : completed. */
8404 : 0 : if (constrain_operands (1, get_enabled_alternatives (insn)))
8405 : : return insn;
8406 : : }
8407 : :
8408 : 0 : delete_insns_since (last);
8409 : 0 : return NULL;
8410 : : }
8411 : :
8412 : : /* Go through the motions to emit pattern PAT as an insn and test if it is
8413 : : strictly valid. Return the emitted insn if valid, else return NULL. */
8414 : :
8415 : : static rtx_insn *
8416 : 0 : emit_insn_if_valid_for_reload (rtx pat)
8417 : : {
8418 : 0 : rtx_insn *insn = emit_insn_if_valid_for_reload_1 (pat);
8419 : :
8420 : 0 : if (insn)
8421 : : return insn;
8422 : :
8423 : : /* If the pattern is a SET, and this target has a single
8424 : : flags-register, try again with a PARALLEL that clobbers that
8425 : : register. */
8426 : 0 : if (targetm.flags_regnum == INVALID_REGNUM || GET_CODE (pat) != SET)
8427 : : return NULL;
8428 : :
8429 : 0 : rtx flags_clobber = gen_hard_reg_clobber (CCmode, targetm.flags_regnum);
8430 : 0 : rtx parpat = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, pat, flags_clobber));
8431 : :
8432 : 0 : return emit_insn_if_valid_for_reload (parpat);
8433 : : }
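 : :
 : : /* Sketch of the flags-clobber retry above (the register name is
 : :    illustrative): on a target whose arithmetic clobbers a single flags
 : :    register, a reload pattern such as
 : :      (set (reg:SI 3) (plus:SI (reg:SI 3) (const_int 4)))
 : :    may only be recognizable in the form
 : :      (parallel [(set (reg:SI 3) (plus:SI (reg:SI 3) (const_int 4)))
 : :                 (clobber (reg:CC flags))])
 : :    which is the PARALLEL the recursive call constructs and retries. */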
8434 : :
8435 : : /* Emit code to perform a reload from IN (which may be a reload register) to
8436 : : OUT (which may also be a reload register). IN or OUT is from operand
8437 : : OPNUM with reload type TYPE.
8438 : :
8439 : : Returns first insn emitted. */
8440 : :
8441 : : static rtx_insn *
8442 : 0 : gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
8443 : : {
8444 : 0 : rtx_insn *last = get_last_insn ();
8445 : 0 : rtx_insn *tem;
8446 : 0 : rtx tem1, tem2;
8447 : :
8448 : : /* If IN is a paradoxical SUBREG, remove it and try to put the
8449 : : opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
8450 : 0 : if (!strip_paradoxical_subreg (&in, &out))
8451 : 0 : strip_paradoxical_subreg (&out, &in);
8452 : :
8453 : : /* How to do this reload can get quite tricky. Normally, we are being
8454 : : asked to reload a simple operand, such as a MEM, a constant, or a pseudo
8455 : : register that didn't get a hard register. In that case we can just
8456 : : call emit_move_insn.
8457 : :
8458 : : We can also be asked to reload a PLUS that adds a register or a MEM to
8459 : : another register, constant or MEM. This can occur during frame pointer
8460 : : elimination and while reloading addresses. This case is handled by
8461 : : trying to emit a single insn to perform the add. If it is not valid,
8462 : : we use a two insn sequence.
8463 : :
8464 : : Or we can be asked to reload a unary operand that was a fragment of
8465 : : an addressing mode, into a register. If it isn't recognized as-is,
8466 : : we try making the unop operand and the reload-register the same:
8467 : : (set reg:X (unop:X expr:Y))
8468 : : -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).
8469 : :
8470 : : Finally, we could be called to handle an 'o' constraint by putting
8471 : : an address into a register. In that case, we first try to do this
8472 : : with a named pattern of "reload_load_address". If no such pattern
8473 : : exists, we just emit a SET insn and hope for the best (it will normally
8474 : : be valid on machines that use 'o').
8475 : :
8476 : : This entire process is made complex both because reload will never
8477 : : process the insns we generate here, so we must ensure that they
8478 : : fit their constraints, and because parts of IN might be being
8479 : : reloaded separately and replaced with spill registers.
8480 : : Because of this, we are, in some sense, just guessing the right approach
8481 : : here. The one listed above seems to work.
8482 : :
8483 : : ??? At some point, this whole thing needs to be rethought. */
8484 : :
8485 : 0 : if (GET_CODE (in) == PLUS
8486 : 0 : && (REG_P (XEXP (in, 0))
8487 : : || GET_CODE (XEXP (in, 0)) == SUBREG
8488 : : || MEM_P (XEXP (in, 0)))
8489 : 0 : && (REG_P (XEXP (in, 1))
8490 : 0 : || GET_CODE (XEXP (in, 1)) == SUBREG
8491 : 0 : || CONSTANT_P (XEXP (in, 1))
8492 : 0 : || MEM_P (XEXP (in, 1))))
8493 : : {
8494 : : /* We need to compute the sum of a register or a MEM and another
8495 : : register, constant, or MEM, and put it into the reload
8496 : : register. The best possible way of doing this is if the machine
8497 : : has a three-operand ADD insn that accepts the required operands.
8498 : :
8499 : : The simplest approach is to try to generate such an insn and see if it
8500 : : is recognized and matches its constraints. If so, it can be used.
8501 : :
8502 : : It might be better not to actually emit the insn unless it is valid,
8503 : : but we need to pass the insn as an operand to `recog' and
8504 : : `extract_insn' and it is simpler to emit and then delete the insn if
8505 : : not valid than to dummy things up. */
8506 : :
8507 : 0 : rtx op0, op1, tem;
8508 : 0 : rtx_insn *insn;
8509 : 0 : enum insn_code code;
8510 : :
8511 : 0 : op0 = find_replacement (&XEXP (in, 0));
8512 : 0 : op1 = find_replacement (&XEXP (in, 1));
8513 : :
8514 : : /* Since constraint checking is strict, commutativity won't be
8515 : : checked, so we need to do that here to avoid spurious failure
8516 : : if the add instruction is two-address and the second operand
8517 : : of the add is the same as the reload reg, which is frequently
8518 : : the case. If the insn would be A = B + A, rearrange it so
8519 : : it will be A = A + B as constrain_operands expects. */
8520 : :
8521 : 0 : if (REG_P (XEXP (in, 1))
8522 : 0 : && REGNO (out) == REGNO (XEXP (in, 1)))
8523 : : tem = op0, op0 = op1, op1 = tem;
8524 : :
8525 : 0 : if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
8526 : 0 : in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
8527 : :
8528 : 0 : insn = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
8529 : 0 : if (insn)
8530 : : return insn;
8531 : :
8532 : : /* If that failed, we must use a conservative two-insn sequence.
8533 : :
8534 : : Use a move to copy one operand into the reload register. Prefer
8535 : : to reload a constant, MEM or pseudo since the move patterns can
8536 : : handle an arbitrary operand. If OP1 is not a constant, MEM or
8537 : : pseudo and OP1 is not a valid operand for an add instruction, then
8538 : : reload OP1.
8539 : :
8540 : : After reloading one of the operands into the reload register, add
8541 : : the reload register to the output register.
8542 : :
8543 : : If there is another way to do this for a specific machine, a
8544 : : DEFINE_PEEPHOLE should be specified that recognizes the sequence
8545 : : we emit below. */
8546 : :
8547 : 0 : code = optab_handler (add_optab, GET_MODE (out));
8548 : :
8549 : 0 : if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
8550 : 0 : || (REG_P (op1)
8551 : 0 : && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
8552 : 0 : || (code != CODE_FOR_nothing
8553 : 0 : && !insn_operand_matches (code, 2, op1)))
8554 : : tem = op0, op0 = op1, op1 = tem;
8555 : :
8556 : 0 : gen_reload (out, op0, opnum, type);
8557 : :
8558 : : /* If OP0 and OP1 are the same, we can use OUT for OP1.
8559 : : This fixes a problem on the 32K where the stack pointer cannot
8560 : : be used as an operand of an add insn. */
8561 : :
8562 : 0 : if (rtx_equal_p (op0, op1))
8563 : 0 : op1 = out;
8564 : :
8565 : 0 : insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
8566 : 0 : if (insn)
8567 : : {
8568 : : /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
8569 : 0 : set_dst_reg_note (insn, REG_EQUIV, in, out);
8570 : 0 : return insn;
8571 : : }
8572 : :
8573 : : /* If that failed, copy the address register to the reload register.
8574 : : Then add the constant to the reload register. */
8575 : :
8576 : 0 : gcc_assert (!reg_overlap_mentioned_p (out, op0));
8577 : 0 : gen_reload (out, op1, opnum, type);
8578 : 0 : insn = emit_insn (gen_add2_insn (out, op0));
8579 : 0 : set_dst_reg_note (insn, REG_EQUIV, in, out);
8580 : 0 : }
8581 : :
8582 : : /* If we need a memory location to do the move, do it that way. */
8583 : 0 : else if ((tem1 = replaced_subreg (in), tem2 = replaced_subreg (out),
8584 : 0 : (REG_P (tem1) && REG_P (tem2)))
8585 : 0 : && REGNO (tem1) < FIRST_PSEUDO_REGISTER
8586 : 0 : && REGNO (tem2) < FIRST_PSEUDO_REGISTER
8587 : 0 : && targetm.secondary_memory_needed (GET_MODE (out),
8588 : 0 : REGNO_REG_CLASS (REGNO (tem1)),
8589 : 0 : REGNO_REG_CLASS (REGNO (tem2))))
8590 : : {
8591 : : /* Get the memory to use and rewrite both registers to its mode. */
8592 : 0 : rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
8593 : :
8594 : 0 : if (GET_MODE (loc) != GET_MODE (out))
8595 : 0 : out = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (out));
8596 : :
8597 : 0 : if (GET_MODE (loc) != GET_MODE (in))
8598 : 0 : in = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (in));
8599 : :
8600 : 0 : gen_reload (loc, in, opnum, type);
8601 : 0 : gen_reload (out, loc, opnum, type);
8602 : : }
8603 : 0 : else if (REG_P (out) && UNARY_P (in))
8604 : : {
8605 : 0 : rtx op1;
8606 : 0 : rtx out_moded;
8607 : 0 : rtx_insn *set;
8608 : :
8609 : 0 : op1 = find_replacement (&XEXP (in, 0));
8610 : 0 : if (op1 != XEXP (in, 0))
8611 : 0 : in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);
8612 : :
8613 : : /* First, try a plain SET. */
8614 : 0 : set = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
8615 : 0 : if (set)
8616 : : return set;
8617 : :
8618 : : /* If that failed, move the inner operand to the reload
8619 : : register, and try the same unop with the inner expression
8620 : : replaced with the reload register. */
8621 : :
8622 : 0 : if (GET_MODE (op1) != GET_MODE (out))
8623 : 0 : out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
8624 : : else
8625 : : out_moded = out;
8626 : :
8627 : 0 : gen_reload (out_moded, op1, opnum, type);
8628 : :
8629 : 0 : rtx temp = gen_rtx_SET (out, gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
8630 : : out_moded));
8631 : 0 : rtx_insn *insn = emit_insn_if_valid_for_reload (temp);
8632 : 0 : if (insn)
8633 : : {
8634 : 0 : set_unique_reg_note (insn, REG_EQUIV, in);
8635 : 0 : return insn;
8636 : : }
8637 : :
8638 : 0 : fatal_insn ("failure trying to reload:", in);
8639 : : }
8640 : : /* If IN is a simple operand, use gen_move_insn. */
8641 : 0 : else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
8642 : : {
8643 : 0 : tem = emit_insn (gen_move_insn (out, in));
8644 : : /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note. */
8645 : 0 : mark_jump_label (in, tem, 0);
8646 : : }
8647 : :
8648 : 0 : else if (targetm.have_reload_load_address ())
8649 : 0 : emit_insn (targetm.gen_reload_load_address (out, in));
8650 : :
8651 : : /* Otherwise, just write (set OUT IN) and hope for the best. */
8652 : : else
8653 : 0 : emit_insn (gen_rtx_SET (out, in));
8654 : :
8655 : : /* Return the first insn emitted.
8656 : : We cannot just return get_last_insn, because there may have
8657 : : been multiple instructions emitted. Also note that gen_move_insn may
8658 : : emit more than one insn itself, so we cannot assume that there is one
8659 : : insn emitted per emit_insn call. */
8660 : :
8661 : 0 : return last ? NEXT_INSN (last) : get_insns ();
8662 : : }
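 : :
 : : /* Sketch of the PLUS fallback in gen_reload: reloading
 : :      (plus:SI (reg:SI fp) (const_int 400))
 : :    into reload register (reg:SI 3) first tries the single insn
 : :      (set (reg:SI 3) (plus:SI (reg:SI fp) (const_int 400)))
 : :    and, if that fails its constraints, falls back to roughly
 : :      (set (reg:SI 3) (const_int 400))
 : :      (set (reg:SI 3) (plus:SI (reg:SI 3) (reg:SI fp)))
 : :    with a REG_EQUIV note for the original sum attached to the add, so
 : :    find_equiv_reg can still recognize the value. */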
8663 : :
8664 : : /* Delete a previously made output-reload whose result we now believe
8665 : : is not needed. First we double-check.
8666 : :
8667 : : INSN is the insn now being processed.
8668 : : LAST_RELOAD_REG is the hard register number for which we want to delete
8669 : : the last output reload.
8670 : : J is the reload-number that originally used REG. The caller has made
8671 : : certain that reload J doesn't use REG any longer for input.
8672 : : NEW_RELOAD_REG is reload register that reload J is using for REG. */
8673 : :
8674 : : static void
8675 : 0 : delete_output_reload (rtx_insn *insn, int j, int last_reload_reg,
8676 : : rtx new_reload_reg)
8677 : : {
8678 : 0 : rtx_insn *output_reload_insn = spill_reg_store[last_reload_reg];
8679 : 0 : rtx reg = spill_reg_stored_to[last_reload_reg];
8680 : 0 : int k;
8681 : 0 : int n_occurrences;
8682 : 0 : int n_inherited = 0;
8683 : 0 : rtx substed;
8684 : 0 : unsigned regno;
8685 : 0 : int nregs;
8686 : :
8687 : : /* It is possible that this reload was only used to set another reload
8688 : : that we eliminated earlier, and that in doing so we deleted this instruction too. */
8689 : 0 : if (output_reload_insn->deleted ())
8690 : : return;
8691 : :
8692 : : /* Get the raw pseudo-register referred to. */
8693 : :
8694 : 0 : while (GET_CODE (reg) == SUBREG)
8695 : 0 : reg = SUBREG_REG (reg);
8696 : 0 : substed = reg_equiv_memory_loc (REGNO (reg));
8697 : :
8698 : : /* This is unsafe if the operand occurs more often in the current
8699 : : insn than it is inherited. */
8700 : 0 : for (k = n_reloads - 1; k >= 0; k--)
8701 : : {
8702 : 0 : rtx reg2 = rld[k].in;
8703 : 0 : if (! reg2)
8704 : 0 : continue;
8705 : 0 : if (MEM_P (reg2) || reload_override_in[k])
8706 : 0 : reg2 = rld[k].in_reg;
8707 : :
8708 : : if (AUTO_INC_DEC && rld[k].out && ! rld[k].out_reg)
8709 : : reg2 = XEXP (rld[k].in_reg, 0);
8710 : :
8711 : 0 : while (GET_CODE (reg2) == SUBREG)
8712 : 0 : reg2 = SUBREG_REG (reg2);
8713 : 0 : if (rtx_equal_p (reg2, reg))
8714 : : {
8715 : 0 : if (reload_inherited[k] || reload_override_in[k] || k == j)
8716 : 0 : n_inherited++;
8717 : : else
8718 : : return;
8719 : : }
8720 : : }
8721 : 0 : n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
8722 : 0 : if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
8723 : 0 : n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
8724 : : reg, 0);
8725 : 0 : if (substed)
8726 : 0 : n_occurrences += count_occurrences (PATTERN (insn),
8727 : 0 : eliminate_regs (substed, VOIDmode,
8728 : : NULL_RTX), 0);
8729 : 0 : for (rtx i1 = reg_equiv_alt_mem_list (REGNO (reg)); i1; i1 = XEXP (i1, 1))
8730 : : {
8731 : 0 : gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
8732 : 0 : n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
8733 : : }
8734 : 0 : if (n_occurrences > n_inherited)
8735 : : return;
8736 : :
8737 : 0 : regno = REGNO (reg);
8738 : 0 : nregs = REG_NREGS (reg);
8739 : :
8740 : : /* If the pseudo-reg we are reloading is no longer referenced
8741 : : anywhere between the store into it and here,
8742 : : and we're within the same basic block, then the value can only
8743 : : pass through the reload reg and end up here.
8744 : : Otherwise, give up--return. */
8745 : 0 : for (rtx_insn *i1 = NEXT_INSN (output_reload_insn);
8746 : 0 : i1 != insn; i1 = NEXT_INSN (i1))
8747 : : {
8748 : 0 : if (NOTE_INSN_BASIC_BLOCK_P (i1))
8749 : : return;
8750 : 0 : if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
8751 : 0 : && refers_to_regno_p (regno, regno + nregs, PATTERN (i1), NULL))
8752 : : {
8753 : : /* If this is a USE in front of INSN, we only have to check that
8754 : : there are no more references than accounted for by inheritance. */
8755 : 0 : while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
8756 : : {
8757 : 0 : n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
8758 : 0 : i1 = NEXT_INSN (i1);
8759 : : }
8760 : 0 : if (n_occurrences <= n_inherited && i1 == insn)
8761 : : break;
8762 : : return;
8763 : : }
8764 : : }
8765 : :
8766 : : /* We will be deleting the insn. Remove the spill reg information. */
8767 : 0 : for (k = hard_regno_nregs (last_reload_reg, GET_MODE (reg)); k-- > 0; )
8768 : : {
8769 : 0 : spill_reg_store[last_reload_reg + k] = 0;
8770 : 0 : spill_reg_stored_to[last_reload_reg + k] = 0;
8771 : : }
8772 : :
8773 : : /* The caller has already checked that REG dies or is set in INSN.
8774 : : It has also checked that we are optimizing, and thus some
8775 : : inaccuracies in the debugging information are acceptable.
8776 : : So we could just delete output_reload_insn. But in some cases
8777 : : we can improve the debugging information without sacrificing
8778 : : optimization - maybe even improving the code: See if the pseudo
8779 : : reg has been completely replaced with reload regs. If so, delete
8780 : : the store insn and forget we had a stack slot for the pseudo. */
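     : : /* The conditions checked below include that the pseudo is set exactly
     : :    once, dies exactly once (here, in INSN), and is referenced in only
     : :    one ordinary basic block, so any remaining reference must lie in
     : :    the range scanned by the loops that follow.  */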
8781 : 0 : if (rld[j].out != rld[j].in
8782 : 0 : && REG_N_DEATHS (REGNO (reg)) == 1
8783 : 0 : && REG_N_SETS (REGNO (reg)) == 1
8784 : 0 : && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
8785 : 0 : && find_regno_note (insn, REG_DEAD, REGNO (reg)))
8786 : : {
8787 : 0 : rtx_insn *i2;
8788 : :
8789 : : /* We know that it was used only between here and the beginning of
8790 : : the current basic block. (We also know that the last use before
8791 : : INSN was the output reload we are thinking of deleting, but never
8792 : : mind that.) Search that range; see if any ref remains. */
8793 : 0 : for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8794 : : {
8795 : 0 : rtx set = single_set (i2);
8796 : :
8797 : : /* Uses which just store in the pseudo don't count,
8798 : : since if they are the only uses, they are dead. */
8799 : 0 : if (set != 0 && SET_DEST (set) == reg)
8800 : 0 : continue;
8801 : 0 : if (LABEL_P (i2) || JUMP_P (i2))
8802 : : break;
8803 : 0 : if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
8804 : 0 : && reg_mentioned_p (reg, PATTERN (i2)))
8805 : : {
8806 : : /* Some other ref remains; just delete the output reload we
8807 : : know to be dead. */
8808 : 0 : delete_address_reloads (output_reload_insn, insn);
8809 : 0 : delete_insn (output_reload_insn);
8810 : 0 : return;
8811 : : }
8812 : : }
8813 : :
8814 : : /* Delete the now-dead stores into this pseudo. Note that this
8815 : : loop also takes care of deleting output_reload_insn. */
8816 : 0 : for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8817 : : {
8818 : 0 : rtx set = single_set (i2);
8819 : :
8820 : 0 : if (set != 0 && SET_DEST (set) == reg)
8821 : : {
8822 : 0 : delete_address_reloads (i2, insn);
8823 : 0 : delete_insn (i2);
8824 : : }
8825 : 0 : if (LABEL_P (i2) || JUMP_P (i2))
8826 : : break;
8827 : : }
8828 : :
8829 : : /* For the debugging info, say the pseudo lives in this reload reg. */
8830 : 0 : reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
8831 : 0 : if (ira_conflicts_p)
8832 : : /* Inform IRA about the change. */
8833 : 0 : ira_mark_allocation_change (REGNO (reg));
8834 : 0 : alter_reg (REGNO (reg), -1, false);
8835 : : }
8836 : : else
8837 : : {
8838 : 0 : delete_address_reloads (output_reload_insn, insn);
8839 : 0 : delete_insn (output_reload_insn);
8840 : : }
8841 : : }
8842 : :
8843 : : /* We are going to delete DEAD_INSN. Recursively delete loads of
8844 : : reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8845 : : CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8846 : : static void
8847 : 0 : delete_address_reloads (rtx_insn *dead_insn, rtx_insn *current_insn)
8848 : : {
8849 : 0 : rtx set = single_set (dead_insn);
8850 : 0 : rtx set2, dst;
8851 : 0 : rtx_insn *prev, *next;
8852 : 0 : if (set)
8853 : : {
8854 : 0 : rtx dst = SET_DEST (set);
8855 : 0 : if (MEM_P (dst))
8856 : 0 : delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8857 : : }
8858 : : /* If we deleted the store from a reloaded post_{in,de}c expression,
8859 : : we can delete the matching adds. */
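     : : /* That is, a shape like the following (a sketch; R and N are just
     : :    placeholders):
     : :      prev:      (set (reg R) (plus (reg R) (const_int N)))
     : :      dead_insn: (set (mem (reg R)) ...)
     : :      next:      (set (reg R) (plus (reg R) (const_int -N)))
     : :    With the store gone, the two adds only cancel each other, so the
     : :    checks below verify this pattern and delete them as well.  */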
8860 : 0 : prev = PREV_INSN (dead_insn);
8861 : 0 : next = NEXT_INSN (dead_insn);
8862 : 0 : if (! prev || ! next)
8863 : : return;
8864 : 0 : set = single_set (next);
8865 : 0 : set2 = single_set (prev);
8866 : 0 : if (! set || ! set2
8867 : 0 : || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8868 : 0 : || !CONST_INT_P (XEXP (SET_SRC (set), 1))
8869 : 0 : || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
8870 : : return;
8871 : 0 : dst = SET_DEST (set);
8872 : 0 : if (! rtx_equal_p (dst, SET_DEST (set2))
8873 : 0 : || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8874 : 0 : || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8875 : 0 : || (INTVAL (XEXP (SET_SRC (set), 1))
8876 : 0 : != -INTVAL (XEXP (SET_SRC (set2), 1))))
8877 : 0 : return;
8878 : 0 : delete_related_insns (prev);
8879 : 0 : delete_related_insns (next);
8880 : : }
8881 : :
8882 : : /* Subfunction of delete_address_reloads: process registers found in X. */
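     : : /* For each reload register found in X, we scan backwards for the insn
     : :    that loaded it; if that load was emitted by reload, sets nothing
     : :    else, and its value is not needed by any later insn or by a reload
     : :    of CURRENT_INSN, the load is deleted as well, after recursing into
     : :    its source address.  */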
8883 : : static void
8884 : 0 : delete_address_reloads_1 (rtx_insn *dead_insn, rtx x, rtx_insn *current_insn)
8885 : : {
8886 : 0 : rtx_insn *prev, *i2;
8887 : 0 : rtx set, dst;
8888 : 0 : int i, j;
8889 : 0 : enum rtx_code code = GET_CODE (x);
8890 : :
8891 : 0 : if (code != REG)
8892 : : {
8893 : 0 : const char *fmt = GET_RTX_FORMAT (code);
8894 : 0 : for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8895 : : {
8896 : 0 : if (fmt[i] == 'e')
8897 : 0 : delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
8898 : 0 : else if (fmt[i] == 'E')
8899 : : {
8900 : 0 : for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8901 : 0 : delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
8902 : : current_insn);
8903 : : }
8904 : : }
8905 : : return;
8906 : : }
8907 : :
8908 : 0 : if (spill_reg_order[REGNO (x)] < 0)
8909 : : return;
8910 : :
8911 : : /* Scan backwards for the insn that sets x. This might be quite a way
8912 : : back due to inheritance. */
8913 : 0 : for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
8914 : : {
8915 : 0 : code = GET_CODE (prev);
8916 : 0 : if (code == CODE_LABEL || code == JUMP_INSN)
8917 : : return;
8918 : 0 : if (!INSN_P (prev))
8919 : 0 : continue;
8920 : 0 : if (reg_set_p (x, PATTERN (prev)))
8921 : : break;
8922 : 0 : if (reg_referenced_p (x, PATTERN (prev)))
8923 : : return;
8924 : : }
8925 : 0 : if (! prev || INSN_UID (prev) < reload_first_uid)
8926 : : return;
8927 : : /* Check that PREV only sets the reload register. */
8928 : 0 : set = single_set (prev);
8929 : 0 : if (! set)
8930 : : return;
8931 : 0 : dst = SET_DEST (set);
8932 : 0 : if (!REG_P (dst)
8933 : 0 : || ! rtx_equal_p (dst, x))
8934 : 0 : return;
8935 : 0 : if (! reg_set_p (dst, PATTERN (dead_insn)))
8936 : : {
8937 : : /* Check if DST was used in a later insn -
8938 : : it might have been inherited. */
8939 : 0 : for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
8940 : : {
8941 : 0 : if (LABEL_P (i2))
8942 : : break;
8943 : 0 : if (! INSN_P (i2))
8944 : 0 : continue;
8945 : 0 : if (reg_referenced_p (dst, PATTERN (i2)))
8946 : : {
8947 : : /* If there is a reference to the register in the current insn,
8948 : : it might be loaded in a non-inherited reload. If no other
8949 : : reload uses it, that means the register is set before
8950 : : referenced. */
8951 : 0 : if (i2 == current_insn)
8952 : : {
8953 : 0 : for (j = n_reloads - 1; j >= 0; j--)
8954 : 0 : if ((rld[j].reg_rtx == dst && reload_inherited[j])
8955 : 0 : || reload_override_in[j] == dst)
8956 : : return;
8957 : 0 : for (j = n_reloads - 1; j >= 0; j--)
8958 : 0 : if (rld[j].in && rld[j].reg_rtx == dst)
8959 : : break;
8960 : 0 : if (j >= 0)
8961 : : break;
8962 : : }
8963 : : return;
8964 : : }
8965 : 0 : if (JUMP_P (i2))
8966 : : break;
8967 : : /* If DST is still live at CURRENT_INSN, check if it is used for
8968 : : any reload. Note that even if CURRENT_INSN sets DST, we still
8969 : : have to check the reloads. */
8970 : 0 : if (i2 == current_insn)
8971 : : {
8972 : 0 : for (j = n_reloads - 1; j >= 0; j--)
8973 : 0 : if ((rld[j].reg_rtx == dst && reload_inherited[j])
8974 : 0 : || reload_override_in[j] == dst)
8975 : : return;
8976 : : /* ??? We can't finish the loop here, because dst might be
8977 : : allocated to a pseudo in this block if no reload in this
8978 : : block needs any of the classes containing DST - see
8979 : : spill_hard_reg. There is no easy way to tell this, so we
8980 : : have to scan till the end of the basic block. */
8981 : : }
8982 : 0 : if (reg_set_p (dst, PATTERN (i2)))
8983 : : break;
8984 : : }
8985 : : }
8986 : 0 : delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
8987 : 0 : reg_reloaded_contents[REGNO (dst)] = -1;
8988 : 0 : delete_insn (prev);
8989 : : }
8990 : :
8991 : : /* Output reload-insns to reload VALUE into RELOADREG.
8992 : : VALUE is an autoincrement or autodecrement RTX whose operand
8993 : : is a register or memory location;
8994 : : so reloading involves incrementing that location.
8995 : : IN is either identical to VALUE, or some cheaper place to reload from.
8996 : :
8997 : : INC_AMOUNT is the number to increment or decrement by (always positive).
8998 : : This cannot be deduced from VALUE. */
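     : : /* For example (a sketch, not tied to any particular target): reloading
     : :    VALUE = (post_inc:P (reg A)) with INC_AMOUNT 4 first copies the old
     : :    contents of A into RELOADREG and then adds 4 to A, whereas reloading
     : :    a pre_inc adds 4 first and then copies the incremented value into
     : :    RELOADREG for use as the address.  */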
8999 : :
9000 : : static void
9001 : 0 : inc_for_reload (rtx reloadreg, rtx in, rtx value, poly_int64 inc_amount)
9002 : : {
9003 : : /* REG or MEM to be copied and incremented. */
9004 : 0 : rtx incloc = find_replacement (&XEXP (value, 0));
9005 : : /* Nonzero if increment after copying. */
9006 : 0 : int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
9007 : 0 : || GET_CODE (value) == POST_MODIFY);
9008 : 0 : rtx_insn *last;
9009 : 0 : rtx inc;
9010 : 0 : rtx_insn *add_insn;
9011 : 0 : int code;
9012 : 0 : rtx real_in = in == value ? incloc : in;
9013 : :
9014 : : /* No hard register is equivalent to this register after
9015 : : inc/dec operation. If REG_LAST_RELOAD_REG were nonzero,
9016 : : we could inc/dec that register as well (maybe even using it for
9017 : : the source), but I'm not sure it's worth worrying about. */
9018 : 0 : if (REG_P (incloc))
9019 : 0 : reg_last_reload_reg[REGNO (incloc)] = 0;
9020 : :
9021 : 0 : if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
9022 : : {
9023 : 0 : gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
9024 : 0 : inc = find_replacement (&XEXP (XEXP (value, 1), 1));
9025 : : }
9026 : : else
9027 : : {
9028 : 0 : if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
9029 : 0 : inc_amount = -inc_amount;
9030 : :
9031 : 0 : inc = gen_int_mode (inc_amount, Pmode);
9032 : : }
9033 : :
9034 : : /* If this is post-increment, first copy the location to the reload reg. */
9035 : 0 : if (post && real_in != reloadreg)
9036 : 0 : emit_insn (gen_move_insn (reloadreg, real_in));
9037 : :
9038 : 0 : if (in == value)
9039 : : {
9040 : : /* See if we can directly increment INCLOC. Use a method similar to
9041 : : that in gen_reload. */
9042 : :
9043 : 0 : last = get_last_insn ();
9044 : 0 : add_insn = emit_insn (gen_rtx_SET (incloc,
9045 : : gen_rtx_PLUS (GET_MODE (incloc),
9046 : : incloc, inc)));
9047 : :
9048 : 0 : code = recog_memoized (add_insn);
9049 : 0 : if (code >= 0)
9050 : : {
9051 : 0 : extract_insn (add_insn);
9052 : 0 : if (constrain_operands (1, get_enabled_alternatives (add_insn)))
9053 : : {
9054 : : /* If this is a pre-increment and we have incremented the value
9055 : : where it lives, copy the incremented value to RELOADREG to
9056 : : be used as an address. */
9057 : :
9058 : 0 : if (! post)
9059 : 0 : emit_insn (gen_move_insn (reloadreg, incloc));
9060 : 0 : return;
9061 : : }
9062 : : }
9063 : 0 : delete_insns_since (last);
9064 : : }
9065 : :
9066 : : /* If we couldn't do the increment directly, we must increment in
9067 : : RELOADREG. The way we do this depends on whether this is pre- or
9068 : : post-increment. For pre-increment, copy INCLOC to the reload register,
9069 : : increment it there, then save it back. */
9070 : :
9071 : 0 : if (! post)
9072 : : {
9073 : 0 : if (in != reloadreg)
9074 : 0 : emit_insn (gen_move_insn (reloadreg, real_in));
9075 : 0 : emit_insn (gen_add2_insn (reloadreg, inc));
9076 : 0 : emit_insn (gen_move_insn (incloc, reloadreg));
9077 : : }
9078 : : else
9079 : : {
9080 : : /* Postincrement.
9081 : : Because this might be a jump insn or a compare, and because RELOADREG
9082 : : may not be available after the insn in an input reload, we must do
9083 : : the incrementation before the insn we are reloading for.
9084 : :
9085 : : We have already copied IN to RELOADREG. Increment the copy in
9086 : : RELOADREG, save that back, then decrement RELOADREG so it has
9087 : : the original value. */
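     : : /* In other words, the sequence emitted below is roughly:
     : :      reloadreg <- reloadreg + inc
     : :      incloc    <- reloadreg
     : :      reloadreg <- reloadreg - inc
     : :    leaving INCLOC incremented and RELOADREG holding the original,
     : :    pre-increment value that the reloaded insn expects.  */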
9088 : :
9089 : 0 : emit_insn (gen_add2_insn (reloadreg, inc));
9090 : 0 : emit_insn (gen_move_insn (incloc, reloadreg));
9091 : 0 : if (CONST_INT_P (inc))
9092 : 0 : emit_insn (gen_add2_insn (reloadreg,
9093 : 0 : gen_int_mode (-INTVAL (inc),
9094 : 0 : GET_MODE (reloadreg))));
9095 : : else
9096 : 0 : emit_insn (gen_sub2_insn (reloadreg, inc));
9097 : : }
9098 : : }
|