/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "machmode.h"
#include "hard-reg-set.h"
#include "rtl.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-config.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "regs.h"
#include "basic-block.h"
#include "reload.h"
#include "recog.h"
#include "output.h"
#include "real.h"
#include "toplev.h"
#include "except.h"
#include "tree.h"

/* This file contains the reload pass of the compiler, which is
   run after register allocation has been done.  It checks that
   each insn is valid (operands required to be in registers really
   are in registers of the proper class) and fixes up invalid ones
   by copying values temporarily into registers for the insns
   that need them.

   The results of register allocation are described by the vector
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
   can be used to find which hard reg, if any, a pseudo reg is in.

   The technique we always use is to free up a few hard regs that are
   called ``reload regs'', and for each place where a pseudo reg
   must be in a hard reg, copy it temporarily into one of the reload regs.

   Reload regs are allocated locally for every instruction that needs
   reloads.  When there are pseudos which are allocated to a register that
   has been chosen as a reload reg, such pseudos must be ``spilled''.
   This means that they go to other hard regs, or to stack slots if no other
   available hard regs can be found.  Spilling can invalidate more
   insns, requiring additional need for reloads, so we must keep checking
   until the process stabilizes.

   For machines with different classes of registers, we must keep track
   of the register class needed for each reload, and make sure that
   we allocate enough reload registers of each class.

   The file reload.c contains the code that checks one insn for
   validity and reports the reloads that it needs.  This file
   is in charge of scanning the entire rtl code, accumulating the
   reload needs, spilling, assigning reload registers to use for
   fixing up each insn, and generating the new insns to copy values
   into the reload registers.  */
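
/* As a purely illustrative sketch of the idea above (the register numbers
   and RTL here are hypothetical, not output of the compiler): if an insn
   such as

	(set (reg:SI 117) (plus:SI (reg:SI 117) (reg:SI 118)))

   reaches reload with neither pseudo 117 nor pseudo 118 assigned a hard
   register, reload may choose two hard regs as reload regs, emit loads of
   the pseudos' stack slots into them before the insn, rewrite the insn to
   use those hard regs, and emit a store from the output reload reg back to
   pseudo 117's stack slot after the insn.  */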
\f
/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* We allocate reg_equiv_memory_loc inside a varray so that the garbage
   collector can keep track of what is inside.  */
varray_type reg_equiv_memory_loc_varray;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine.)  */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;

/* Widest width in which each pseudo reg is referred to (via subreg).  */
static unsigned int *reg_max_ref_width;

/* Element N is the list of insns that initialized reg N from its equivalent
   constant or memory slot.  */
static rtx *reg_equiv_init;

/* Vector to remember old contents of reg_renumber before spilling.  */
static short *reg_old_renumber;

/* During reload_as_needed, element N contains the last pseudo regno reloaded
   into hard register N.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   hard register N was last used.  Its contents are significant only
   when reg_reloaded_valid is set for this register.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
static HARD_REG_SET reg_reloaded_valid;
/* Indicate if the register was dead at the end of the reload.
   This is only valid if reg_reloaded_contents is set and valid.  */
static HARD_REG_SET reg_reloaded_dead;

/* Indicate whether the register's current value is one that is not
   safe to retain across a call, even for registers that are normally
   call-saved.  */
static HARD_REG_SET reg_reloaded_call_part_clobbered;

/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This is the register that was stored with spill_reg_store.  This is a
   copy of reload_out / reload_out_reg when the value was stored; if
   reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.

   ?!?  This is no longer accurate.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

/* This reg set indicates registers that can't be used as spill registers for
   the currently processed insn.  These are the hard registers which are live
   during the insn, but not allocated to pseudos, as well as fixed
   registers.  */
static HARD_REG_SET bad_spill_regs;

/* These are the hard registers that can't be used as spill register for any
   insn.  This includes registers used for user variables and registers that
   we can't eliminate.  A register that appears in this set also can't be used
   to retry register allocation.  */
static HARD_REG_SET bad_spill_regs_global;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `n_spills' is the number of
   elements that are actually valid; new ones are added at the end.

   Both spill_regs and spill_reg_order are used on two occasions:
   once during find_reload_regs, where they keep track of the spill registers
   for a single insn, but also during reload_as_needed where they show all
   the registers ever used by reload.  For the latter case, the information
   is calculated during finish_spills.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* This vector of reg sets indicates, for each pseudo, which hard registers
   may not be used for retrying global allocation because the register was
   formerly spilled from one of them.  If we allowed reallocating a pseudo to
   a register that it was already allocated to, reload might not
   terminate.  */
static HARD_REG_SET *pseudo_previous_regs;

/* This vector of reg sets indicates, for each pseudo, which hard
   registers may not be used for retrying global allocation because they
   are used as spill registers during one of the insns in which the
   pseudo is live.  */
static HARD_REG_SET *pseudo_forbidden_regs;

/* All hard regs that have been used as spill registers for any insn are
   marked in this set.  */
static HARD_REG_SET used_spill_regs;

/* Index of last register assigned as a spill register.  We allocate in
   a round-robin fashion.  */
static int last_spill_reg;

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */
static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */
char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */
char double_reg_address_ok;

/* Record the stack slot for each spilled hard register.  */
static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */
static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Record which pseudos needed to be spilled.  */
static regset_head spilled_pseudos;

/* Used for communication between order_regs_for_reload and count_pseudo.
   Used to avoid counting one pseudo twice.  */
static regset_head pseudos_counted;

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */
int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */
int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  */
enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */
struct obstack reload_obstack;

/* Points to the beginning of the reload_obstack.  All insn_chain structures
   are allocated first.  */
char *reload_startobj;

/* The point after all insn_chain structures.  Used to quickly deallocate
   memory allocated in copy_reloads during calculate_needs_all_insns.  */
char *reload_firstobj;

/* This points before all local rtl generated by register elimination.
   Used to quickly free all memory after processing one insn.  */
static char *reload_insn_firstobj;

/* List of insn_chain instructions, one for every insn that reload needs to
   examine.  */
struct insn_chain *reload_insn_chain;

/* List of all insns needing reloads.  */
static struct insn_chain *insns_need_reload;
\f
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  HOST_WIDE_INT initial_offset;	/* Initial difference between values.  */
  int can_eliminate;		/* Nonzero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
				   insns made by reload.  */
  HOST_WIDE_INT offset;		/* Current offset between the two regs.  */
  HOST_WIDE_INT previous_offset;/* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
};

static struct elim_table *reg_eliminate = 0;

/* This is an intermediate structure to initialize the table.  It has
   exactly the members provided by ELIMINABLE_REGS.  */
static const struct elim_table_1
{
  const int from;
  const int to;
} reg_eliminate_1[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

#define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
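
/* For illustration only (a hypothetical target definition, not part of this
   file): ELIMINABLE_REGS typically lists (from, to) pairs in order of
   preference, e.g.

	#define ELIMINABLE_REGS					\
	{{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },	\
	 { ARG_POINTER_REGNUM,   HARD_FRAME_POINTER_REGNUM },	\
	 { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM },	\
	 { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM }}

   with which NUM_ELIMINABLE_REGS above would evaluate to 4.  */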

/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If nonzero, we use a new copy of each
   replacement result in any insns encountered.  */
int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;
/* And the number of registers that are equivalent to a constant that
   can be eliminated to frame_pointer / arg_pointer + constant.  */
static int num_eliminable_invariants;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the difference of the
   number of the label and the first label number.  We can't offset the
   pointer itself as this can cause problems on machines with segmented
   memory.  The first table is an array of flags that records whether we
   have yet encountered a label and the second table is an array of arrays,
   one entry in the latter array for each elimination.  */

static int first_label_num;
static char *offsets_known_at;
static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];

/* Number of labels in the current function.  */

static int num_labels;
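
/* A sketch of how these two tables are indexed (this mirrors the way
   set_offsets_for_label and set_label_offsets use them; LAB and E are
   placeholder names for a CODE_LABEL and an elimination index):

	if (offsets_known_at[CODE_LABEL_NUMBER (lab) - first_label_num])
	  offset = offsets_at[CODE_LABEL_NUMBER (lab) - first_label_num][e];

   i.e. rows are labels, biased by first_label_num, and the columns are the
   NUM_ELIMINABLE_REGS eliminations.  */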
\f
static void replace_pseudos_in (rtx *, enum machine_mode, rtx);
static void maybe_fix_stack_asms (void);
static void copy_reloads (struct insn_chain *);
static void calculate_needs_all_insns (int);
static int find_reg (struct insn_chain *, int);
static void find_reload_regs (struct insn_chain *);
static void select_reload_regs (void);
static void delete_caller_save_insns (void);

static void spill_failure (rtx, enum reg_class);
static void count_spilled_pseudo (int, int, int);
static void delete_dead_insn (rtx);
static void alter_reg (int, int);
static void set_label_offsets (rtx, rtx, int);
static void check_eliminable_occurrences (rtx);
static void elimination_effects (rtx, enum machine_mode);
static int eliminate_regs_in_insn (rtx, int);
static void update_eliminable_offsets (void);
static void mark_not_eliminable (rtx, rtx, void *);
static void set_initial_elim_offsets (void);
static void verify_initial_elim_offsets (void);
static void set_initial_label_offsets (void);
static void set_offsets_for_label (rtx);
static void init_elim_table (void);
static void update_eliminables (HARD_REG_SET *);
static void spill_hard_reg (unsigned int, int);
static int finish_spills (int);
static void ior_hard_reg_set (HARD_REG_SET *, HARD_REG_SET *);
static void scan_paradoxical_subregs (rtx);
static void count_pseudo (int);
static void order_regs_for_reload (struct insn_chain *);
static void reload_as_needed (int);
static void forget_old_reloads_1 (rtx, rtx, void *);
static int reload_reg_class_lower (const void *, const void *);
static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
				    enum machine_mode);
static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
				     enum machine_mode);
static int reload_reg_free_p (unsigned int, int, enum reload_type);
static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
					rtx, rtx, int, int);
static int free_for_value_p (int, enum machine_mode, int, enum reload_type,
			     rtx, rtx, int, int);
static int function_invariant_p (rtx);
static int reload_reg_reaches_end_p (unsigned int, int, enum reload_type);
static int allocate_reload_reg (struct insn_chain *, int, int);
static int conflicts_with_override (rtx);
static void failed_reload (rtx, int);
static int set_reload_reg (int, int);
static void choose_reload_regs_init (struct insn_chain *, rtx *);
static void choose_reload_regs (struct insn_chain *);
static void merge_assigned_reloads (rtx);
static void emit_input_reload_insns (struct insn_chain *, struct reload *,
				     rtx, int);
static void emit_output_reload_insns (struct insn_chain *, struct reload *,
				      int);
static void do_input_reload (struct insn_chain *, struct reload *, int);
static void do_output_reload (struct insn_chain *, struct reload *, int);
static bool inherit_piecemeal_p (int, int);
static void emit_reload_insns (struct insn_chain *);
static void delete_output_reload (rtx, int, int);
static void delete_address_reloads (rtx, rtx);
static void delete_address_reloads_1 (rtx, rtx, rtx);
static rtx inc_for_reload (rtx, rtx, rtx, int);
#ifdef AUTO_INC_DEC
static void add_auto_inc_notes (rtx, rtx);
#endif
static void copy_eh_notes (rtx, rtx);
\f
/* Initialize the reload pass once per compilation.  */

void
init_reload (void)
{
  int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  rtx tem
    = gen_rtx_MEM (Pmode,
		   gen_rtx_PLUS (Pmode,
				 gen_rtx_REG (Pmode,
					      LAST_VIRTUAL_REGISTER + 1),
				 GEN_INT (4)));
  spill_indirect_levels = 0;

  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx_MEM (Pmode, tem);
    }

  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx_PLUS (Pmode,
			  gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
			  gen_rtx_REG (Pmode, i));

      /* This way, we make sure that reg+reg is an offsettable address.  */
      tem = plus_constant (tem, 4);

      if (memory_address_p (QImode, tem))
	{
	  double_reg_address_ok = 1;
	  break;
	}
    }

  /* Initialize obstack for our rtl allocation.  */
  gcc_obstack_init (&reload_obstack);
  reload_startobj = obstack_alloc (&reload_obstack, 0);

  INIT_REG_SET (&spilled_pseudos);
  INIT_REG_SET (&pseudos_counted);
  VARRAY_RTX_INIT (reg_equiv_memory_loc_varray, 0, "reg_equiv_memory_loc");
}
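
/* Illustrative note on the probing loop above (hypothetical target): it
   first asks whether (mem (plus (reg) (const_int 4))) is itself usable as
   an address; if so spill_indirect_levels becomes 1, another MEM is wrapped
   around it, and the test repeats.  So a machine that accepts one level of
   indirection in addresses ends up with spill_indirect_levels == 1, one
   that also accepts (mem (mem ...)) as an address gets 2, and a machine
   that rejects indirection entirely keeps 0.  */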

/* List of insn chains that are currently unused.  */
static struct insn_chain *unused_insn_chains = 0;

/* Allocate an empty insn_chain structure.  */
struct insn_chain *
new_insn_chain (void)
{
  struct insn_chain *c;

  if (unused_insn_chains == 0)
    {
      c = obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
      INIT_REG_SET (&c->live_throughout);
      INIT_REG_SET (&c->dead_or_set);
    }
  else
    {
      c = unused_insn_chains;
      unused_insn_chains = c->next;
    }
  c->is_caller_save_insn = 0;
  c->need_operand_change = 0;
  c->need_reload = 0;
  c->need_elim = 0;
  return c;
}

/* Small utility function to set all regs in hard reg set TO which are
   allocated to pseudos in regset FROM.  */

void
compute_use_by_pseudos (HARD_REG_SET *to, regset from)
{
  unsigned int regno;

  EXECUTE_IF_SET_IN_REG_SET
    (from, FIRST_PSEUDO_REGISTER, regno,
     {
       int r = reg_renumber[regno];
       int nregs;

       if (r < 0)
	 {
	   /* reload_combine uses the information from
	      BASIC_BLOCK->global_live_at_start, which might still
	      contain registers that have not actually been allocated
	      since they have an equivalence.  */
	   if (! reload_completed)
	     abort ();
	 }
       else
	 {
	   nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (regno)];
	   while (nregs-- > 0)
	     SET_HARD_REG_BIT (*to, r + nregs);
	 }
     });
}
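
/* A minimal usage sketch (assuming CHAIN is a struct insn_chain * as built
   by new_insn_chain): a typical caller first converts the pseudo regset to
   a hard reg set and then adds the hard regs the pseudos occupy, e.g.

	HARD_REG_SET live;
	REG_SET_TO_HARD_REG_SET (live, &chain->live_throughout);
	compute_use_by_pseudos (&live, &chain->live_throughout);

   after which LIVE contains every hard register that is live across the
   insn, whether directly or via an allocated pseudo.  */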

/* Replace all pseudos found in LOC with their corresponding
   equivalences.  */

static void
replace_pseudos_in (rtx *loc, enum machine_mode mem_mode, rtx usage)
{
  rtx x = *loc;
  enum rtx_code code;
  const char *fmt;
  int i, j;

  if (! x)
    return;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);

      if (regno < FIRST_PSEUDO_REGISTER)
	return;

      x = eliminate_regs (x, mem_mode, usage);
      if (x != *loc)
	{
	  *loc = x;
	  replace_pseudos_in (loc, mem_mode, usage);
	  return;
	}

      if (reg_equiv_constant[regno])
	*loc = reg_equiv_constant[regno];
      else if (reg_equiv_mem[regno])
	*loc = reg_equiv_mem[regno];
      else if (reg_equiv_address[regno])
	*loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
      else if (!REG_P (regno_reg_rtx[regno])
	       || REGNO (regno_reg_rtx[regno]) != regno)
	*loc = regno_reg_rtx[regno];
      else
	abort ();

      return;
    }
  else if (code == MEM)
    {
      replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
      return;
    }

  /* Process each of our operands recursively.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
}

\f
/* Global variables used by reload and its subroutines.  */

/* Set during calculate_needs if an insn needs register elimination.  */
static int something_needs_elimination;
/* Set during calculate_needs if an insn needs an operand changed.  */
int something_needs_operands_changed;

/* Nonzero means we couldn't get enough spill regs.  */
static int failure;

/* Main entry point for the reload pass.

   FIRST is the first insn of the function being compiled.

   GLOBAL nonzero means we were called from global_alloc
   and should attempt to reallocate any pseudoregs that we
   displace from hard regs we will use for reloads.
   If GLOBAL is zero, we do not have enough information to do that,
   so any pseudo reg that is spilled must go to the stack.

   Return value is nonzero if reload failed
   and we must not do any more for this function.  */

int
reload (rtx first, int global)
{
  int i;
  rtx insn;
  struct elim_table *ep;
  basic_block bb;

  /* Make sure even insns with volatile mem refs are recognizable.  */
  init_recog ();

  failure = 0;

  reload_firstobj = obstack_alloc (&reload_obstack, 0);

  /* Make sure that the last insn in the chain
     is not something that needs reloading.  */
  emit_note (NOTE_INSN_DELETED);

  /* Enable find_equiv_reg to distinguish insns made by reload.  */
  reload_first_uid = get_max_uid ();

#ifdef SECONDARY_MEMORY_NEEDED
  /* Initialize the secondary memory table.  */
  clear_secondary_mem ();
#endif

  /* We don't have a stack slot for any spill reg yet.  */
  memset (spill_stack_slot, 0, sizeof spill_stack_slot);
  memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);

  /* Initialize the save area information for caller-save, in case some
     are needed.  */
  init_save_areas ();

  /* Compute which hard registers are now in use
     as homes for pseudo registers.
     This is done here rather than (eg) in global_alloc
     because this point is reached even if not optimizing.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    mark_home_live (i);

  /* A function that receives a nonlocal goto must save all call-saved
     registers.  */
  if (current_function_has_nonlocal_label)
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
	regs_ever_live[i] = 1;

#ifdef NON_SAVING_SETJMP
  /* A function that calls setjmp should save and restore all the
     call-saved registers on a system where longjmp clobbers them.  */
  if (NON_SAVING_SETJMP && current_function_calls_setjmp)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (! call_used_regs[i])
	  regs_ever_live[i] = 1;
    }
#endif

  /* Find all the pseudo registers that didn't get hard regs
     but do have known equivalent constants or memory slots.
     These include parameters (known equivalent to parameter slots)
     and cse'd or loop-moved constant memory addresses.

     Record constant equivalents in reg_equiv_constant
     so they will be substituted by find_reloads.
     Record memory equivalents in reg_mem_equiv so they can
     be substituted eventually by altering the REG-rtx's.  */

  reg_equiv_constant = xcalloc (max_regno, sizeof (rtx));
  reg_equiv_mem = xcalloc (max_regno, sizeof (rtx));
  reg_equiv_init = xcalloc (max_regno, sizeof (rtx));
  reg_equiv_address = xcalloc (max_regno, sizeof (rtx));
  reg_max_ref_width = xcalloc (max_regno, sizeof (int));
  reg_old_renumber = xcalloc (max_regno, sizeof (short));
  memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
  pseudo_forbidden_regs = xmalloc (max_regno * sizeof (HARD_REG_SET));
  pseudo_previous_regs = xcalloc (max_regno, sizeof (HARD_REG_SET));

  CLEAR_HARD_REG_SET (bad_spill_regs_global);

  /* Look for REG_EQUIV notes; record what each pseudo is equivalent
     to.  Also find all paradoxical subregs and find largest such for
     each pseudo.  */

  num_eliminable_invariants = 0;
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      /* We may introduce USEs that we want to remove at the end, so
	 we'll mark them with QImode.  Make sure there are no
	 previously-marked insns left by say regmove.  */
      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
	  && GET_MODE (insn) != VOIDmode)
	PUT_MODE (insn, VOIDmode);

      if (set != 0 && REG_P (SET_DEST (set)))
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  if (note
	      && (! function_invariant_p (XEXP (note, 0))
		  || ! flag_pic
		  /* A function invariant is often CONSTANT_P but may
		     include a register.  We promise to only pass
		     CONSTANT_P objects to LEGITIMATE_PIC_OPERAND_P.  */
		  || (CONSTANT_P (XEXP (note, 0))
		      && LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))))
	    {
	      rtx x = XEXP (note, 0);
	      i = REGNO (SET_DEST (set));
	      if (i > LAST_VIRTUAL_REGISTER)
		{
		  /* It can happen that a REG_EQUIV note contains a MEM
		     that is not a legitimate memory operand.  As later
		     stages of reload assume that all addresses found
		     in the reg_equiv_* arrays were originally legitimate,
		     we ignore such REG_EQUIV notes.  */
		  if (memory_operand (x, VOIDmode))
		    {
		      /* Always unshare the equivalence, so we can
			 substitute into this insn without touching the
			 equivalence.  */
		      reg_equiv_memory_loc[i] = copy_rtx (x);
		    }
		  else if (function_invariant_p (x))
		    {
		      if (GET_CODE (x) == PLUS)
			{
			  /* This is PLUS of frame pointer and a constant,
			     and might be shared.  Unshare it.  */
			  reg_equiv_constant[i] = copy_rtx (x);
			  num_eliminable_invariants++;
			}
		      else if (x == frame_pointer_rtx
			       || x == arg_pointer_rtx)
			{
			  reg_equiv_constant[i] = x;
			  num_eliminable_invariants++;
			}
		      else if (LEGITIMATE_CONSTANT_P (x))
			reg_equiv_constant[i] = x;
		      else
			{
			  reg_equiv_memory_loc[i]
			    = force_const_mem (GET_MODE (SET_DEST (set)), x);
			  if (!reg_equiv_memory_loc[i])
			    continue;
			}
		    }
		  else
		    continue;

		  /* If this register is being made equivalent to a MEM
		     and the MEM is not SET_SRC, the equivalencing insn
		     is one with the MEM as a SET_DEST and it occurs later.
		     So don't mark this insn now.  */
		  if (!MEM_P (x)
		      || rtx_equal_p (SET_SRC (set), x))
		    reg_equiv_init[i]
		      = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[i]);
		}
	    }
	}

      /* If this insn is setting a MEM from a register equivalent to it,
	 this is the equivalencing insn.  */
      else if (set && MEM_P (SET_DEST (set))
	       && REG_P (SET_SRC (set))
	       && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
	       && rtx_equal_p (SET_DEST (set),
			       reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
	reg_equiv_init[REGNO (SET_SRC (set))]
	  = gen_rtx_INSN_LIST (VOIDmode, insn,
			       reg_equiv_init[REGNO (SET_SRC (set))]);

      if (INSN_P (insn))
	scan_paradoxical_subregs (PATTERN (insn));
    }

  init_elim_table ();

  first_label_num = get_first_label_num ();
  num_labels = max_label_num () - first_label_num;

  /* Allocate the tables used to store offset information at labels.  */
  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause a core dump.  */
  offsets_known_at = xmalloc (num_labels);
  offsets_at = xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));

  /* Alter each pseudo-reg rtx to contain its hard reg number.
     Assign stack slots to the pseudos that lack hard regs or equivalents.
     Do not touch virtual registers.  */

  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
    alter_reg (i, -1);

  /* If we have some registers we think can be eliminated, scan all insns to
     see if there is an insn that sets one of these registers to something
     other than itself plus a constant.  If so, the register cannot be
     eliminated.  Doing this scan here eliminates an extra pass through the
     main reload loop in the most common case where register elimination
     cannot be done.  */
  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), mark_not_eliminable, NULL);

  maybe_fix_stack_asms ();

  insns_need_reload = 0;
  something_needs_elimination = 0;

  /* Initialize to -1, which means take the first spill register.  */
  last_spill_reg = -1;

  /* Spill any hard regs that we know we can't eliminate.  */
  CLEAR_HARD_REG_SET (used_spill_regs);
  /* There can be multiple ways to eliminate a register;
     they should be listed adjacently.
     Elimination for any register fails only if all possible ways fail.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
    {
      int from = ep->from;
      int can_eliminate = 0;
      do
	{
	  can_eliminate |= ep->can_eliminate;
	  ep++;
	}
      while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
      if (! can_eliminate)
	spill_hard_reg (from, 1);
    }

#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  if (frame_pointer_needed)
    spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
#endif
  finish_spills (global);

  /* From now on, we may need to generate moves differently.  We may also
     allow modifications of insns which cause them to not be recognized.
     Any such modifications will be cleaned up during reload itself.  */
  reload_in_progress = 1;

  /* This loop scans the entire function each go-round
     and repeats until one repetition spills no additional hard regs.  */
  for (;;)
    {
      int something_changed;
      int did_spill;

      HOST_WIDE_INT starting_frame_size;

      /* Round size of stack frame to stack_alignment_needed.  This must be done
	 here because the stack size may be a part of the offset computation
	 for register elimination, and there might have been new stack slots
	 created in the last iteration of this loop.  */
      if (cfun->stack_alignment_needed)
	assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);

      starting_frame_size = get_frame_size ();

      set_initial_elim_offsets ();
      set_initial_label_offsets ();

      /* For each pseudo register that has an equivalent location defined,
	 try to eliminate any eliminable registers (such as the frame pointer)
	 assuming initial offsets for the replacement register, which
	 is the normal case.

	 If the resulting location is directly addressable, substitute
	 the MEM we just got directly for the old REG.

	 If it is not addressable but is a constant or the sum of a hard reg
	 and constant, it is probably not addressable because the constant is
	 out of range, in that case record the address; we will generate
	 hairy code to compute the address in a register each time it is
	 needed.  Similarly if it is a hard register, but one that is not
	 valid as an address register.

	 If the location is not addressable, but does not have one of the
	 above forms, assign a stack slot.  We have to do this to avoid the
	 potential of producing lots of reloads if, e.g., a location involves
	 a pseudo that didn't get a hard register and has an equivalent memory
	 location that also involves a pseudo that didn't get a hard register.

	 Perhaps at some point we will improve reload_when_needed handling
	 so this problem goes away.  But that's very hairy.  */

      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
	if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
	  {
	    rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);

	    if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
					 XEXP (x, 0)))
	      reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
	    else if (CONSTANT_P (XEXP (x, 0))
		     || (REG_P (XEXP (x, 0))
			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
		     || (GET_CODE (XEXP (x, 0)) == PLUS
			 && REG_P (XEXP (XEXP (x, 0), 0))
			 && (REGNO (XEXP (XEXP (x, 0), 0))
			     < FIRST_PSEUDO_REGISTER)
			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
	      reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
	    else
	      {
		/* Make a new stack slot.  Then indicate that something
		   changed so we go back and recompute offsets for
		   eliminable registers because the allocation of memory
		   below might change some offset.  reg_equiv_{mem,address}
		   will be set up for this pseudo on the next pass around
		   the loop.  */
		reg_equiv_memory_loc[i] = 0;
		reg_equiv_init[i] = 0;
		alter_reg (i, -1);
	      }
	  }

      if (caller_save_needed)
	setup_save_areas ();

      /* If we allocated another stack slot, redo elimination bookkeeping.  */
      if (starting_frame_size != get_frame_size ())
	continue;

      if (caller_save_needed)
	{
	  save_call_clobbered_regs ();
	  /* That might have allocated new insn_chain structures.  */
	  reload_firstobj = obstack_alloc (&reload_obstack, 0);
	}

      calculate_needs_all_insns (global);

      CLEAR_REG_SET (&spilled_pseudos);
      did_spill = 0;

      something_changed = 0;

      /* If we allocated any new memory locations, make another pass
	 since it might have changed elimination offsets.  */
      if (starting_frame_size != get_frame_size ())
	something_changed = 1;

      {
	HARD_REG_SET to_spill;
	CLEAR_HARD_REG_SET (to_spill);
	update_eliminables (&to_spill);
	for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	  if (TEST_HARD_REG_BIT (to_spill, i))
	    {
	      spill_hard_reg (i, 1);
	      did_spill = 1;

	      /* Regardless of the state of spills, if we previously had
		 a register that we thought we could eliminate, but now can
		 not eliminate, we must run another pass.

		 Consider pseudos which have an entry in reg_equiv_* which
		 reference an eliminable register.  We must make another pass
		 to update reg_equiv_* so that we do not substitute in the
		 old value from when we thought the elimination could be
		 performed.  */
	      something_changed = 1;
	    }
      }

      select_reload_regs ();
      if (failure)
	goto failed;

      if (insns_need_reload != 0 || did_spill)
	something_changed |= finish_spills (global);

      if (! something_changed)
	break;

      if (caller_save_needed)
	delete_caller_save_insns ();

      obstack_free (&reload_obstack, reload_firstobj);
    }

  /* If global-alloc was run, notify it of any register eliminations we have
     done.  */
  if (global)
    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
      if (ep->can_eliminate)
	mark_elimination (ep->from, ep->to);

  /* If a pseudo has no hard reg, delete the insns that made the equivalence.
     If that insn didn't set the register (i.e., it copied the register to
     memory), just delete that insn instead of the equivalencing insn plus
     anything now dead.  If we call delete_dead_insn on that insn, we may
     delete the insn that actually sets the register if the register dies
     there and that is incorrect.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
	{
	  rtx list;
	  for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
	    {
	      rtx equiv_insn = XEXP (list, 0);

	      /* If we already deleted the insn or if it may trap, we can't
		 delete it.  The latter case shouldn't happen, but can
		 if an insn has a variable address, gets a REG_EH_REGION
		 note added to it, and then gets converted into a load
		 from a constant address.  */
	      if (NOTE_P (equiv_insn)
		  || can_throw_internal (equiv_insn))
		;
	      else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
		delete_dead_insn (equiv_insn);
	      else
		SET_INSN_DELETED (equiv_insn);
	    }
	}
    }

  /* Use the reload registers where necessary
     by generating move instructions to move the must-be-register
     values into or out of the reload registers.  */

  if (insns_need_reload != 0 || something_needs_elimination
      || something_needs_operands_changed)
    {
      HOST_WIDE_INT old_frame_size = get_frame_size ();

      reload_as_needed (global);

      if (old_frame_size != get_frame_size ())
	abort ();

      if (num_eliminable)
	verify_initial_elim_offsets ();
    }

  /* If we were able to eliminate the frame pointer, show that it is no
     longer live at the start of any basic block.  If it is live by
     virtue of being in a pseudo, that pseudo will be marked live
     and hence the frame pointer will be known to be live via that
     pseudo.  */

  if (! frame_pointer_needed)
    FOR_EACH_BB (bb)
      CLEAR_REGNO_REG_SET (bb->global_live_at_start,
			   HARD_FRAME_POINTER_REGNUM);

  /* Come here (with failure set nonzero) if we can't get enough spill regs
     and we decide not to abort about it.  */
 failed:

  CLEAR_REG_SET (&spilled_pseudos);
  reload_in_progress = 0;

  /* Now eliminate all pseudo regs by modifying them into
     their equivalent memory references.
     The REG-rtx's for the pseudos are modified in place,
     so all insns that used to refer to them now refer to memory.

     For a reg that has a reg_equiv_address, all those insns
     were changed by reloading so that no insns refer to it any longer;
     but the DECL_RTL of a variable decl may refer to it,
     and if so this causes the debugging info to mention the variable.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      rtx addr = 0;

      if (reg_equiv_mem[i])
	addr = XEXP (reg_equiv_mem[i], 0);

      if (reg_equiv_address[i])
	addr = reg_equiv_address[i];

      if (addr)
	{
	  if (reg_renumber[i] < 0)
	    {
	      rtx reg = regno_reg_rtx[i];

	      REG_USERVAR_P (reg) = 0;
	      PUT_CODE (reg, MEM);
	      XEXP (reg, 0) = addr;
	      if (reg_equiv_memory_loc[i])
		MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc[i]);
	      else
		{
		  MEM_IN_STRUCT_P (reg) = MEM_SCALAR_P (reg) = 0;
		  MEM_ATTRS (reg) = 0;
		}
	    }
	  else if (reg_equiv_mem[i])
	    XEXP (reg_equiv_mem[i], 0) = addr;
	}
    }

  /* We must set reload_completed now since the cleanup_subreg_operands call
     below will re-recognize each insn and reload may have generated insns
     which are only valid during and after reload.  */
  reload_completed = 1;

  /* Make a pass over all the insns and delete all USEs which we inserted
     only to tag a REG_EQUAL note on them.  Remove all REG_DEAD and REG_UNUSED
     notes.  Delete all CLOBBER insns, except those that refer to the return
     value and the special mem:BLK CLOBBERs added to prevent the scheduler
     from misarranging variable-array code, and simplify (subreg (reg))
     operands.  Also remove all REG_RETVAL and REG_LIBCALL notes since they
     are no longer useful or accurate.  Strip and regenerate REG_INC notes
     that may have been moved around.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	rtx *pnote;

	if (CALL_P (insn))
	  replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
			      VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));

	if ((GET_CODE (PATTERN (insn)) == USE
	     /* We mark with QImode USEs introduced by reload itself.  */
	     && (GET_MODE (insn) == QImode
		 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
	    || (GET_CODE (PATTERN (insn)) == CLOBBER
		&& (!MEM_P (XEXP (PATTERN (insn), 0))
		    || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
		    || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
			&& XEXP (XEXP (PATTERN (insn), 0), 0)
			   != stack_pointer_rtx))
		&& (!REG_P (XEXP (PATTERN (insn), 0))
		    || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
	  {
	    delete_insn (insn);
	    continue;
	  }

	/* Some CLOBBERs may survive until here and still reference unassigned
	   pseudos with const equivalent, which may in turn cause ICE in later
	   passes if the reference remains in place.  */
	if (GET_CODE (PATTERN (insn)) == CLOBBER)
	  replace_pseudos_in (& XEXP (PATTERN (insn), 0),
			      VOIDmode, PATTERN (insn));

	pnote = &REG_NOTES (insn);
	while (*pnote != 0)
	  {
	    if (REG_NOTE_KIND (*pnote) == REG_DEAD
		|| REG_NOTE_KIND (*pnote) == REG_UNUSED
		|| REG_NOTE_KIND (*pnote) == REG_INC
		|| REG_NOTE_KIND (*pnote) == REG_RETVAL
		|| REG_NOTE_KIND (*pnote) == REG_LIBCALL)
	      *pnote = XEXP (*pnote, 1);
	    else
	      pnote = &XEXP (*pnote, 1);
	  }

#ifdef AUTO_INC_DEC
	add_auto_inc_notes (insn, PATTERN (insn));
#endif

	/* And simplify (subreg (reg)) if it appears as an operand.  */
	cleanup_subreg_operands (insn);
      }

  /* If we are doing stack checking, give a warning if this function's
     frame size is larger than we expect.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
      static int verbose_warned = 0;

      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
	  size += UNITS_PER_WORD;

      if (size > STACK_CHECK_MAX_FRAME_SIZE)
	{
	  warning ("frame size too large for reliable stack checking");
	  if (! verbose_warned)
	    {
	      warning ("try reducing the number of local variables");
	      verbose_warned = 1;
	    }
	}
    }

  /* Indicate that we no longer have known memory locations or constants.  */
  if (reg_equiv_constant)
    free (reg_equiv_constant);
  reg_equiv_constant = 0;
  VARRAY_GROW (reg_equiv_memory_loc_varray, 0);
  reg_equiv_memory_loc = 0;

  if (offsets_known_at)
    free (offsets_known_at);
  if (offsets_at)
    free (offsets_at);

  free (reg_equiv_mem);
  free (reg_equiv_init);
  free (reg_equiv_address);
  free (reg_max_ref_width);
  free (reg_old_renumber);
  free (pseudo_previous_regs);
  free (pseudo_forbidden_regs);

  CLEAR_HARD_REG_SET (used_spill_regs);
  for (i = 0; i < n_spills; i++)
    SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);

  /* Free all the insn_chain structures at once.  */
  obstack_free (&reload_obstack, reload_startobj);
  unused_insn_chains = 0;
  fixup_abnormal_edges ();

  /* Replacing pseudos with their memory equivalents might have
     created shared rtx.  Subsequent passes would get confused
     by this, so unshare everything here.  */
  unshare_all_rtl_again (first);

#ifdef STACK_BOUNDARY
  /* init_emit has set the alignment of the hard frame pointer
     to STACK_BOUNDARY.  It is very likely no longer valid if
     the hard frame pointer was used for register allocation.  */
  if (!frame_pointer_needed)
    REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
#endif

  return failure;
}

/* Yet another special case.  Unfortunately, reg-stack forces people to
   write incorrect clobbers in asm statements.  These clobbers must not
   cause the register to appear in bad_spill_regs, otherwise we'll call
   fatal_insn later.  We clear the corresponding regnos in the live
   register sets to avoid this.
   The whole thing is rather sick, I'm afraid.  */

static void
maybe_fix_stack_asms (void)
{
#ifdef STACK_REGS
  const char *constraints[MAX_RECOG_OPERANDS];
  enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
  struct insn_chain *chain;

  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      if (! INSN_P (chain->insn)
	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
	continue;
      pat = PATTERN (chain->insn);
      if (GET_CODE (pat) != PARALLEL)
	continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx t = XVECEXP (pat, 0, i);
	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
	}

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
			   constraints, operand_mode);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
	{
	  const char *p = constraints[i];
	  /* For every alternative, we compute the class of registers allowed
	     for reloading in CLS, and merge its contents into the reg set
	     ALLOWED.  */
	  int cls = (int) NO_REGS;

	  for (;;)
	    {
	      char c = *p;

	      if (c == '\0' || c == ',' || c == '#')
		{
		  /* End of one alternative - mark the regs in the current
		     class, and reset the class.  */
		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
		  cls = NO_REGS;
		  p++;
		  if (c == '#')
		    do {
		      c = *p++;
		    } while (c != '\0' && c != ',');
		  if (c == '\0')
		    break;
		  continue;
		}

	      switch (c)
		{
		case '=': case '+': case '*': case '%': case '?': case '!':
		case '0': case '1': case '2': case '3': case '4': case 'm':
		case '<': case '>': case 'V': case 'o': case '&': case 'E':
		case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
		case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
		case 'P':
		  break;

		case 'p':
		  cls = (int) reg_class_subunion[cls]
		    [(int) MODE_BASE_REG_CLASS (VOIDmode)];
		  break;

		case 'g':
		case 'r':
		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
		  break;

		default:
		  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
		    cls = (int) reg_class_subunion[cls]
		      [(int) MODE_BASE_REG_CLASS (VOIDmode)];
		  else
		    cls = (int) reg_class_subunion[cls]
		      [(int) REG_CLASS_FROM_CONSTRAINT (c, p)];
		}
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
      /* Those of the registers which are clobbered, but allowed by the
	 constraints, must be usable as reload registers.  So clear them
	 out of the life information.  */
      AND_HARD_REG_SET (allowed, clobbered);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allowed, i))
	  {
	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
	  }
    }

#endif
}
\f
/* Copy the global variables n_reloads and rld into the corresponding elts
   of CHAIN.  */
static void
copy_reloads (struct insn_chain *chain)
{
  chain->n_reloads = n_reloads;
  chain->rld = obstack_alloc (&reload_obstack,
			      n_reloads * sizeof (struct reload));
  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
  reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
}

/* Walk the chain of insns, and determine for each whether it needs reloads
   and/or eliminations.  Build the corresponding insns_need_reload list, and
   set something_needs_elimination as appropriate.  */
static void
calculate_needs_all_insns (int global)
{
  struct insn_chain **pprev_reload = &insns_need_reload;
  struct insn_chain *chain, *next = 0;

  something_needs_elimination = 0;

  reload_insn_firstobj = obstack_alloc (&reload_obstack, 0);
  for (chain = reload_insn_chain; chain != 0; chain = next)
    {
      rtx insn = chain->insn;

      next = chain->next;

      /* Clear out the shortcuts.  */
      chain->n_reloads = 0;
      chain->need_elim = 0;
      chain->need_reload = 0;
      chain->need_operand_change = 0;

      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	 include REG_LABEL), we need to see what effects this has on the
	 known offsets at labels.  */

      if (LABEL_P (insn) || JUMP_P (insn)
	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
	set_label_offsets (insn, insn, 0);

      if (INSN_P (insn))
	{
	  rtx old_body = PATTERN (insn);
	  int old_code = INSN_CODE (insn);
	  rtx old_notes = REG_NOTES (insn);
	  int did_elimination = 0;
	  int operands_changed = 0;
	  rtx set = single_set (insn);

	  /* Skip insns that only set an equivalence.  */
	  if (set && REG_P (SET_DEST (set))
	      && reg_renumber[REGNO (SET_DEST (set))] < 0
	      && reg_equiv_constant[REGNO (SET_DEST (set))])
	    continue;

	  /* If needed, eliminate any eliminable registers.  */
	  if (num_eliminable || num_eliminable_invariants)
	    did_elimination = eliminate_regs_in_insn (insn, 0);

	  /* Analyze the instruction.  */
	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
					   global, spill_reg_order);

	  /* If a no-op set needs more than one reload, this is likely
	     to be something that needs input address reloads.  We
	     can't get rid of this cleanly later, and it is of no use
	     anyway, so discard it now.
	     We only do this when expensive_optimizations is enabled,
	     since this complements reload inheritance / output
	     reload deletion, and it can make debugging harder.  */
	  if (flag_expensive_optimizations && n_reloads > 1)
	    {
	      rtx set = single_set (insn);
	      if (set
		  && SET_SRC (set) == SET_DEST (set)
		  && REG_P (SET_SRC (set))
		  && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
		{
		  delete_insn (insn);
		  /* Delete it from the reload chain.  */
		  if (chain->prev)
		    chain->prev->next = next;
		  else
		    reload_insn_chain = next;
		  if (next)
		    next->prev = chain->prev;
		  chain->next = unused_insn_chains;
		  unused_insn_chains = chain;
		  continue;
		}
	    }
	  if (num_eliminable)
	    update_eliminable_offsets ();

	  /* Remember for later shortcuts which insns had any reloads or
	     register eliminations.  */
	  chain->need_elim = did_elimination;
	  chain->need_reload = n_reloads > 0;
	  chain->need_operand_change = operands_changed;

	  /* Discard any register replacements done.  */
	  if (did_elimination)
	    {
	      obstack_free (&reload_obstack, reload_insn_firstobj);
	      PATTERN (insn) = old_body;
	      INSN_CODE (insn) = old_code;
1502 PATTERN (insn) = old_body;
1503 INSN_CODE (insn) = old_code;
1504 REG_NOTES (insn) = old_notes;
1505 something_needs_elimination = 1;
1506 }
1507
cb2afeb3
R
1508 something_needs_operands_changed |= operands_changed;
1509
437a710d 1510 if (n_reloads != 0)
7609e720 1511 {
f5d8c9f4 1512 copy_reloads (chain);
7609e720
BS
1513 *pprev_reload = chain;
1514 pprev_reload = &chain->next_need_reload;
7609e720 1515 }
1e5bd841 1516 }
1e5bd841 1517 }
7609e720 1518 *pprev_reload = 0;
1e5bd841 1519}
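/* Illustrative note (not part of the original sources): a typical insn
   discarded by the no-op check above is (set (reg:SI 123) (reg:SI 123))
   where pseudo 123 received no hard register.  Reloading it would need an
   input reload plus, usually, an input address reload for its stack slot,
   so n_reloads > 1 and, when expensive optimizations are enabled, the insn
   is simply deleted and unlinked from the reload_insn_chain.  */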
f5d8c9f4
BS
1520\f
1521/* Comparison function for qsort to decide which of two reloads
1522 should be handled first. *P1 and *P2 are the reload numbers. */
1e5bd841 1523
f5d8c9f4 1524static int
0c20a65f 1525reload_reg_class_lower (const void *r1p, const void *r2p)
1e5bd841 1526{
b3694847
SS
1527 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1528 int t;
1e5bd841 1529
f5d8c9f4
BS
1530 /* Consider required reloads before optional ones. */
1531 t = rld[r1].optional - rld[r2].optional;
1532 if (t != 0)
1533 return t;
1e5bd841 1534
f5d8c9f4
BS
1535 /* Count all solitary classes before non-solitary ones. */
1536 t = ((reg_class_size[(int) rld[r2].class] == 1)
1537 - (reg_class_size[(int) rld[r1].class] == 1));
1538 if (t != 0)
1539 return t;
1e5bd841 1540
f5d8c9f4
BS
1541 /* Aside from solitaires, consider all multi-reg groups first. */
1542 t = rld[r2].nregs - rld[r1].nregs;
1543 if (t != 0)
1544 return t;
1e5bd841 1545
f5d8c9f4
BS
1546 /* Consider reloads in order of increasing reg-class number. */
1547 t = (int) rld[r1].class - (int) rld[r2].class;
1548 if (t != 0)
1549 return t;
1e5bd841 1550
f5d8c9f4
BS
1551 /* If reloads are equally urgent, sort by reload number,
1552 so that the results of qsort leave nothing to chance. */
1553 return r1 - r2;
1554}
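/* Illustrative note (not from the original sources): given an optional
   reload in GENERAL_REGS and a required reload whose class contains a
   single register, the comparator above sorts the required single-register
   reload first; among otherwise equal reloads, wider multi-reg groups come
   before narrower ones, then lower class numbers, and the reload number
   itself breaks any remaining tie so the qsort result is deterministic.  */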
1555\f
1556/* The cost of spilling each hard reg. */
1557static int spill_cost[FIRST_PSEUDO_REGISTER];
1e5bd841 1558
f5d8c9f4
BS
1559/* When spilling multiple hard registers, we use SPILL_COST for the first
 1560	   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
 1561	   is accumulated only against the first hard reg of a multi-reg pseudo.  */
1562static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1e5bd841 1563
f5d8c9f4 1564/* Update the spill cost arrays, considering that pseudo REG is live. */
770ae6cc 1565
f5d8c9f4 1566static void
0c20a65f 1567count_pseudo (int reg)
f5d8c9f4 1568{
b2aec5c0 1569 int freq = REG_FREQ (reg);
f5d8c9f4
BS
1570 int r = reg_renumber[reg];
1571 int nregs;
1e5bd841 1572
f5d8c9f4
BS
1573 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1574 || REGNO_REG_SET_P (&spilled_pseudos, reg))
1575 return;
1e5bd841 1576
f5d8c9f4 1577 SET_REGNO_REG_SET (&pseudos_counted, reg);
1e5bd841 1578
f5d8c9f4
BS
1579 if (r < 0)
1580 abort ();
1d7254c5 1581
b2aec5c0 1582 spill_add_cost[r] += freq;
1e5bd841 1583
66fd46b6 1584 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
f5d8c9f4 1585 while (nregs-- > 0)
b2aec5c0 1586 spill_cost[r + nregs] += freq;
f5d8c9f4 1587}
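/* Worked example (an illustration, not from the original sources): for a
   pseudo with REG_FREQ 10 that the allocator assigned to the pair of hard
   registers r and r+1, the code above adds 10 to both spill_cost[r] and
   spill_cost[r+1], while spill_add_cost[r] alone is increased by 10.  */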
1e5bd841 1588
f5d8c9f4
BS
1589/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1590 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
efc9bd41 1591
f5d8c9f4 1592static void
0c20a65f 1593order_regs_for_reload (struct insn_chain *chain)
f5d8c9f4 1594{
fbd40359 1595 int i;
efc9bd41
RK
1596 HARD_REG_SET used_by_pseudos;
1597 HARD_REG_SET used_by_pseudos2;
1e5bd841 1598
efc9bd41 1599 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1e5bd841 1600
f5d8c9f4
BS
1601 memset (spill_cost, 0, sizeof spill_cost);
1602 memset (spill_add_cost, 0, sizeof spill_add_cost);
1e5bd841 1603
f5d8c9f4 1604 /* Count number of uses of each hard reg by pseudo regs allocated to it
efc9bd41
RK
1605 and then order them by decreasing use. First exclude hard registers
1606 that are live in or across this insn. */
1607
1608 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1609 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1610 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1611 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1e5bd841 1612
f5d8c9f4
BS
 1613	  /* Now count the pseudos that are live through or set in this insn,
 1614	     accumulating their costs into SPILL_COST / SPILL_ADD_COST.  */
1615 CLEAR_REG_SET (&pseudos_counted);
1e5bd841 1616
f5d8c9f4 1617 EXECUTE_IF_SET_IN_REG_SET
fbd40359 1618 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i,
f5d8c9f4 1619 {
fbd40359 1620 count_pseudo (i);
f5d8c9f4
BS
1621 });
1622 EXECUTE_IF_SET_IN_REG_SET
fbd40359 1623 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i,
f5d8c9f4 1624 {
fbd40359 1625 count_pseudo (i);
f5d8c9f4
BS
1626 });
1627 CLEAR_REG_SET (&pseudos_counted);
1e5bd841 1628}
03acd8f8 1629\f
f5d8c9f4
BS
1630/* Vector of reload-numbers showing the order in which the reloads should
1631 be processed. */
1632static short reload_order[MAX_RELOADS];
1e5bd841 1633
f5d8c9f4
BS
1634/* This is used to keep track of the spill regs used in one insn. */
1635static HARD_REG_SET used_spill_regs_local;
03acd8f8 1636
f5d8c9f4
BS
1637/* We decided to spill hard register SPILLED, which has a size of
1638 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1639 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1640 update SPILL_COST/SPILL_ADD_COST. */
770ae6cc 1641
03acd8f8 1642static void
0c20a65f 1643count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1e5bd841 1644{
f5d8c9f4 1645 int r = reg_renumber[reg];
66fd46b6 1646 int nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1e5bd841 1647
f5d8c9f4
BS
1648 if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1649 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1650 return;
1e5bd841 1651
f5d8c9f4 1652 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1e5bd841 1653
b2aec5c0 1654 spill_add_cost[r] -= REG_FREQ (reg);
f5d8c9f4 1655 while (nregs-- > 0)
b2aec5c0 1656 spill_cost[r + nregs] -= REG_FREQ (reg);
1e5bd841
BS
1657}
1658
f5d8c9f4 1659/* Find reload register to use for reload number ORDER. */
03acd8f8 1660
f5d8c9f4 1661static int
0c20a65f 1662find_reg (struct insn_chain *chain, int order)
1e5bd841 1663{
f5d8c9f4
BS
1664 int rnum = reload_order[order];
1665 struct reload *rl = rld + rnum;
1666 int best_cost = INT_MAX;
1667 int best_reg = -1;
770ae6cc
RK
1668 unsigned int i, j;
1669 int k;
f5d8c9f4
BS
1670 HARD_REG_SET not_usable;
1671 HARD_REG_SET used_by_other_reload;
1e5bd841 1672
f5d8c9f4
BS
1673 COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1674 IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1675 IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->class]);
1676
1677 CLEAR_HARD_REG_SET (used_by_other_reload);
770ae6cc 1678 for (k = 0; k < order; k++)
1e5bd841 1679 {
770ae6cc
RK
1680 int other = reload_order[k];
1681
f5d8c9f4
BS
1682 if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1683 for (j = 0; j < rld[other].nregs; j++)
1684 SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1685 }
1e5bd841 1686
f5d8c9f4
BS
1687 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1688 {
770ae6cc
RK
1689 unsigned int regno = i;
1690
f5d8c9f4
BS
1691 if (! TEST_HARD_REG_BIT (not_usable, regno)
1692 && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1693 && HARD_REGNO_MODE_OK (regno, rl->mode))
1e5bd841 1694 {
f5d8c9f4
BS
1695 int this_cost = spill_cost[regno];
1696 int ok = 1;
66fd46b6 1697 unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];
1e5bd841 1698
f5d8c9f4
BS
1699 for (j = 1; j < this_nregs; j++)
1700 {
1701 this_cost += spill_add_cost[regno + j];
1702 if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1703 || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1704 ok = 0;
1705 }
1706 if (! ok)
1707 continue;
f8cfc6aa 1708 if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
f5d8c9f4 1709 this_cost--;
f8cfc6aa 1710 if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
f5d8c9f4
BS
1711 this_cost--;
1712 if (this_cost < best_cost
1713 /* Among registers with equal cost, prefer caller-saved ones, or
1714 use REG_ALLOC_ORDER if it is defined. */
1715 || (this_cost == best_cost
1716#ifdef REG_ALLOC_ORDER
1717 && (inv_reg_alloc_order[regno]
1718 < inv_reg_alloc_order[best_reg])
1719#else
1720 && call_used_regs[regno]
1721 && ! call_used_regs[best_reg]
1722#endif
1723 ))
1724 {
1725 best_reg = regno;
1726 best_cost = this_cost;
1e5bd841
BS
1727 }
1728 }
1729 }
f5d8c9f4
BS
1730 if (best_reg == -1)
1731 return 0;
770ae6cc 1732
c263766c
RH
1733 if (dump_file)
1734 fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
770ae6cc 1735
66fd46b6 1736 rl->nregs = hard_regno_nregs[best_reg][rl->mode];
f5d8c9f4 1737 rl->regno = best_reg;
1e5bd841 1738
f5d8c9f4 1739 EXECUTE_IF_SET_IN_REG_SET
239a0f5b 1740 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j,
f5d8c9f4
BS
1741 {
1742 count_spilled_pseudo (best_reg, rl->nregs, j);
1743 });
770ae6cc 1744
f5d8c9f4 1745 EXECUTE_IF_SET_IN_REG_SET
239a0f5b 1746 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j,
f5d8c9f4
BS
1747 {
1748 count_spilled_pseudo (best_reg, rl->nregs, j);
1749 });
03acd8f8 1750
f5d8c9f4
BS
1751 for (i = 0; i < rl->nregs; i++)
1752 {
1753 if (spill_cost[best_reg + i] != 0
1754 || spill_add_cost[best_reg + i] != 0)
1755 abort ();
1756 SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1757 }
1758 return 1;
03acd8f8
BS
1759}
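/* Illustrative note (not from the original sources): for a reload whose
   mode needs two hard registers starting at regno R, the cost computed
   above is spill_cost[R] + spill_add_cost[R + 1], reduced by one for each
   of the reload's input and output that already lives in R.  Ties are
   broken through inv_reg_alloc_order when REG_ALLOC_ORDER is defined, and
   otherwise by preferring call-used registers.  */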
1760
1761/* Find more reload regs to satisfy the remaining need of an insn, which
1762 is given by CHAIN.
1e5bd841
BS
1763 Do it by ascending class number, since otherwise a reg
1764 might be spilled for a big class and might fail to count
f5d8c9f4 1765 for a smaller class even though it belongs to that class. */
1e5bd841 1766
03acd8f8 1767static void
0c20a65f 1768find_reload_regs (struct insn_chain *chain)
1e5bd841 1769{
f5d8c9f4 1770 int i;
1e5bd841 1771
f5d8c9f4
BS
1772 /* In order to be certain of getting the registers we need,
1773 we must sort the reloads into order of increasing register class.
1774 Then our grabbing of reload registers will parallel the process
1775 that provided the reload registers. */
1776 for (i = 0; i < chain->n_reloads; i++)
1e5bd841 1777 {
f5d8c9f4
BS
1778 /* Show whether this reload already has a hard reg. */
1779 if (chain->rld[i].reg_rtx)
1e5bd841 1780 {
f5d8c9f4
BS
1781 int regno = REGNO (chain->rld[i].reg_rtx);
1782 chain->rld[i].regno = regno;
770ae6cc 1783 chain->rld[i].nregs
66fd46b6 1784 = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
1e5bd841 1785 }
f5d8c9f4
BS
1786 else
1787 chain->rld[i].regno = -1;
1788 reload_order[i] = i;
1789 }
1e5bd841 1790
f5d8c9f4
BS
1791 n_reloads = chain->n_reloads;
1792 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1e5bd841 1793
f5d8c9f4 1794 CLEAR_HARD_REG_SET (used_spill_regs_local);
03acd8f8 1795
c263766c
RH
1796 if (dump_file)
1797 fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1e5bd841 1798
f5d8c9f4 1799 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
1e5bd841 1800
f5d8c9f4 1801 /* Compute the order of preference for hard registers to spill. */
1e5bd841 1802
f5d8c9f4 1803 order_regs_for_reload (chain);
1e5bd841 1804
f5d8c9f4
BS
1805 for (i = 0; i < n_reloads; i++)
1806 {
1807 int r = reload_order[i];
1e5bd841 1808
f5d8c9f4
BS
1809 /* Ignore reloads that got marked inoperative. */
1810 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
1811 && ! rld[r].optional
1812 && rld[r].regno == -1)
e04ca094 1813 if (! find_reg (chain, i))
f5d8c9f4 1814 {
ecf3151a 1815 spill_failure (chain->insn, rld[r].class);
f5d8c9f4 1816 failure = 1;
03acd8f8 1817 return;
f5d8c9f4 1818 }
1e5bd841 1819 }
05d10675 1820
f5d8c9f4
BS
1821 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
1822 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
03acd8f8 1823
f5d8c9f4 1824 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1e5bd841
BS
1825}
1826
f5d8c9f4 1827static void
0c20a65f 1828select_reload_regs (void)
09dd1133 1829{
f5d8c9f4 1830 struct insn_chain *chain;
09dd1133 1831
f5d8c9f4
BS
1832 /* Try to satisfy the needs for each insn. */
1833 for (chain = insns_need_reload; chain != 0;
1834 chain = chain->next_need_reload)
e04ca094 1835 find_reload_regs (chain);
09dd1133 1836}
32131a9c 1837\f
437a710d
BS
1838/* Delete all insns that were inserted by emit_caller_save_insns during
1839 this iteration. */
1840static void
0c20a65f 1841delete_caller_save_insns (void)
437a710d 1842{
7609e720 1843 struct insn_chain *c = reload_insn_chain;
437a710d 1844
7609e720 1845 while (c != 0)
437a710d 1846 {
7609e720 1847 while (c != 0 && c->is_caller_save_insn)
437a710d 1848 {
7609e720
BS
1849 struct insn_chain *next = c->next;
1850 rtx insn = c->insn;
1851
7609e720
BS
1852 if (c == reload_insn_chain)
1853 reload_insn_chain = next;
ca6c03ca 1854 delete_insn (insn);
7609e720
BS
1855
1856 if (next)
1857 next->prev = c->prev;
1858 if (c->prev)
1859 c->prev->next = next;
1860 c->next = unused_insn_chains;
1861 unused_insn_chains = c;
1862 c = next;
437a710d 1863 }
7609e720
BS
1864 if (c != 0)
1865 c = c->next;
437a710d
BS
1866 }
1867}
1868\f
5352b11a
RS
1869/* Handle the failure to find a register to spill.
1870 INSN should be one of the insns which needed this particular spill reg. */
1871
1872static void
0c20a65f 1873spill_failure (rtx insn, enum reg_class class)
5352b11a 1874{
ecf3151a 1875 static const char *const reg_class_names[] = REG_CLASS_NAMES;
5352b11a 1876 if (asm_noperands (PATTERN (insn)) >= 0)
1f978f5f 1877 error_for_asm (insn, "can't find a register in class `%s' while reloading `asm'",
ecf3151a 1878 reg_class_names[class]);
5352b11a 1879 else
ecf3151a 1880 {
1f978f5f 1881 error ("unable to find a register to spill in class `%s'",
ecf3151a 1882 reg_class_names[class]);
1f978f5f 1883 fatal_insn ("this is the insn:", insn);
ecf3151a 1884 }
5352b11a 1885}
32131a9c
RK
1886\f
 1887/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
1888 data that is dead in INSN. */
1889
1890static void
0c20a65f 1891delete_dead_insn (rtx insn)
32131a9c
RK
1892{
1893 rtx prev = prev_real_insn (insn);
1894 rtx prev_dest;
1895
1896 /* If the previous insn sets a register that dies in our insn, delete it
1897 too. */
1898 if (prev && GET_CODE (PATTERN (prev)) == SET
f8cfc6aa 1899 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
32131a9c 1900 && reg_mentioned_p (prev_dest, PATTERN (insn))
b294ca38
R
1901 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
1902 && ! side_effects_p (SET_SRC (PATTERN (prev))))
32131a9c
RK
1903 delete_dead_insn (prev);
1904
6773e15f 1905 SET_INSN_DELETED (insn);
32131a9c
RK
1906}
1907
1908/* Modify the home of pseudo-reg I.
1909 The new home is present in reg_renumber[I].
1910
1911 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
1912 or it may be -1, meaning there is none or it is not relevant.
1913 This is used so that all pseudos spilled from a given hard reg
1914 can share one stack slot. */
1915
1916static void
0c20a65f 1917alter_reg (int i, int from_reg)
32131a9c
RK
1918{
1919 /* When outputting an inline function, this can happen
1920 for a reg that isn't actually used. */
1921 if (regno_reg_rtx[i] == 0)
1922 return;
1923
1924 /* If the reg got changed to a MEM at rtl-generation time,
1925 ignore it. */
f8cfc6aa 1926 if (!REG_P (regno_reg_rtx[i]))
32131a9c
RK
1927 return;
1928
1929 /* Modify the reg-rtx to contain the new hard reg
1930 number or else to contain its pseudo reg number. */
1931 REGNO (regno_reg_rtx[i])
1932 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
1933
1934 /* If we have a pseudo that is needed but has no hard reg or equivalent,
1935 allocate a stack slot for it. */
1936
1937 if (reg_renumber[i] < 0
b1f21e0a 1938 && REG_N_REFS (i) > 0
32131a9c
RK
1939 && reg_equiv_constant[i] == 0
1940 && reg_equiv_memory_loc[i] == 0)
1941 {
b3694847 1942 rtx x;
770ae6cc
RK
1943 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
1944 unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
32131a9c
RK
1945 int adjust = 0;
1946
1947 /* Each pseudo reg has an inherent size which comes from its own mode,
1948 and a total size which provides room for paradoxical subregs
1949 which refer to the pseudo reg in wider modes.
1950
1951 We can use a slot already allocated if it provides both
1952 enough inherent space and enough total space.
1953 Otherwise, we allocate a new slot, making sure that it has no less
 1954	 inherent space, and no less total space, than the previous slot.  */
1955 if (from_reg == -1)
1956 {
1957 /* No known place to spill from => no slot to reuse. */
cabcf079
ILT
1958 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
1959 inherent_size == total_size ? 0 : -1);
f76b9db2 1960 if (BYTES_BIG_ENDIAN)
02db8dd0
RK
1961 /* Cancel the big-endian correction done in assign_stack_local.
1962 Get the address of the beginning of the slot.
1963 This is so we can do a big-endian correction unconditionally
1964 below. */
1965 adjust = inherent_size - total_size;
1966
3bdf5ad1 1967 /* Nothing can alias this slot except this pseudo. */
ba4828e0 1968 set_mem_alias_set (x, new_alias_set ());
32131a9c 1969 }
3bdf5ad1 1970
32131a9c
RK
1971 /* Reuse a stack slot if possible. */
1972 else if (spill_stack_slot[from_reg] != 0
1973 && spill_stack_slot_width[from_reg] >= total_size
1974 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
1975 >= inherent_size))
1976 x = spill_stack_slot[from_reg];
3bdf5ad1 1977
32131a9c
RK
1978 /* Allocate a bigger slot. */
1979 else
1980 {
1981 /* Compute maximum size needed, both for inherent size
1982 and for total size. */
1983 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
4f2d3674 1984 rtx stack_slot;
3bdf5ad1 1985
32131a9c
RK
1986 if (spill_stack_slot[from_reg])
1987 {
1988 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
1989 > inherent_size)
1990 mode = GET_MODE (spill_stack_slot[from_reg]);
1991 if (spill_stack_slot_width[from_reg] > total_size)
1992 total_size = spill_stack_slot_width[from_reg];
1993 }
3bdf5ad1 1994
32131a9c 1995 /* Make a slot with that size. */
cabcf079
ILT
1996 x = assign_stack_local (mode, total_size,
1997 inherent_size == total_size ? 0 : -1);
4f2d3674 1998 stack_slot = x;
3bdf5ad1
RK
1999
2000 /* All pseudos mapped to this slot can alias each other. */
2001 if (spill_stack_slot[from_reg])
ba4828e0 2002 set_mem_alias_set (x, MEM_ALIAS_SET (spill_stack_slot[from_reg]));
3bdf5ad1 2003 else
ba4828e0 2004 set_mem_alias_set (x, new_alias_set ());
3bdf5ad1 2005
f76b9db2
ILT
2006 if (BYTES_BIG_ENDIAN)
2007 {
2008 /* Cancel the big-endian correction done in assign_stack_local.
2009 Get the address of the beginning of the slot.
2010 This is so we can do a big-endian correction unconditionally
2011 below. */
2012 adjust = GET_MODE_SIZE (mode) - total_size;
4f2d3674 2013 if (adjust)
8ac61af7
RK
2014 stack_slot
2015 = adjust_address_nv (x, mode_for_size (total_size
38a448ca
RH
2016 * BITS_PER_UNIT,
2017 MODE_INT, 1),
8ac61af7 2018 adjust);
f76b9db2 2019 }
3bdf5ad1 2020
4f2d3674 2021 spill_stack_slot[from_reg] = stack_slot;
32131a9c
RK
2022 spill_stack_slot_width[from_reg] = total_size;
2023 }
2024
32131a9c
RK
2025 /* On a big endian machine, the "address" of the slot
2026 is the address of the low part that fits its inherent mode. */
f76b9db2 2027 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
32131a9c 2028 adjust += (total_size - inherent_size);
32131a9c
RK
2029
 2030	      /* Adjust the address of the slot to refer to the pseudo's own mode,
 2031		 applying the byte adjustment computed above.  */
1285011e
RK
2032 x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2033
2034 /* If we have a decl for the original register, set it for the
2035 memory. If this is a shared MEM, make a copy. */
a560d4d4
JH
2036 if (REG_EXPR (regno_reg_rtx[i])
2037 && TREE_CODE_CLASS (TREE_CODE (REG_EXPR (regno_reg_rtx[i]))) == 'd')
1285011e 2038 {
a560d4d4 2039 rtx decl = DECL_RTL_IF_SET (REG_EXPR (regno_reg_rtx[i]));
1285011e 2040
a20fd5ac
JJ
 2041	  /* We can do this only for the DECL's home pseudo, not for
2042 any copies of it, since otherwise when the stack slot
2043 is reused, nonoverlapping_memrefs_p might think they
2044 cannot overlap. */
f8cfc6aa 2045 if (decl && REG_P (decl) && REGNO (decl) == (unsigned) i)
a20fd5ac
JJ
2046 {
2047 if (from_reg != -1 && spill_stack_slot[from_reg] == x)
2048 x = copy_rtx (x);
2049
a560d4d4 2050 set_mem_attrs_from_reg (x, regno_reg_rtx[i]);
a20fd5ac 2051 }
1285011e 2052 }
32131a9c 2053
6d2f8887 2054 /* Save the stack slot for later. */
32131a9c
RK
2055 reg_equiv_memory_loc[i] = x;
2056 }
2057}
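/* Illustrative note (not from the original sources): spilling a 4-byte
   SImode pseudo whose widest paradoxical reference is 8 bytes allocates
   (or reuses) an 8-byte slot; on a big-endian target the recorded address
   is then adjusted so that it refers to the low-order 4 bytes, as the
   comments above describe.  Pseudos spilled from the same hard register
   share spill_stack_slot[from_reg], which is replaced by a bigger slot
   when a later pseudo needs more room.  */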
2058
2059/* Mark the slots in regs_ever_live for the hard regs
2060 used by pseudo-reg number REGNO. */
2061
2062void
0c20a65f 2063mark_home_live (int regno)
32131a9c 2064{
b3694847 2065 int i, lim;
770ae6cc 2066
32131a9c
RK
2067 i = reg_renumber[regno];
2068 if (i < 0)
2069 return;
66fd46b6 2070 lim = i + hard_regno_nregs[i][PSEUDO_REGNO_MODE (regno)];
32131a9c
RK
2071 while (i < lim)
2072 regs_ever_live[i++] = 1;
2073}
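/* Illustrative note (not from the original sources): if pseudo REGNO was
   assigned to hard register 8 and its mode occupies two hard registers,
   the loop above sets regs_ever_live[8] and regs_ever_live[9].  */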
2074\f
2075/* This function handles the tracking of elimination offsets around branches.
2076
2077 X is a piece of RTL being scanned.
2078
2079 INSN is the insn that it came from, if any.
2080
40f03658 2081 INITIAL_P is nonzero if we are to set the offset to be the initial
32131a9c
RK
2082 offset and zero if we are setting the offset of the label to be the
2083 current offset. */
2084
2085static void
0c20a65f 2086set_label_offsets (rtx x, rtx insn, int initial_p)
32131a9c
RK
2087{
2088 enum rtx_code code = GET_CODE (x);
2089 rtx tem;
e51712db 2090 unsigned int i;
32131a9c
RK
2091 struct elim_table *p;
2092
2093 switch (code)
2094 {
2095 case LABEL_REF:
8be386d9
RS
2096 if (LABEL_REF_NONLOCAL_P (x))
2097 return;
2098
32131a9c
RK
2099 x = XEXP (x, 0);
2100
0f41302f 2101 /* ... fall through ... */
32131a9c
RK
2102
2103 case CODE_LABEL:
2104 /* If we know nothing about this label, set the desired offsets. Note
2105 that this sets the offset at a label to be the offset before a label
2106 if we don't know anything about the label. This is not correct for
2107 the label after a BARRIER, but is the best guess we can make. If
2108 we guessed wrong, we will suppress an elimination that might have
2109 been possible had we been able to guess correctly. */
2110
4cc0fdd2 2111 if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
32131a9c
RK
2112 {
2113 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4cc0fdd2 2114 offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
32131a9c
RK
2115 = (initial_p ? reg_eliminate[i].initial_offset
2116 : reg_eliminate[i].offset);
4cc0fdd2 2117 offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
32131a9c
RK
2118 }
2119
2120 /* Otherwise, if this is the definition of a label and it is
d45cf215 2121 preceded by a BARRIER, set our offsets to the known offset of
32131a9c
RK
2122 that label. */
2123
2124 else if (x == insn
2125 && (tem = prev_nonnote_insn (insn)) != 0
4b4bf941 2126 && BARRIER_P (tem))
1f3b1e1a 2127 set_offsets_for_label (insn);
32131a9c
RK
2128 else
2129 /* If neither of the above cases is true, compare each offset
2130 with those previously recorded and suppress any eliminations
2131 where the offsets disagree. */
a8fdc208 2132
32131a9c 2133 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4cc0fdd2 2134 if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
32131a9c
RK
2135 != (initial_p ? reg_eliminate[i].initial_offset
2136 : reg_eliminate[i].offset))
2137 reg_eliminate[i].can_eliminate = 0;
2138
2139 return;
2140
2141 case JUMP_INSN:
2142 set_label_offsets (PATTERN (insn), insn, initial_p);
2143
0f41302f 2144 /* ... fall through ... */
32131a9c
RK
2145
2146 case INSN:
2147 case CALL_INSN:
2148 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2149 and hence must have all eliminations at their initial offsets. */
2150 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2151 if (REG_NOTE_KIND (tem) == REG_LABEL)
2152 set_label_offsets (XEXP (tem, 0), insn, 1);
2153 return;
2154
0c0ba09c 2155 case PARALLEL:
32131a9c
RK
2156 case ADDR_VEC:
2157 case ADDR_DIFF_VEC:
0c0ba09c
JJ
2158 /* Each of the labels in the parallel or address vector must be
2159 at their initial offsets. We want the first field for PARALLEL
2160 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
32131a9c 2161
e51712db 2162 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
32131a9c
RK
2163 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2164 insn, initial_p);
2165 return;
2166
2167 case SET:
2168 /* We only care about setting PC. If the source is not RETURN,
2169 IF_THEN_ELSE, or a label, disable any eliminations not at
2170 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2171 isn't one of those possibilities. For branches to a label,
2172 call ourselves recursively.
2173
2174 Note that this can disable elimination unnecessarily when we have
2175 a non-local goto since it will look like a non-constant jump to
2176 someplace in the current function. This isn't a significant
2177 problem since such jumps will normally be when all elimination
2178 pairs are back to their initial offsets. */
2179
2180 if (SET_DEST (x) != pc_rtx)
2181 return;
2182
2183 switch (GET_CODE (SET_SRC (x)))
2184 {
2185 case PC:
2186 case RETURN:
2187 return;
2188
2189 case LABEL_REF:
2190 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2191 return;
2192
2193 case IF_THEN_ELSE:
2194 tem = XEXP (SET_SRC (x), 1);
2195 if (GET_CODE (tem) == LABEL_REF)
2196 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2197 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2198 break;
2199
2200 tem = XEXP (SET_SRC (x), 2);
2201 if (GET_CODE (tem) == LABEL_REF)
2202 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2203 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2204 break;
2205 return;
e9a25f70
JL
2206
2207 default:
2208 break;
32131a9c
RK
2209 }
2210
2211 /* If we reach here, all eliminations must be at their initial
2212 offset because we are doing a jump to a variable address. */
2213 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2214 if (p->offset != p->initial_offset)
2215 p->can_eliminate = 0;
e9a25f70 2216 break;
05d10675 2217
e9a25f70
JL
2218 default:
2219 break;
32131a9c
RK
2220 }
2221}
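/* Illustrative note (not from the original sources): if one path reaches a
   label with a frame-pointer-to-stack-pointer offset of 16 while another
   path reaches the same label with an offset of 24, the comparison above
   finds the recorded and current offsets disagreeing and clears
   can_eliminate for that elimination pair.  */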
2222\f
a8fdc208 2223/* Scan X and replace any eliminable registers (such as fp) with a
32131a9c
RK
2224 replacement (such as sp), plus an offset.
2225
2226 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2227 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2228 MEM, we are allowed to replace a sum of a register and the constant zero
2229 with the register, which we cannot do outside a MEM. In addition, we need
2230 to record the fact that a register is referenced outside a MEM.
2231
ff32812a 2232 If INSN is an insn, it is the insn containing X. If we replace a REG
40f03658 2233 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
32131a9c 2234 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
38e01259 2235 the REG is being modified.
32131a9c 2236
ff32812a
RS
2237 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2238 That's used when we eliminate in expressions stored in notes.
2239 This means, do not set ref_outside_mem even if the reference
2240 is outside of MEMs.
2241
32131a9c
RK
 2242	 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2243 replacements done assuming all offsets are at their initial values. If
2244 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2245 encounter, return the actual location so that find_reloads will do
2246 the proper thing. */
2247
2248rtx
0c20a65f 2249eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
32131a9c
RK
2250{
2251 enum rtx_code code = GET_CODE (x);
2252 struct elim_table *ep;
2253 int regno;
2254 rtx new;
2255 int i, j;
6f7d635c 2256 const char *fmt;
32131a9c
RK
2257 int copied = 0;
2258
d6633f01
NS
2259 if (! current_function_decl)
2260 return x;
9969bb2c 2261
32131a9c
RK
2262 switch (code)
2263 {
2264 case CONST_INT:
2265 case CONST_DOUBLE:
69ef87e2 2266 case CONST_VECTOR:
32131a9c
RK
2267 case CONST:
2268 case SYMBOL_REF:
2269 case CODE_LABEL:
2270 case PC:
2271 case CC0:
2272 case ASM_INPUT:
2273 case ADDR_VEC:
2274 case ADDR_DIFF_VEC:
2275 case RETURN:
2276 return x;
2277
2278 case REG:
2279 regno = REGNO (x);
2280
2281 /* First handle the case where we encounter a bare register that
2282 is eliminable. Replace it with a PLUS. */
2283 if (regno < FIRST_PSEUDO_REGISTER)
2284 {
2285 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2286 ep++)
2287 if (ep->from_rtx == x && ep->can_eliminate)
dfac187e 2288 return plus_constant (ep->to_rtx, ep->previous_offset);
32131a9c
RK
2289
2290 }
cd7c9015
RK
2291 else if (reg_renumber && reg_renumber[regno] < 0
2292 && reg_equiv_constant && reg_equiv_constant[regno]
2b49ee39
R
2293 && ! CONSTANT_P (reg_equiv_constant[regno]))
2294 return eliminate_regs (copy_rtx (reg_equiv_constant[regno]),
2295 mem_mode, insn);
32131a9c
RK
2296 return x;
2297
c5c76735
JL
2298 /* You might think handling MINUS in a manner similar to PLUS is a
2299 good idea. It is not. It has been tried multiple times and every
 2300	 time the change has had to be reverted.
2301
2302 Other parts of reload know a PLUS is special (gen_reload for example)
 2303	 and require special code to handle a reloaded PLUS operand.
2304
2305 Also consider backends where the flags register is clobbered by a
a457ee07 2306 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
c5c76735
JL
2307 lea instruction comes to mind). If we try to reload a MINUS, we
2308 may kill the flags register that was holding a useful value.
2309
2310 So, please before trying to handle MINUS, consider reload as a
2311 whole instead of this little section as well as the backend issues. */
32131a9c
RK
2312 case PLUS:
2313 /* If this is the sum of an eliminable register and a constant, rework
6d2f8887 2314 the sum. */
f8cfc6aa 2315 if (REG_P (XEXP (x, 0))
32131a9c
RK
2316 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2317 && CONSTANT_P (XEXP (x, 1)))
2318 {
2319 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2320 ep++)
2321 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2322 {
32131a9c
RK
2323 /* The only time we want to replace a PLUS with a REG (this
2324 occurs when the constant operand of the PLUS is the negative
2325 of the offset) is when we are inside a MEM. We won't want
2326 to do so at other times because that would change the
2327 structure of the insn in a way that reload can't handle.
2328 We special-case the commonest situation in
2329 eliminate_regs_in_insn, so just replace a PLUS with a
2330 PLUS here, unless inside a MEM. */
a23b64d5 2331 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
32131a9c
RK
2332 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2333 return ep->to_rtx;
2334 else
38a448ca
RH
2335 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2336 plus_constant (XEXP (x, 1),
2337 ep->previous_offset));
32131a9c
RK
2338 }
2339
2340 /* If the register is not eliminable, we are done since the other
2341 operand is a constant. */
2342 return x;
2343 }
2344
2345 /* If this is part of an address, we want to bring any constant to the
2346 outermost PLUS. We will do this by doing register replacement in
2347 our operands and seeing if a constant shows up in one of them.
2348
dfac187e
BS
2349 Note that there is no risk of modifying the structure of the insn,
2350 since we only get called for its operands, thus we are either
2351 modifying the address inside a MEM, or something like an address
2352 operand of a load-address insn. */
32131a9c
RK
2353
2354 {
1914f5da
RH
2355 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2356 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
32131a9c 2357
cd7c9015 2358 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
32131a9c
RK
2359 {
2360 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2361 didn't get a hard register but has a reg_equiv_constant,
32131a9c
RK
2362 we must replace the constant here since it may no longer
2363 be in the position of any operand. */
f8cfc6aa 2364 if (GET_CODE (new0) == PLUS && REG_P (new1)
32131a9c
RK
2365 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2366 && reg_renumber[REGNO (new1)] < 0
2367 && reg_equiv_constant != 0
2368 && reg_equiv_constant[REGNO (new1)] != 0)
2369 new1 = reg_equiv_constant[REGNO (new1)];
f8cfc6aa 2370 else if (GET_CODE (new1) == PLUS && REG_P (new0)
32131a9c
RK
2371 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2372 && reg_renumber[REGNO (new0)] < 0
2373 && reg_equiv_constant[REGNO (new0)] != 0)
2374 new0 = reg_equiv_constant[REGNO (new0)];
2375
2376 new = form_sum (new0, new1);
2377
2378 /* As above, if we are not inside a MEM we do not want to
2379 turn a PLUS into something else. We might try to do so here
2380 for an addition of 0 if we aren't optimizing. */
2381 if (! mem_mode && GET_CODE (new) != PLUS)
38a448ca 2382 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
32131a9c
RK
2383 else
2384 return new;
2385 }
2386 }
2387 return x;
2388
981c7390 2389 case MULT:
05d10675 2390 /* If this is the product of an eliminable register and a
981c7390
RK
2391 constant, apply the distribute law and move the constant out
2392 so that we have (plus (mult ..) ..). This is needed in order
9faa82d8 2393 to keep load-address insns valid. This case is pathological.
981c7390 2394 We ignore the possibility of overflow here. */
f8cfc6aa 2395 if (REG_P (XEXP (x, 0))
981c7390
RK
2396 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2397 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2398 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2399 ep++)
2400 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2401 {
2402 if (! mem_mode
2403 /* Refs inside notes don't count for this purpose. */
2404 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2405 || GET_CODE (insn) == INSN_LIST)))
2406 ep->ref_outside_mem = 1;
2407
2408 return
38a448ca 2409 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
981c7390
RK
2410 ep->previous_offset * INTVAL (XEXP (x, 1)));
2411 }
32131a9c 2412
0f41302f 2413 /* ... fall through ... */
32131a9c 2414
32131a9c
RK
2415 case CALL:
2416 case COMPARE:
c5c76735 2417 /* See comments before PLUS about handling MINUS. */
930aeef3 2418 case MINUS:
32131a9c
RK
2419 case DIV: case UDIV:
2420 case MOD: case UMOD:
2421 case AND: case IOR: case XOR:
45620ed4
RK
2422 case ROTATERT: case ROTATE:
2423 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
32131a9c
RK
2424 case NE: case EQ:
2425 case GE: case GT: case GEU: case GTU:
2426 case LE: case LT: case LEU: case LTU:
2427 {
1914f5da 2428 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
fb3821f7 2429 rtx new1
1914f5da 2430 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
32131a9c
RK
2431
2432 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
38a448ca 2433 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
32131a9c
RK
2434 }
2435 return x;
2436
981c7390
RK
2437 case EXPR_LIST:
2438 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2439 if (XEXP (x, 0))
2440 {
1914f5da 2441 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
981c7390 2442 if (new != XEXP (x, 0))
13bb79d4
R
2443 {
2444 /* If this is a REG_DEAD note, it is not valid anymore.
2445 Using the eliminated version could result in creating a
2446 REG_DEAD note for the stack or frame pointer. */
2447 if (GET_MODE (x) == REG_DEAD)
2448 return (XEXP (x, 1)
2449 ? eliminate_regs (XEXP (x, 1), mem_mode, insn)
2450 : NULL_RTX);
2451
2452 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2453 }
981c7390
RK
2454 }
2455
0f41302f 2456 /* ... fall through ... */
981c7390
RK
2457
2458 case INSN_LIST:
2459 /* Now do eliminations in the rest of the chain. If this was
2460 an EXPR_LIST, this might result in allocating more memory than is
2461 strictly needed, but it simplifies the code. */
2462 if (XEXP (x, 1))
2463 {
1914f5da 2464 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
981c7390 2465 if (new != XEXP (x, 1))
cd7c9015
RK
2466 return
2467 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
981c7390
RK
2468 }
2469 return x;
2470
32131a9c
RK
2471 case PRE_INC:
2472 case POST_INC:
2473 case PRE_DEC:
2474 case POST_DEC:
32131a9c
RK
2475 case STRICT_LOW_PART:
2476 case NEG: case NOT:
2477 case SIGN_EXTEND: case ZERO_EXTEND:
2478 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2479 case FLOAT: case FIX:
2480 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2481 case ABS:
2482 case SQRT:
2483 case FFS:
2928cd7a
RH
2484 case CLZ:
2485 case CTZ:
2486 case POPCOUNT:
2487 case PARITY:
1914f5da 2488 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
32131a9c 2489 if (new != XEXP (x, 0))
38a448ca 2490 return gen_rtx_fmt_e (code, GET_MODE (x), new);
32131a9c
RK
2491 return x;
2492
2493 case SUBREG:
ddef6bc7 2494 /* Similar to above processing, but preserve SUBREG_BYTE.
32131a9c
RK
2495 Convert (subreg (mem)) to (mem) if not paradoxical.
2496 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2497 pseudo didn't get a hard reg, we must replace this with the
bd235d86 2498 eliminated version of the memory location because push_reload
32131a9c 2499 may do the replacement in certain circumstances. */
f8cfc6aa 2500 if (REG_P (SUBREG_REG (x))
32131a9c
RK
2501 && (GET_MODE_SIZE (GET_MODE (x))
2502 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2503 && reg_equiv_memory_loc != 0
2504 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2505 {
cb2afeb3 2506 new = SUBREG_REG (x);
32131a9c
RK
2507 }
2508 else
1914f5da 2509 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
32131a9c 2510
ddef6bc7 2511 if (new != SUBREG_REG (x))
32131a9c 2512 {
29ae5012
RK
2513 int x_size = GET_MODE_SIZE (GET_MODE (x));
2514 int new_size = GET_MODE_SIZE (GET_MODE (new));
2515
3c0cb5de 2516 if (MEM_P (new)
6d49a073 2517 && ((x_size < new_size
1914f5da 2518#ifdef WORD_REGISTER_OPERATIONS
6d49a073
JW
2519 /* On these machines, combine can create rtl of the form
2520 (set (subreg:m1 (reg:m2 R) 0) ...)
05d10675 2521 where m1 < m2, and expects something interesting to
6d49a073
JW
2522 happen to the entire word. Moreover, it will use the
2523 (reg:m2 R) later, expecting all bits to be preserved.
05d10675 2524 So if the number of words is the same, preserve the
bd235d86 2525 subreg so that push_reload can see it. */
5d9669fd
RK
2526 && ! ((x_size - 1) / UNITS_PER_WORD
 2527		   == (new_size - 1) / UNITS_PER_WORD)
1914f5da 2528#endif
6d49a073 2529 )
5d9669fd 2530 || x_size == new_size)
1914f5da 2531 )
a2ff290c 2532 return adjust_address_nv (new, GET_MODE (x), SUBREG_BYTE (x));
32131a9c 2533 else
ddef6bc7 2534 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x));
32131a9c
RK
2535 }
2536
2537 return x;
2538
32131a9c
RK
2539 case MEM:
2540 /* Our only special processing is to pass the mode of the MEM to our
2541 recursive call and copy the flags. While we are here, handle this
2542 case more efficiently. */
f1ec5147
RK
2543 return
2544 replace_equiv_address_nv (x,
2545 eliminate_regs (XEXP (x, 0),
2546 GET_MODE (x), insn));
05d10675 2547
dfac187e 2548 case USE:
055c7759
JDA
2549 /* Handle insn_list USE that a call to a pure function may generate. */
2550 new = eliminate_regs (XEXP (x, 0), 0, insn);
2551 if (new != XEXP (x, 0))
2552 return gen_rtx_USE (GET_MODE (x), new);
2553 return x;
2554
dfac187e
BS
2555 case CLOBBER:
2556 case ASM_OPERANDS:
2557 case SET:
2558 abort ();
2559
e9a25f70
JL
2560 default:
2561 break;
32131a9c
RK
2562 }
2563
2564 /* Process each of our operands recursively. If any have changed, make a
2565 copy of the rtx. */
2566 fmt = GET_RTX_FORMAT (code);
2567 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2568 {
2569 if (*fmt == 'e')
2570 {
1914f5da 2571 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
32131a9c
RK
2572 if (new != XEXP (x, i) && ! copied)
2573 {
2574 rtx new_x = rtx_alloc (code);
e1de1560 2575 memcpy (new_x, x, RTX_SIZE (code));
32131a9c
RK
2576 x = new_x;
2577 copied = 1;
2578 }
2579 XEXP (x, i) = new;
2580 }
2581 else if (*fmt == 'E')
2582 {
2583 int copied_vec = 0;
2584 for (j = 0; j < XVECLEN (x, i); j++)
2585 {
1914f5da 2586 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
32131a9c
RK
2587 if (new != XVECEXP (x, i, j) && ! copied_vec)
2588 {
8f985ec4
ZW
2589 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2590 XVEC (x, i)->elem);
32131a9c
RK
2591 if (! copied)
2592 {
2593 rtx new_x = rtx_alloc (code);
e1de1560 2594 memcpy (new_x, x, RTX_SIZE (code));
32131a9c
RK
2595 x = new_x;
2596 copied = 1;
2597 }
2598 XVEC (x, i) = new_v;
2599 copied_vec = 1;
2600 }
2601 XVECEXP (x, i, j) = new;
2602 }
2603 }
2604 }
2605
2606 return x;
2607}
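/* Worked example (an illustration, not from the original sources): with a
   frame-pointer-to-stack-pointer elimination whose offset is currently 16,
   (plus (reg fp) (const_int 8)) becomes (plus (reg sp) (const_int 24));
   inside a MEM, the special case above collapses
   (plus (reg fp) (const_int -16)) to just (reg sp), and a bare (reg fp)
   outside any PLUS is rewritten as (plus (reg sp) (const_int 16)).  */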
dfac187e
BS
2608
2609/* Scan rtx X for modifications of elimination target registers. Update
2610 the table of eliminables to reflect the changed state. MEM_MODE is
2611 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2612
2613static void
0c20a65f 2614elimination_effects (rtx x, enum machine_mode mem_mode)
dfac187e
BS
2615{
2616 enum rtx_code code = GET_CODE (x);
2617 struct elim_table *ep;
2618 int regno;
2619 int i, j;
2620 const char *fmt;
2621
2622 switch (code)
2623 {
2624 case CONST_INT:
2625 case CONST_DOUBLE:
69ef87e2 2626 case CONST_VECTOR:
dfac187e
BS
2627 case CONST:
2628 case SYMBOL_REF:
2629 case CODE_LABEL:
2630 case PC:
2631 case CC0:
2632 case ASM_INPUT:
2633 case ADDR_VEC:
2634 case ADDR_DIFF_VEC:
2635 case RETURN:
2636 return;
2637
dfac187e
BS
2638 case REG:
2639 regno = REGNO (x);
2640
2641 /* First handle the case where we encounter a bare register that
2642 is eliminable. Replace it with a PLUS. */
2643 if (regno < FIRST_PSEUDO_REGISTER)
2644 {
2645 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2646 ep++)
2647 if (ep->from_rtx == x && ep->can_eliminate)
2648 {
2649 if (! mem_mode)
2650 ep->ref_outside_mem = 1;
2651 return;
2652 }
2653
2654 }
2655 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2656 && reg_equiv_constant[regno]
92a21141 2657 && ! function_invariant_p (reg_equiv_constant[regno]))
dfac187e
BS
2658 elimination_effects (reg_equiv_constant[regno], mem_mode);
2659 return;
2660
2661 case PRE_INC:
2662 case POST_INC:
2663 case PRE_DEC:
2664 case POST_DEC:
4b983fdc
RH
2665 case POST_MODIFY:
2666 case PRE_MODIFY:
dfac187e
BS
2667 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2668 if (ep->to_rtx == XEXP (x, 0))
2669 {
2670 int size = GET_MODE_SIZE (mem_mode);
2671
2672 /* If more bytes than MEM_MODE are pushed, account for them. */
2673#ifdef PUSH_ROUNDING
2674 if (ep->to_rtx == stack_pointer_rtx)
2675 size = PUSH_ROUNDING (size);
2676#endif
2677 if (code == PRE_DEC || code == POST_DEC)
2678 ep->offset += size;
4b983fdc 2679 else if (code == PRE_INC || code == POST_INC)
dfac187e 2680 ep->offset -= size;
4b983fdc
RH
2681 else if ((code == PRE_MODIFY || code == POST_MODIFY)
2682 && GET_CODE (XEXP (x, 1)) == PLUS
2683 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
2684 && CONSTANT_P (XEXP (XEXP (x, 1), 1)))
2685 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
dfac187e
BS
2686 }
2687
4b983fdc
RH
2688 /* These two aren't unary operators. */
2689 if (code == POST_MODIFY || code == PRE_MODIFY)
2690 break;
2691
dfac187e
BS
2692 /* Fall through to generic unary operation case. */
2693 case STRICT_LOW_PART:
2694 case NEG: case NOT:
2695 case SIGN_EXTEND: case ZERO_EXTEND:
2696 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2697 case FLOAT: case FIX:
2698 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2699 case ABS:
2700 case SQRT:
2701 case FFS:
2928cd7a
RH
2702 case CLZ:
2703 case CTZ:
2704 case POPCOUNT:
2705 case PARITY:
dfac187e
BS
2706 elimination_effects (XEXP (x, 0), mem_mode);
2707 return;
2708
2709 case SUBREG:
f8cfc6aa 2710 if (REG_P (SUBREG_REG (x))
dfac187e
BS
2711 && (GET_MODE_SIZE (GET_MODE (x))
2712 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2713 && reg_equiv_memory_loc != 0
2714 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2715 return;
2716
2717 elimination_effects (SUBREG_REG (x), mem_mode);
2718 return;
2719
2720 case USE:
 2721	      /* If using a register that is the source of an elimination we still
 2722		 think can be performed, note that it cannot be performed, since we
 2723		 don't know how this register is used.  */
2724 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2725 if (ep->from_rtx == XEXP (x, 0))
2726 ep->can_eliminate = 0;
2727
2728 elimination_effects (XEXP (x, 0), mem_mode);
2729 return;
2730
2731 case CLOBBER:
2732 /* If clobbering a register that is the replacement register for an
2733 elimination we still think can be performed, note that it cannot
2734 be performed. Otherwise, we need not be concerned about it. */
2735 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2736 if (ep->to_rtx == XEXP (x, 0))
2737 ep->can_eliminate = 0;
2738
2739 elimination_effects (XEXP (x, 0), mem_mode);
2740 return;
2741
2742 case SET:
2743 /* Check for setting a register that we know about. */
f8cfc6aa 2744 if (REG_P (SET_DEST (x)))
dfac187e
BS
2745 {
2746 /* See if this is setting the replacement register for an
2747 elimination.
2748
2749 If DEST is the hard frame pointer, we do nothing because we
2750 assume that all assignments to the frame pointer are for
2751 non-local gotos and are being done at a time when they are valid
2752 and do not disturb anything else. Some machines want to
2753 eliminate a fake argument pointer (or even a fake frame pointer)
2754 with either the real frame or the stack pointer. Assignments to
2755 the hard frame pointer must not prevent this elimination. */
2756
2757 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2758 ep++)
2759 if (ep->to_rtx == SET_DEST (x)
2760 && SET_DEST (x) != hard_frame_pointer_rtx)
2761 {
2762 /* If it is being incremented, adjust the offset. Otherwise,
2763 this elimination can't be done. */
2764 rtx src = SET_SRC (x);
2765
2766 if (GET_CODE (src) == PLUS
2767 && XEXP (src, 0) == SET_DEST (x)
2768 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2769 ep->offset -= INTVAL (XEXP (src, 1));
2770 else
2771 ep->can_eliminate = 0;
2772 }
2773 }
2774
2775 elimination_effects (SET_DEST (x), 0);
2776 elimination_effects (SET_SRC (x), 0);
2777 return;
2778
2779 case MEM:
dfac187e
BS
2780 /* Our only special processing is to pass the mode of the MEM to our
2781 recursive call. */
2782 elimination_effects (XEXP (x, 0), GET_MODE (x));
2783 return;
2784
2785 default:
2786 break;
2787 }
2788
2789 fmt = GET_RTX_FORMAT (code);
2790 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2791 {
2792 if (*fmt == 'e')
2793 elimination_effects (XEXP (x, i), mem_mode);
2794 else if (*fmt == 'E')
2795 for (j = 0; j < XVECLEN (x, i); j++)
2796 elimination_effects (XVECEXP (x, i, j), mem_mode);
2797 }
2798}
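/* Illustrative note (not from the original sources): a push such as
   (set (mem:SI (pre_dec (reg sp))) ...) on a target with 4-byte SImode
   makes the PRE_DEC case above add 4 (after PUSH_ROUNDING) to the offset
   of any elimination whose replacement register is sp, while a
   (set (reg sp) (plus (reg sp) (const_int -12))) increases that offset by
   12 via the SET case.  */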
2799
2800/* Descend through rtx X and verify that no references to eliminable registers
2801 remain. If any do remain, mark the involved register as not
2802 eliminable. */
1d813780 2803
dfac187e 2804static void
0c20a65f 2805check_eliminable_occurrences (rtx x)
dfac187e
BS
2806{
2807 const char *fmt;
2808 int i;
2809 enum rtx_code code;
2810
2811 if (x == 0)
2812 return;
1d7254c5 2813
dfac187e
BS
2814 code = GET_CODE (x);
2815
2816 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2817 {
2818 struct elim_table *ep;
2819
2820 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
761fa0f7 2821 if (ep->from_rtx == x)
dfac187e
BS
2822 ep->can_eliminate = 0;
2823 return;
2824 }
1d7254c5 2825
dfac187e
BS
2826 fmt = GET_RTX_FORMAT (code);
2827 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2828 {
2829 if (*fmt == 'e')
2830 check_eliminable_occurrences (XEXP (x, i));
2831 else if (*fmt == 'E')
2832 {
2833 int j;
2834 for (j = 0; j < XVECLEN (x, i); j++)
2835 check_eliminable_occurrences (XVECEXP (x, i, j));
2836 }
2837 }
2838}
32131a9c
RK
2839\f
2840/* Scan INSN and eliminate all eliminable registers in it.
2841
2842 If REPLACE is nonzero, do the replacement destructively. Also
 2843	 delete the insn as dead if it is setting an eliminable register.
2844
2845 If REPLACE is zero, do all our allocations in reload_obstack.
2846
2847 If no eliminations were done and this insn doesn't require any elimination
2848 processing (these are not identical conditions: it might be updating sp,
2849 but not referencing fp; this needs to be seen during reload_as_needed so
2850 that the offset between fp and sp can be taken into consideration), zero
2851 is returned. Otherwise, 1 is returned. */
2852
2853static int
0c20a65f 2854eliminate_regs_in_insn (rtx insn, int replace)
32131a9c 2855{
dfac187e 2856 int icode = recog_memoized (insn);
32131a9c 2857 rtx old_body = PATTERN (insn);
dfac187e 2858 int insn_is_asm = asm_noperands (old_body) >= 0;
774672d2 2859 rtx old_set = single_set (insn);
32131a9c
RK
2860 rtx new_body;
2861 int val = 0;
4977bab6 2862 int i;
dfac187e
BS
2863 rtx substed_operand[MAX_RECOG_OPERANDS];
2864 rtx orig_operand[MAX_RECOG_OPERANDS];
32131a9c 2865 struct elim_table *ep;
ace3ffcd 2866 rtx plus_src;
32131a9c 2867
dfac187e
BS
2868 if (! insn_is_asm && icode < 0)
2869 {
2870 if (GET_CODE (PATTERN (insn)) == USE
2871 || GET_CODE (PATTERN (insn)) == CLOBBER
2872 || GET_CODE (PATTERN (insn)) == ADDR_VEC
2873 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
2874 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
2875 return 0;
2876 abort ();
2877 }
2878
f8cfc6aa 2879 if (old_set != 0 && REG_P (SET_DEST (old_set))
774672d2 2880 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
32131a9c
RK
2881 {
2882 /* Check for setting an eliminable register. */
2883 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
774672d2 2884 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
32131a9c 2885 {
dd1eab0a
RK
2886#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2887 /* If this is setting the frame pointer register to the
2888 hardware frame pointer register and this is an elimination
2889 that will be done (tested above), this insn is really
2890 adjusting the frame pointer downward to compensate for
2891 the adjustment done before a nonlocal goto. */
2892 if (ep->from == FRAME_POINTER_REGNUM
2893 && ep->to == HARD_FRAME_POINTER_REGNUM)
2894 {
75eefe3f
UW
2895 rtx base = SET_SRC (old_set);
2896 rtx base_insn = insn;
b19ee4bd 2897 HOST_WIDE_INT offset = 0;
75eefe3f
UW
2898
2899 while (base != ep->to_rtx)
8026ebba 2900 {
75eefe3f
UW
2901 rtx prev_insn, prev_set;
2902
2903 if (GET_CODE (base) == PLUS
2904 && GET_CODE (XEXP (base, 1)) == CONST_INT)
2905 {
2906 offset += INTVAL (XEXP (base, 1));
2907 base = XEXP (base, 0);
2908 }
2909 else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
2910 && (prev_set = single_set (prev_insn)) != 0
2911 && rtx_equal_p (SET_DEST (prev_set), base))
2912 {
2913 base = SET_SRC (prev_set);
2914 base_insn = prev_insn;
2915 }
2916 else
2917 break;
8026ebba 2918 }
dd1eab0a 2919
75eefe3f 2920 if (base == ep->to_rtx)
dd1eab0a 2921 {
c77fbfbe
GK
2922 rtx src
2923 = plus_constant (ep->to_rtx, offset - ep->offset);
2924
2925 new_body = old_body;
2926 if (! replace)
2927 {
2928 new_body = copy_insn (old_body);
2929 if (REG_NOTES (insn))
2930 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
2931 }
2932 PATTERN (insn) = new_body;
2933 old_set = single_set (insn);
2934
2935 /* First see if this insn remains valid when we
2936 make the change. If not, keep the INSN_CODE
2937 the same and let reload fit it up. */
2938 validate_change (insn, &SET_SRC (old_set), src, 1);
2939 validate_change (insn, &SET_DEST (old_set),
2940 ep->to_rtx, 1);
2941 if (! apply_change_group ())
dd1eab0a 2942 {
c77fbfbe
GK
2943 SET_SRC (old_set) = src;
2944 SET_DEST (old_set) = ep->to_rtx;
dd1eab0a
RK
2945 }
2946
2947 val = 1;
2948 goto done;
2949 }
2950 }
2951#endif
2952
32131a9c
RK
2953 /* In this case this insn isn't serving a useful purpose. We
2954 will delete it in reload_as_needed once we know that this
2955 elimination is, in fact, being done.
2956
abc95ed3 2957 If REPLACE isn't set, we can't delete this insn, but needn't
32131a9c
RK
2958 process it since it won't be used unless something changes. */
2959 if (replace)
8a34409d 2960 {
1d7254c5 2961 delete_dead_insn (insn);
8a34409d
RH
2962 return 1;
2963 }
32131a9c
RK
2964 val = 1;
2965 goto done;
2966 }
aa5524a9 2967 }
32131a9c 2968
aa5524a9 2969 /* We allow one special case which happens to work on all machines we
ace3ffcd
KH
2970 currently support: a single set with the source or a REG_EQUAL
2971 note being a PLUS of an eliminable register and a constant. */
2972 plus_src = 0;
f8cfc6aa 2973 if (old_set && REG_P (SET_DEST (old_set)))
aa5524a9 2974 {
ace3ffcd
KH
2975 /* First see if the source is of the form (plus (reg) CST). */
2976 if (GET_CODE (SET_SRC (old_set)) == PLUS
f8cfc6aa 2977 && REG_P (XEXP (SET_SRC (old_set), 0))
ace3ffcd
KH
2978 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT
2979 && REGNO (XEXP (SET_SRC (old_set), 0)) < FIRST_PSEUDO_REGISTER)
2980 plus_src = SET_SRC (old_set);
f8cfc6aa 2981 else if (REG_P (SET_SRC (old_set)))
ace3ffcd
KH
2982 {
2983 /* Otherwise, see if we have a REG_EQUAL note of the form
2984 (plus (reg) CST). */
2985 rtx links;
2986 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
2987 {
2988 if (REG_NOTE_KIND (links) == REG_EQUAL
2989 && GET_CODE (XEXP (links, 0)) == PLUS
f8cfc6aa 2990 && REG_P (XEXP (XEXP (links, 0), 0))
ace3ffcd
KH
2991 && GET_CODE (XEXP (XEXP (links, 0), 1)) == CONST_INT
2992 && REGNO (XEXP (XEXP (links, 0), 0)) < FIRST_PSEUDO_REGISTER)
2993 {
2994 plus_src = XEXP (links, 0);
2995 break;
2996 }
2997 }
2998 }
2999 }
3000 if (plus_src)
3001 {
3002 rtx reg = XEXP (plus_src, 0);
3003 HOST_WIDE_INT offset = INTVAL (XEXP (plus_src, 1));
32131a9c 3004
aa5524a9
BS
3005 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3006 if (ep->from_rtx == reg && ep->can_eliminate)
3007 {
3008 offset += ep->offset;
32131a9c 3009
aa5524a9
BS
3010 if (offset == 0)
3011 {
f34c06e5
R
3012 int num_clobbers;
3013 /* We assume here that if we need a PARALLEL with
3014 CLOBBERs for this assignment, we can do with the
3015 MATCH_SCRATCHes that add_clobbers allocates.
3016 There's not much we can do if that doesn't work. */
aa5524a9
BS
3017 PATTERN (insn) = gen_rtx_SET (VOIDmode,
3018 SET_DEST (old_set),
3019 ep->to_rtx);
f34c06e5
R
3020 num_clobbers = 0;
3021 INSN_CODE (insn) = recog (PATTERN (insn), insn, &num_clobbers);
3022 if (num_clobbers)
3023 {
3024 rtvec vec = rtvec_alloc (num_clobbers + 1);
3025
3026 vec->elem[0] = PATTERN (insn);
3027 PATTERN (insn) = gen_rtx_PARALLEL (VOIDmode, vec);
3028 add_clobbers (PATTERN (insn), INSN_CODE (insn));
3029 }
aa5524a9
BS
3030 if (INSN_CODE (insn) < 0)
3031 abort ();
3032 }
ace3ffcd
KH
3033 /* If we have a nonzero offset, and the source is already
3034 a simple REG, the following transformation would
3035 increase the cost of the insn by replacing a simple REG
3036 with (plus (reg sp) CST). So try only when plus_src
3037 comes from old_set proper, not REG_NOTES. */
3038 else if (SET_SRC (old_set) == plus_src)
aa5524a9
BS
3039 {
3040 new_body = old_body;
3041 if (! replace)
3042 {
3043 new_body = copy_insn (old_body);
3044 if (REG_NOTES (insn))
3045 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3046 }
3047 PATTERN (insn) = new_body;
3048 old_set = single_set (insn);
922d9d40 3049
aa5524a9
BS
3050 XEXP (SET_SRC (old_set), 0) = ep->to_rtx;
3051 XEXP (SET_SRC (old_set), 1) = GEN_INT (offset);
3052 }
ace3ffcd
KH
3053 else
3054 break;
3055
aa5524a9
BS
3056 val = 1;
3057 /* This can't have an effect on elimination offsets, so skip right
3058 to the end. */
3059 goto done;
3060 }
32131a9c
RK
3061 }
3062
dfac187e
BS
3063 /* Determine the effects of this insn on elimination offsets. */
3064 elimination_effects (old_body, 0);
3065
3066 /* Eliminate all eliminable registers occurring in operands that
3067 can be handled by reload. */
3068 extract_insn (insn);
dfac187e
BS
3069 for (i = 0; i < recog_data.n_operands; i++)
3070 {
3071 orig_operand[i] = recog_data.operand[i];
3072 substed_operand[i] = recog_data.operand[i];
3073
3074 /* For an asm statement, every operand is eliminable. */
3075 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3076 {
3077 /* Check for setting a register that we know about. */
3078 if (recog_data.operand_type[i] != OP_IN
f8cfc6aa 3079 && REG_P (orig_operand[i]))
dfac187e
BS
3080 {
3081 /* If we are assigning to a register that can be eliminated, it
3082 must be as part of a PARALLEL, since the code above handles
3083 single SETs. We must indicate that we can no longer
3084 eliminate this reg. */
3085 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3086 ep++)
761fa0f7 3087 if (ep->from_rtx == orig_operand[i])
dfac187e
BS
3088 ep->can_eliminate = 0;
3089 }
3090
3091 substed_operand[i] = eliminate_regs (recog_data.operand[i], 0,
3092 replace ? insn : NULL_RTX);
3093 if (substed_operand[i] != orig_operand[i])
4977bab6 3094 val = 1;
dfac187e
BS
3095 /* Terminate the search in check_eliminable_occurrences at
3096 this point. */
3097 *recog_data.operand_loc[i] = 0;
3098
3099 /* If an output operand changed from a REG to a MEM and INSN is an
3100 insn, write a CLOBBER insn. */
3101 if (recog_data.operand_type[i] != OP_IN
f8cfc6aa 3102 && REG_P (orig_operand[i])
3c0cb5de 3103 && MEM_P (substed_operand[i])
dfac187e
BS
3104 && replace)
3105 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, orig_operand[i]),
3106 insn);
3107 }
3108 }
3109
3110 for (i = 0; i < recog_data.n_dups; i++)
3111 *recog_data.dup_loc[i]
1d7254c5 3112 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
dfac187e
BS
3113
3114 /* If any eliminable registers remain, they aren't eliminable anymore. */
3115 check_eliminable_occurrences (old_body);
32131a9c 3116
dfac187e
BS
3117 /* Substitute the operands; the new values are in the substed_operand
3118 array. */
3119 for (i = 0; i < recog_data.n_operands; i++)
3120 *recog_data.operand_loc[i] = substed_operand[i];
3121 for (i = 0; i < recog_data.n_dups; i++)
1d7254c5 3122 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
32131a9c 3123
dfac187e 3124 /* If we are replacing a body that was a (set X (plus Y Z)), try to
32131a9c
RK
3125 re-recognize the insn. We do this in case we had a simple addition
3126 but now can do this as a load-address. This saves an insn in this
dfac187e
BS
3127 common case.
3128 If re-recognition fails, the old insn code number will still be used,
3129 and some register operands may have changed into PLUS expressions.
3130 These will be handled by find_reloads by loading them into a register
1d7254c5 3131 again. */
32131a9c 3132
dfac187e 3133 if (val)
32131a9c 3134 {
7c791b13
RK
3135 /* If we aren't replacing things permanently and we changed something,
3136 make another copy to ensure that all the RTL is new. Otherwise
3137 things can go wrong if find_reload swaps commutative operands
0f41302f 3138 and one is inside RTL that has been copied while the other is not. */
dfac187e
BS
3139 new_body = old_body;
3140 if (! replace)
1b3b5765
BS
3141 {
3142 new_body = copy_insn (old_body);
3143 if (REG_NOTES (insn))
3144 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3145 }
dfac187e 3146 PATTERN (insn) = new_body;
7c791b13 3147
774672d2
RK
3148 /* If we had a move insn but now we don't, rerecognize it. This will
3149 cause spurious re-recognition if the old move had a PARALLEL since
3150 the new one still will, but we can't call single_set without
3151 having put NEW_BODY into the insn and the re-recognition won't
3152 hurt in this rare case. */
dfac187e
BS
3153 /* ??? Why this huge if statement - why don't we just rerecognize the
3154 thing always? */
3155 if (! insn_is_asm
3156 && old_set != 0
f8cfc6aa 3157 && ((REG_P (SET_SRC (old_set))
774672d2 3158 && (GET_CODE (new_body) != SET
f8cfc6aa 3159 || !REG_P (SET_SRC (new_body))))
774672d2 3160 /* If this was a load from or store to memory, compare
1ccbefce
RH
3161 the MEM in recog_data.operand to the one in the insn.
3162 If they are not equal, then rerecognize the insn. */
774672d2 3163 || (old_set != 0
3c0cb5de 3164 && ((MEM_P (SET_SRC (old_set))
1ccbefce 3165 && SET_SRC (old_set) != recog_data.operand[1])
3c0cb5de 3166 || (MEM_P (SET_DEST (old_set))
1ccbefce 3167 && SET_DEST (old_set) != recog_data.operand[0])))
774672d2
RK
3168 /* If this was an add insn before, rerecognize. */
3169 || GET_CODE (SET_SRC (old_set)) == PLUS))
4a5d0fb5 3170 {
dfac187e
BS
3171 int new_icode = recog (PATTERN (insn), insn, 0);
3172 if (new_icode < 0)
3173 INSN_CODE (insn) = icode;
4a5d0fb5 3174 }
dfac187e 3175 }
32131a9c 3176
dfac187e
BS
3177 /* Restore the old body. If there were any changes to it, we made a copy
3178 of it while the changes were still in place, so we'll correctly return
3179 a modified insn below. */
3180 if (! replace)
3181 {
3182 /* Restore the old body. */
3183 for (i = 0; i < recog_data.n_operands; i++)
3184 *recog_data.operand_loc[i] = orig_operand[i];
3185 for (i = 0; i < recog_data.n_dups; i++)
1d7254c5 3186 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
32131a9c 3187 }
a8fdc208 3188
dfac187e
BS
3189 /* Update all elimination pairs to reflect the status after the current
3190 insn. The changes we make were determined by the earlier call to
3191 elimination_effects.
a8efe40d 3192
423adbb9 3193 We also detect cases where register elimination cannot be done,
32131a9c
RK
3194 namely, if a register would be both changed and referenced outside a MEM
3195 in the resulting insn since such an insn is often undefined and, even if
3196 not, we cannot know what meaning will be given to it. Note that it is
3197 valid to have a register used in an address in an insn that changes it
3198 (presumably with a pre- or post-increment or decrement).
3199
3200 If anything changes, return nonzero. */
3201
32131a9c
RK
3202 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3203 {
3204 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3205 ep->can_eliminate = 0;
3206
3207 ep->ref_outside_mem = 0;
3208
3209 if (ep->previous_offset != ep->offset)
3210 val = 1;
32131a9c
RK
3211 }
3212
3213 done:
9faa82d8 3214 /* If we changed something, perform elimination in REG_NOTES. This is
05b4c365
RK
3215 needed even when REPLACE is zero because a REG_DEAD note might refer
3216 to a register that we eliminate and could cause a different number
3217 of spill registers to be needed in the final reload pass than in
3218 the pre-passes. */
20748cab 3219 if (val && REG_NOTES (insn) != 0)
1914f5da 3220 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
05b4c365 3221
32131a9c
RK
3222 return val;
3223}
3224
cb2afeb3
R
3225/* Loop through all elimination pairs: record each pair's current offset
3226 as its previous offset and recalculate the number of pairs that are
3227 no longer at their initial offset. */
3230
3231static void
0c20a65f 3232update_eliminable_offsets (void)
cb2afeb3
R
3233{
3234 struct elim_table *ep;
3235
3236 num_not_at_initial_offset = 0;
3237 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3238 {
3239 ep->previous_offset = ep->offset;
3240 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3241 num_not_at_initial_offset++;
cb2afeb3
R
3242 }
3243}
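
/* Illustrative sketch, not part of reload1.c; the toy_* names are invented.
   It shows the arithmetic the elimination machinery relies on, assuming a
   register FROM that always equals TO plus a tracked offset: a reference
   FROM+DISP can then be rewritten as TO+(DISP+offset), and stack adjustments
   only require updating the tracked offset, which is the bookkeeping the
   function above maintains per insn.  */

#include <stdio.h>

struct toy_elim
{
  const char *from, *to;
  long initial_offset;		/* offset at function entry */
  long offset;			/* current offset, updated per insn */
};

/* Displacement to use once FROM has been replaced by TO.  */
static long
toy_eliminated_disp (const struct toy_elim *ep, long disp)
{
  return disp + ep->offset;
}

int
main (void)
{
  struct toy_elim fp_to_sp = { "fp", "sp", 16, 16 };

  /* An 8-byte push moves the stack pointer on a downward-growing stack,
     so the fp->sp offset grows by 8.  */
  fp_to_sp.offset += 8;

  printf ("fp+4 -> %s+%ld\n", fp_to_sp.to, toy_eliminated_disp (&fp_to_sp, 4));
  return 0;
}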
3244
32131a9c
RK
3245/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3246 replacement we currently believe is valid, mark it as not eliminable if X
3247 modifies DEST in any way other than by adding a constant integer to it.
3248
3249 If DEST is the frame pointer, we do nothing because we assume that
3ec2ea3e
DE
3250 all assignments to the hard frame pointer are nonlocal gotos and are being
3251 done at a time when they are valid and do not disturb anything else.
32131a9c 3252 Some machines want to eliminate a fake argument pointer with either the
3ec2ea3e
DE
3253 frame or stack pointer. Assignments to the hard frame pointer must not
3254 prevent this elimination.
32131a9c
RK
3255
3256 Called via note_stores from reload before starting its passes to scan
3257 the insns of the function. */
3258
3259static void
0c20a65f 3260mark_not_eliminable (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
32131a9c 3261{
b3694847 3262 unsigned int i;
32131a9c
RK
3263
3264 /* A SUBREG of a hard register here is just changing its mode. We should
3265 not see a SUBREG of an eliminable hard register, but check just in
3266 case. */
3267 if (GET_CODE (dest) == SUBREG)
3268 dest = SUBREG_REG (dest);
3269
3ec2ea3e 3270 if (dest == hard_frame_pointer_rtx)
32131a9c
RK
3271 return;
3272
3273 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3274 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3275 && (GET_CODE (x) != SET
3276 || GET_CODE (SET_SRC (x)) != PLUS
3277 || XEXP (SET_SRC (x), 0) != dest
3278 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3279 {
3280 reg_eliminate[i].can_eliminate_previous
3281 = reg_eliminate[i].can_eliminate = 0;
3282 num_eliminable--;
3283 }
3284}
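
/* Illustrative sketch, not part of reload1.c; the toy_* types are invented
   stand-ins for rtx.  It isolates the shape test applied above: a store into
   an elimination target is tolerated only when it has the form
   (set to_reg (plus to_reg const_int)); any other modification disables the
   elimination.  */

#include <stdbool.h>

enum toy_code { TOY_REG, TOY_CONST_INT, TOY_PLUS, TOY_SET };

struct toy_rtx
{
  enum toy_code code;
  int regno;				/* valid for TOY_REG */
  const struct toy_rtx *op0, *op1;	/* valid for TOY_PLUS and TOY_SET */
};

/* Return true if SET merely adds a constant to hard register TO_REGNO.  */
bool
toy_constant_adjustment_p (const struct toy_rtx *set, int to_regno)
{
  const struct toy_rtx *src;

  if (set->code != TOY_SET
      || set->op0->code != TOY_REG
      || set->op0->regno != to_regno)
    return false;

  src = set->op1;
  return (src->code == TOY_PLUS
	  && src->op0->code == TOY_REG
	  && src->op0->regno == to_regno
	  && src->op1->code == TOY_CONST_INT);
}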
09dd1133 3285
c47f5ea5
BS
3286/* Verify that the initial elimination offsets did not change since the
3287 last call to set_initial_elim_offsets. This is used to catch cases
3288 where something illegal happened during reload_as_needed that could
3289 cause incorrect code to be generated if we did not check for it. */
c8d8ed65 3290
c47f5ea5 3291static void
0c20a65f 3292verify_initial_elim_offsets (void)
c47f5ea5 3293{
b19ee4bd 3294 HOST_WIDE_INT t;
c47f5ea5
BS
3295
3296#ifdef ELIMINABLE_REGS
3297 struct elim_table *ep;
3298
3299 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3300 {
3301 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3302 if (t != ep->initial_offset)
3303 abort ();
3304 }
3305#else
3306 INITIAL_FRAME_POINTER_OFFSET (t);
3307 if (t != reg_eliminate[0].initial_offset)
3308 abort ();
05d10675 3309#endif
c47f5ea5
BS
3310}
3311
09dd1133 3312/* Reset all offsets on eliminable registers to their initial values. */
1d813780 3313
09dd1133 3314static void
0c20a65f 3315set_initial_elim_offsets (void)
09dd1133 3316{
1f3b1e1a 3317 struct elim_table *ep = reg_eliminate;
09dd1133
BS
3318
3319#ifdef ELIMINABLE_REGS
1f3b1e1a 3320 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
09dd1133
BS
3321 {
3322 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
1f3b1e1a 3323 ep->previous_offset = ep->offset = ep->initial_offset;
09dd1133
BS
3324 }
3325#else
1f3b1e1a
JL
3326 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3327 ep->previous_offset = ep->offset = ep->initial_offset;
09dd1133
BS
3328#endif
3329
3330 num_not_at_initial_offset = 0;
1f3b1e1a 3331}
09dd1133 3332
1f3b1e1a
JL
3333/* Initialize the known label offsets.
3334 Set a known offset for each forced label to be at the initial offset
3335 of each elimination. We do this because we assume that all
3336 computed jumps occur from a location where each elimination is
3337 at its initial offset.
3338 For all other labels, show that we don't know the offsets. */
09dd1133 3339
1f3b1e1a 3340static void
0c20a65f 3341set_initial_label_offsets (void)
1f3b1e1a
JL
3342{
3343 rtx x;
4cc0fdd2 3344 memset (offsets_known_at, 0, num_labels);
09dd1133
BS
3345
3346 for (x = forced_labels; x; x = XEXP (x, 1))
3347 if (XEXP (x, 0))
3348 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3349}
3350
1f3b1e1a
JL
3351/* Set all elimination offsets to the known values for the code label given
3352 by INSN. */
1d813780 3353
1f3b1e1a 3354static void
0c20a65f 3355set_offsets_for_label (rtx insn)
1f3b1e1a 3356{
973838fd 3357 unsigned int i;
1f3b1e1a
JL
3358 int label_nr = CODE_LABEL_NUMBER (insn);
3359 struct elim_table *ep;
3360
3361 num_not_at_initial_offset = 0;
3362 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3363 {
4cc0fdd2
JDA
3364 ep->offset = ep->previous_offset
3365 = offsets_at[label_nr - first_label_num][i];
1f3b1e1a
JL
3366 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3367 num_not_at_initial_offset++;
3368 }
3369}
3370
09dd1133 3371/* See if anything that happened changes which eliminations are valid.
981f6289 3372 For example, on the SPARC, whether or not the frame pointer can
09dd1133
BS
3373 be eliminated can depend on what registers have been used. We need
3374 not check some conditions again (such as flag_omit_frame_pointer)
3375 since they can't have changed. */
3376
3377static void
0c20a65f 3378update_eliminables (HARD_REG_SET *pset)
09dd1133 3379{
09dd1133 3380 int previous_frame_pointer_needed = frame_pointer_needed;
09dd1133
BS
3381 struct elim_table *ep;
3382
3383 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3384 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3385#ifdef ELIMINABLE_REGS
3386 || ! CAN_ELIMINATE (ep->from, ep->to)
3387#endif
3388 )
3389 ep->can_eliminate = 0;
3390
3391 /* Look for the case where we have discovered that we can't replace
3392 register A with register B and that means that we will now be
3393 trying to replace register A with register C. This means we can
3394 no longer replace register C with register B and we need to disable
3395 such an elimination, if it exists. This occurs often with A == ap,
3396 B == sp, and C == fp. */
3397
3398 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3399 {
3400 struct elim_table *op;
b3694847 3401 int new_to = -1;
09dd1133
BS
3402
3403 if (! ep->can_eliminate && ep->can_eliminate_previous)
3404 {
3405 /* Find the current elimination for ep->from, if there is a
3406 new one. */
3407 for (op = reg_eliminate;
3408 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3409 if (op->from == ep->from && op->can_eliminate)
3410 {
3411 new_to = op->to;
3412 break;
3413 }
3414
3415 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3416 disable it. */
3417 for (op = reg_eliminate;
3418 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3419 if (op->from == new_to && op->to == ep->to)
3420 op->can_eliminate = 0;
3421 }
3422 }
3423
3424 /* See if any registers that we thought we could eliminate the previous
3425 time are no longer eliminable. If so, something has changed and we
3426 must spill the register. Also, recompute the number of eliminable
3427 registers and see if the frame pointer is needed; it is if there is
3428 no elimination of the frame pointer that we can perform. */
3429
3430 frame_pointer_needed = 1;
3431 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3432 {
3433 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3434 && ep->to != HARD_FRAME_POINTER_REGNUM)
3435 frame_pointer_needed = 0;
3436
3437 if (! ep->can_eliminate && ep->can_eliminate_previous)
3438 {
3439 ep->can_eliminate_previous = 0;
3440 SET_HARD_REG_BIT (*pset, ep->from);
3441 num_eliminable--;
3442 }
3443 }
3444
09dd1133
BS
3445 /* If we didn't need a frame pointer last time, but we do now, spill
3446 the hard frame pointer. */
3447 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3448 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
09dd1133
BS
3449}
3450
3451/* Initialize the table of registers to eliminate. */
1d813780 3452
09dd1133 3453static void
0c20a65f 3454init_elim_table (void)
09dd1133
BS
3455{
3456 struct elim_table *ep;
590cf94d 3457#ifdef ELIMINABLE_REGS
0b5826ac 3458 const struct elim_table_1 *ep1;
590cf94d 3459#endif
09dd1133 3460
590cf94d 3461 if (!reg_eliminate)
703ad42b 3462 reg_eliminate = xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
05d10675 3463
09dd1133
BS
3464 /* Does this function require a frame pointer? */
3465
3466 frame_pointer_needed = (! flag_omit_frame_pointer
09dd1133
BS
3467 /* ?? If EXIT_IGNORE_STACK is set, we will not save
3468 and restore sp for alloca. So we can't eliminate
3469 the frame pointer in that case. At some point,
3470 we should improve this by emitting the
3471 sp-adjusting insns for this case. */
3472 || (current_function_calls_alloca
3473 && EXIT_IGNORE_STACK)
09dd1133
BS
3474 || FRAME_POINTER_REQUIRED);
3475
3476 num_eliminable = 0;
3477
3478#ifdef ELIMINABLE_REGS
590cf94d
KG
3479 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3480 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
09dd1133 3481 {
590cf94d
KG
3482 ep->from = ep1->from;
3483 ep->to = ep1->to;
09dd1133
BS
3484 ep->can_eliminate = ep->can_eliminate_previous
3485 = (CAN_ELIMINATE (ep->from, ep->to)
3486 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3487 }
3488#else
590cf94d
KG
3489 reg_eliminate[0].from = reg_eliminate_1[0].from;
3490 reg_eliminate[0].to = reg_eliminate_1[0].to;
09dd1133
BS
3491 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3492 = ! frame_pointer_needed;
3493#endif
3494
3495 /* Count the number of eliminable registers and build the FROM and TO
2fb00d7f 3496 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
f84d109f 3497 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
09dd1133
BS
3498 We depend on this. */
3499 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3500 {
3501 num_eliminable += ep->can_eliminate;
3502 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3503 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3504 }
3505}
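
/* Illustrative sketch, not part of reload1.c; the register numbers and TOY_*
   macros are invented for a made-up target.  It shows the shape of the
   ELIMINABLE_REGS table a target supplies and init_elim_table copies into
   reg_eliminate: each entry names a register we would like to do without and
   the register we would like to replace it with.  */

#define TOY_ARG_POINTER_REGNUM   16
#define TOY_FRAME_POINTER_REGNUM 14
#define TOY_STACK_POINTER_REGNUM 15

const struct { int from, to; } toy_eliminable_regs[] =
{
  { TOY_ARG_POINTER_REGNUM,   TOY_STACK_POINTER_REGNUM },
  { TOY_ARG_POINTER_REGNUM,   TOY_FRAME_POINTER_REGNUM },
  { TOY_FRAME_POINTER_REGNUM, TOY_STACK_POINTER_REGNUM },
};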
32131a9c
RK
3506\f
3507/* Kick all pseudos out of hard register REGNO.
32131a9c
RK
3508
3509 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3510 because we found we can't eliminate some register. In that case, no pseudos
3511 are allowed to be in the register, even if they are only in a block that
3512 doesn't require spill registers, unlike the case when we are spilling this
3513 hard reg to produce another spill register.
3514
3515 Any pseudos that need to be kicked out are recorded in spilled_pseudos. */
3516
03acd8f8 3517static void
0c20a65f 3518spill_hard_reg (unsigned int regno, int cant_eliminate)
32131a9c 3519{
b3694847 3520 int i;
32131a9c 3521
9ff3516a 3522 if (cant_eliminate)
03acd8f8
BS
3523 {
3524 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3525 regs_ever_live[regno] = 1;
3526 }
9ff3516a 3527
32131a9c
RK
3528 /* Spill every pseudo reg that was allocated to this reg
3529 or to something that overlaps this reg. */
3530
3531 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3532 if (reg_renumber[i] >= 0
770ae6cc
RK
3533 && (unsigned int) reg_renumber[i] <= regno
3534 && ((unsigned int) reg_renumber[i]
66fd46b6
JH
3535 + hard_regno_nregs[(unsigned int) reg_renumber[i]]
3536 [PSEUDO_REGNO_MODE (i)]
32131a9c 3537 > regno))
f5d8c9f4 3538 SET_REGNO_REG_SET (&spilled_pseudos, i);
03acd8f8 3539}
32131a9c 3540
03acd8f8
BS
3541/* I'm getting weird preprocessor errors if I use IOR_HARD_REG_SET
3542 from within EXECUTE_IF_SET_IN_REG_SET. Hence this awkwardness. */
770ae6cc 3543
03acd8f8 3544static void
0c20a65f 3545ior_hard_reg_set (HARD_REG_SET *set1, HARD_REG_SET *set2)
03acd8f8
BS
3546{
3547 IOR_HARD_REG_SET (*set1, *set2);
3548}
05d10675 3549
03acd8f8
BS
3550/* After find_reload_regs has been run for all insn that need reloads,
3551 and/or spill_hard_regs was called, this function is used to actually
3552 spill pseudo registers and try to reallocate them. It also sets up the
3553 spill_regs array for use by choose_reload_regs. */
a8fdc208 3554
03acd8f8 3555static int
0c20a65f 3556finish_spills (int global)
03acd8f8
BS
3557{
3558 struct insn_chain *chain;
3559 int something_changed = 0;
3560 int i;
3561
3562 /* Build the spill_regs array for the function. */
3563 /* If there are some registers still to eliminate and one of the spill regs
3564 wasn't ever used before, additional stack space may have to be
3565 allocated to store this register. Thus, we may have changed the offset
3566 between the stack and frame pointers, so mark that something has changed.
32131a9c 3567
03acd8f8
BS
3568 One might think that we need only set VAL to 1 if this is a call-used
3569 register. However, the set of registers that must be saved by the
3570 prologue is not identical to the call-used set. For example, the
3571 register used by the call insn for the return PC is a call-used register,
3572 but must be saved by the prologue. */
3573
3574 n_spills = 0;
3575 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3576 if (TEST_HARD_REG_BIT (used_spill_regs, i))
3577 {
3578 spill_reg_order[i] = n_spills;
3579 spill_regs[n_spills++] = i;
3580 if (num_eliminable && ! regs_ever_live[i])
3581 something_changed = 1;
3582 regs_ever_live[i] = 1;
3583 }
3584 else
3585 spill_reg_order[i] = -1;
3586
efc9bd41
RK
3587 EXECUTE_IF_SET_IN_REG_SET
3588 (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i,
3589 {
3590 /* Record the current hard register the pseudo is allocated to in
3591 pseudo_previous_regs so we avoid reallocating it to the same
3592 hard reg in a later pass. */
3593 if (reg_renumber[i] < 0)
3594 abort ();
3595
3596 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3597 /* Mark it as no longer having a hard register home. */
3598 reg_renumber[i] = -1;
3599 /* We will need to scan everything again. */
3600 something_changed = 1;
3601 });
7609e720 3602
03acd8f8
BS
3603 /* Retry global register allocation if possible. */
3604 if (global)
3605 {
703ad42b 3606 memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
03acd8f8
BS
3607 /* For every insn that needs reloads, set the registers used as spill
3608 regs in pseudo_forbidden_regs for every pseudo live across the
3609 insn. */
3610 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3611 {
3612 EXECUTE_IF_SET_IN_REG_SET
239a0f5b 3613 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i,
03acd8f8
BS
3614 {
3615 ior_hard_reg_set (pseudo_forbidden_regs + i,
3616 &chain->used_spill_regs);
3617 });
3618 EXECUTE_IF_SET_IN_REG_SET
239a0f5b 3619 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i,
03acd8f8
BS
3620 {
3621 ior_hard_reg_set (pseudo_forbidden_regs + i,
3622 &chain->used_spill_regs);
3623 });
3624 }
7609e720 3625
03acd8f8
BS
3626 /* Retry allocating the spilled pseudos. For each reg, merge the
3627 various reg sets that indicate which hard regs can't be used,
3628 and call retry_global_alloc.
05d10675 3629 We change spill_pseudos here to only contain pseudos that did not
03acd8f8
BS
3630 get a new hard register. */
3631 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3632 if (reg_old_renumber[i] != reg_renumber[i])
32131a9c 3633 {
03acd8f8
BS
3634 HARD_REG_SET forbidden;
3635 COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
3636 IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
3637 IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
3638 retry_global_alloc (i, forbidden);
3639 if (reg_renumber[i] >= 0)
f5d8c9f4 3640 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
32131a9c 3641 }
03acd8f8 3642 }
7609e720 3643
03acd8f8
BS
3644 /* Fix up the register information in the insn chain.
3645 This involves deleting those of the spilled pseudos which did not get
3646 a new hard register home from the live_{before,after} sets. */
7609e720
BS
3647 for (chain = reload_insn_chain; chain; chain = chain->next)
3648 {
03acd8f8
BS
3649 HARD_REG_SET used_by_pseudos;
3650 HARD_REG_SET used_by_pseudos2;
3651
239a0f5b
BS
3652 AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
3653 AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
03acd8f8
BS
3654
3655 /* Mark any unallocated hard regs as available for spills. That
3656 makes inheritance work somewhat better. */
3657 if (chain->need_reload)
3658 {
239a0f5b
BS
3659 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
3660 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
03acd8f8
BS
3661 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
3662
3663 /* Save the old value for the sanity test below. */
3664 COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);
3665
239a0f5b
BS
3666 compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
3667 compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
03acd8f8
BS
3668 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
3669 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
3670
3671 /* Make sure we only enlarge the set. */
3672 GO_IF_HARD_REG_SUBSET (used_by_pseudos2, chain->used_spill_regs, ok);
3673 abort ();
3674 ok:;
3675 }
7609e720 3676 }
03acd8f8
BS
3677
3678 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
3679 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3680 {
3681 int regno = reg_renumber[i];
3682 if (reg_old_renumber[i] == regno)
3683 continue;
05d10675 3684
03acd8f8
BS
3685 alter_reg (i, reg_old_renumber[i]);
3686 reg_old_renumber[i] = regno;
c263766c 3687 if (dump_file)
03acd8f8
BS
3688 {
3689 if (regno == -1)
c263766c 3690 fprintf (dump_file, " Register %d now on stack.\n\n", i);
03acd8f8 3691 else
c263766c 3692 fprintf (dump_file, " Register %d now in %d.\n\n",
03acd8f8
BS
3693 i, reg_renumber[i]);
3694 }
3695 }
3696
3697 return something_changed;
7609e720 3698}
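
/* Illustrative sketch, not part of reload1.c; TOY_NREGS and the toy_* names
   are invented.  It mirrors the bookkeeping at the top of finish_spills,
   which turns the set of used spill registers into the dense spill_regs
   array plus the reverse map spill_reg_order.  */

#include <stdbool.h>

#define TOY_NREGS 32

void
toy_build_spill_order (const bool used_spill[TOY_NREGS],
		       int spill_regs[TOY_NREGS],
		       int spill_reg_order[TOY_NREGS],
		       int *n_spills)
{
  int i;

  *n_spills = 0;
  for (i = 0; i < TOY_NREGS; i++)
    if (used_spill[i])
      {
	/* Hard reg I becomes spill register number *n_spills.  */
	spill_reg_order[i] = *n_spills;
	spill_regs[(*n_spills)++] = i;
      }
    else
      spill_reg_order[i] = -1;
}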
32131a9c 3699\f
d754127f 3700/* Find all paradoxical subregs within X and update reg_max_ref_width. */
32131a9c
RK
3701
3702static void
0c20a65f 3703scan_paradoxical_subregs (rtx x)
32131a9c 3704{
b3694847
SS
3705 int i;
3706 const char *fmt;
3707 enum rtx_code code = GET_CODE (x);
32131a9c
RK
3708
3709 switch (code)
3710 {
56f58d3a 3711 case REG:
32131a9c
RK
3712 case CONST_INT:
3713 case CONST:
3714 case SYMBOL_REF:
3715 case LABEL_REF:
3716 case CONST_DOUBLE:
69ef87e2 3717 case CONST_VECTOR: /* shouldn't happen, but just in case. */
32131a9c
RK
3718 case CC0:
3719 case PC:
32131a9c
RK
3720 case USE:
3721 case CLOBBER:
3722 return;
3723
3724 case SUBREG:
f8cfc6aa 3725 if (REG_P (SUBREG_REG (x))
32131a9c
RK
3726 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3727 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3728 = GET_MODE_SIZE (GET_MODE (x));
3729 return;
05d10675 3730
e9a25f70
JL
3731 default:
3732 break;
32131a9c
RK
3733 }
3734
3735 fmt = GET_RTX_FORMAT (code);
3736 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3737 {
3738 if (fmt[i] == 'e')
3739 scan_paradoxical_subregs (XEXP (x, i));
3740 else if (fmt[i] == 'E')
3741 {
b3694847 3742 int j;
1d7254c5 3743 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
32131a9c
RK
3744 scan_paradoxical_subregs (XVECEXP (x, i, j));
3745 }
3746 }
3747}
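
/* Illustrative sketch, not part of reload1.c; the toy_node type is an
   invented stand-in for rtx.  It shows the kind of recursive walk
   scan_paradoxical_subregs performs: whenever a register is referenced
   through a wider ("paradoxical") subreg, record that wider size so the
   register's stack slot can be made big enough.  (The sketch keeps the
   maximum size seen; the pass above simply stores the size it finds.)  */

struct toy_node
{
  int is_subreg;		/* nonzero for a SUBREG-like node */
  unsigned regno;		/* register wrapped by the subreg */
  unsigned outer_size;		/* size of the subreg's own mode */
  unsigned inner_size;		/* size of the wrapped register's mode */
  struct toy_node *op[2];	/* operands, either may be NULL */
};

void
toy_scan_paradoxical (struct toy_node *x, unsigned *max_ref_width)
{
  if (x == 0)
    return;

  if (x->is_subreg && x->outer_size > x->inner_size)
    {
      if (max_ref_width[x->regno] < x->outer_size)
	max_ref_width[x->regno] = x->outer_size;
      return;
    }

  toy_scan_paradoxical (x->op[0], max_ref_width);
  toy_scan_paradoxical (x->op[1], max_ref_width);
}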
3748\f
32131a9c
RK
3749/* Reload pseudo-registers into hard regs around each insn as needed.
3750 Additional register load insns are output before the insn that needs it
3751 and perhaps store insns after insns that modify the reloaded pseudo reg.
3752
3753 reg_last_reload_reg and reg_reloaded_contents keep track of
d08ea79f 3754 which registers are already available in reload registers.
32131a9c
RK
3755 We update these for the reloads that we perform,
3756 as the insns are scanned. */
3757
3758static void
0c20a65f 3759reload_as_needed (int live_known)
32131a9c 3760{
7609e720 3761 struct insn_chain *chain;
553687c9 3762#if defined (AUTO_INC_DEC)
b3694847 3763 int i;
973838fd 3764#endif
32131a9c 3765 rtx x;
32131a9c 3766
703ad42b
KG
3767 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
3768 memset (spill_reg_store, 0, sizeof spill_reg_store);
3769 reg_last_reload_reg = xcalloc (max_regno, sizeof (rtx));
3770 reg_has_output_reload = xmalloc (max_regno);
e6e52be0 3771 CLEAR_HARD_REG_SET (reg_reloaded_valid);
e3e9336f 3772 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
32131a9c 3773
1f3b1e1a 3774 set_initial_elim_offsets ();
32131a9c 3775
7609e720 3776 for (chain = reload_insn_chain; chain; chain = chain->next)
32131a9c 3777 {
0334ef47 3778 rtx prev = 0;
7609e720
BS
3779 rtx insn = chain->insn;
3780 rtx old_next = NEXT_INSN (insn);
32131a9c
RK
3781
3782 /* If we pass a label, copy the offsets from the label information
3783 into the current offsets of each elimination. */
4b4bf941 3784 if (LABEL_P (insn))
1f3b1e1a 3785 set_offsets_for_label (insn);
32131a9c 3786
2c3c49de 3787 else if (INSN_P (insn))
32131a9c 3788 {
449655a6 3789 rtx oldpat = copy_rtx (PATTERN (insn));
32131a9c 3790
2758481d
RS
3791 /* If this is a USE and CLOBBER of a MEM, ensure that any
3792 references to eliminable registers have been removed. */
3793
3794 if ((GET_CODE (PATTERN (insn)) == USE
3795 || GET_CODE (PATTERN (insn)) == CLOBBER)
3c0cb5de 3796 && MEM_P (XEXP (PATTERN (insn), 0)))
2758481d
RS
3797 XEXP (XEXP (PATTERN (insn), 0), 0)
3798 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
29ae5012 3799 GET_MODE (XEXP (PATTERN (insn), 0)),
1914f5da 3800 NULL_RTX);
2758481d 3801
32131a9c
RK
3802 /* If we need to do register elimination processing, do so.
3803 This might delete the insn, in which case we are done. */
2b49ee39 3804 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
32131a9c
RK
3805 {
3806 eliminate_regs_in_insn (insn, 1);
4b4bf941 3807 if (NOTE_P (insn))
cb2afeb3
R
3808 {
3809 update_eliminable_offsets ();
3810 continue;
3811 }
32131a9c
RK
3812 }
3813
7609e720
BS
3814 /* If need_elim is nonzero but need_reload is zero, one might think
3815 that we could simply set n_reloads to 0. However, find_reloads
3816 could have done some manipulation of the insn (such as swapping
3817 commutative operands), and these manipulations are lost during
3818 the first pass for every insn that needs register elimination.
3819 So the actions of find_reloads must be redone here. */
3820
03acd8f8
BS
3821 if (! chain->need_elim && ! chain->need_reload
3822 && ! chain->need_operand_change)
32131a9c
RK
3823 n_reloads = 0;
3824 /* First find the pseudo regs that must be reloaded for this insn.
3825 This info is returned in the tables reload_... (see reload.h).
3826 Also modify the body of INSN by substituting RELOAD
3827 rtx's for those pseudo regs. */
3828 else
3829 {
961192e1 3830 memset (reg_has_output_reload, 0, max_regno);
32131a9c
RK
3831 CLEAR_HARD_REG_SET (reg_is_output_reload);
3832
3833 find_reloads (insn, 1, spill_indirect_levels, live_known,
3834 spill_reg_order);
3835 }
3836
3837 if (n_reloads > 0)
3838 {
cb2afeb3 3839 rtx next = NEXT_INSN (insn);
3c3eeea6 3840 rtx p;
32131a9c 3841
cb2afeb3
R
3842 prev = PREV_INSN (insn);
3843
32131a9c
RK
3844 /* Now compute which reload regs to reload them into. Perhaps
3845 reusing reload regs from previous insns, or else output
3846 load insns to reload them. Maybe output store insns too.
3847 Record the choices of reload reg in reload_reg_rtx. */
03acd8f8 3848 choose_reload_regs (chain);
32131a9c 3849
05d10675 3850 /* Merge any reloads that we didn't combine for fear of
546b63fb
RK
3851 increasing the number of spill registers needed but now
3852 discover can be safely merged. */
f95182a4
ILT
3853 if (SMALL_REGISTER_CLASSES)
3854 merge_assigned_reloads (insn);
546b63fb 3855
32131a9c
RK
3856 /* Generate the insns to reload operands into or out of
3857 their reload regs. */
e04ca094 3858 emit_reload_insns (chain);
32131a9c
RK
3859
3860 /* Substitute the chosen reload regs from reload_reg_rtx
3861 into the insn's body (or perhaps into the bodies of other
3862 load and store insn that we just made for reloading
3863 and that we moved the structure into). */
f759eb8b 3864 subst_reloads (insn);
3c3eeea6
RK
3865
3866 /* If this was an ASM, make sure that all the reload insns
3867 we have generated are valid. If not, give an error
3868 and delete them. */
3869
3870 if (asm_noperands (PATTERN (insn)) >= 0)
3871 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
2c3c49de 3872 if (p != insn && INSN_P (p)
00dcfe80 3873 && GET_CODE (PATTERN (p)) != USE
3c3eeea6 3874 && (recog_memoized (p) < 0
0eadeb15 3875 || (extract_insn (p), ! constrain_operands (1))))
3c3eeea6
RK
3876 {
3877 error_for_asm (insn,
3878 "`asm' operand requires impossible reload");
ca6c03ca 3879 delete_insn (p);
3c3eeea6 3880 }
32131a9c 3881 }
5d7ef82a
BS
3882
3883 if (num_eliminable && chain->need_elim)
3884 update_eliminable_offsets ();
3885
32131a9c
RK
3886 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3887 is no longer validly lying around to save a future reload.
3888 Note that this does not detect pseudos that were reloaded
3889 for this insn in order to be stored in
3890 (obeying register constraints). That is correct; such reload
3891 registers ARE still valid. */
84832317 3892 note_stores (oldpat, forget_old_reloads_1, NULL);
32131a9c
RK
3893
3894 /* There may have been CLOBBER insns placed after INSN. So scan
3895 between INSN and NEXT and use them to forget old reloads. */
7609e720 3896 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4b4bf941 3897 if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
84832317 3898 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
32131a9c
RK
3899
3900#ifdef AUTO_INC_DEC
cb2afeb3
R
3901 /* Likewise for regs altered by auto-increment in this insn.
3902 REG_INC notes have been changed by reloading:
3903 find_reloads_address_1 records substitutions for them,
3904 which have been performed by subst_reloads above. */
3905 for (i = n_reloads - 1; i >= 0; i--)
3906 {
eceef4c9 3907 rtx in_reg = rld[i].in_reg;
cb2afeb3
R
3908 if (in_reg)
3909 {
3910 enum rtx_code code = GET_CODE (in_reg);
3911 /* PRE_INC / PRE_DEC will have the reload register ending up
3912 with the same value as the stack slot, but that doesn't
3913 hold true for POST_INC / POST_DEC. Either we have to
3914 convert the memory access to a true POST_INC / POST_DEC,
3915 or we can't use the reload register for inheritance. */
3916 if ((code == POST_INC || code == POST_DEC)
3917 && TEST_HARD_REG_BIT (reg_reloaded_valid,
eceef4c9 3918 REGNO (rld[i].reg_rtx))
04bbb0c5
JW
3919 /* Make sure it is the inc/dec pseudo, and not
3920 some other (e.g. output operand) pseudo. */
fc555370 3921 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
04bbb0c5 3922 == REGNO (XEXP (in_reg, 0))))
05d10675 3923
cb2afeb3 3924 {
eceef4c9 3925 rtx reload_reg = rld[i].reg_rtx;
cb2afeb3
R
3926 enum machine_mode mode = GET_MODE (reload_reg);
3927 int n = 0;
3928 rtx p;
3929
3930 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
3931 {
3932 /* We really want to ignore REG_INC notes here, so
3933 use PATTERN (p) as argument to reg_set_p. */
3934 if (reg_set_p (reload_reg, PATTERN (p)))
3935 break;
4b983fdc 3936 n = count_occurrences (PATTERN (p), reload_reg, 0);
cb2afeb3
R
3937 if (! n)
3938 continue;
3939 if (n == 1)
f67c2384
JL
3940 {
3941 n = validate_replace_rtx (reload_reg,
2fb00d7f
KH
3942 gen_rtx_fmt_e (code,
3943 mode,
3944 reload_reg),
f67c2384
JL
3945 p);
3946
3947 /* We must also verify that the constraints
3948 are met after the replacement. */
3949 extract_insn (p);
3950 if (n)
3951 n = constrain_operands (1);
3952 else
3953 break;
3954
3955 /* If the constraints were not met, then
3956 undo the replacement. */
3957 if (!n)
3958 {
2fb00d7f
KH
3959 validate_replace_rtx (gen_rtx_fmt_e (code,
3960 mode,
3961 reload_reg),
f67c2384
JL
3962 reload_reg, p);
3963 break;
3964 }
05d10675 3965
f67c2384 3966 }
cb2afeb3
R
3967 break;
3968 }
3969 if (n == 1)
02eb1393
R
3970 {
3971 REG_NOTES (p)
3972 = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
3973 REG_NOTES (p));
3974 /* Mark this as having an output reload so that the
3975 REG_INC processing code below won't invalidate
3976 the reload for inheritance. */
3977 SET_HARD_REG_BIT (reg_is_output_reload,
3978 REGNO (reload_reg));
3979 reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
3980 }
cb2afeb3 3981 else
1d7254c5 3982 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
84832317 3983 NULL);
cb2afeb3 3984 }
02eb1393
R
3985 else if ((code == PRE_INC || code == PRE_DEC)
3986 && TEST_HARD_REG_BIT (reg_reloaded_valid,
eceef4c9 3987 REGNO (rld[i].reg_rtx))
02eb1393
R
3988 /* Make sure it is the inc/dec pseudo, and not
3989 some other (e.g. output operand) pseudo. */
fc555370 3990 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
02eb1393
R
3991 == REGNO (XEXP (in_reg, 0))))
3992 {
3993 SET_HARD_REG_BIT (reg_is_output_reload,
eceef4c9 3994 REGNO (rld[i].reg_rtx));
02eb1393
R
3995 reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
3996 }
cb2afeb3
R
3997 }
3998 }
02eb1393
R
3999 /* If a pseudo that got a hard register is auto-incremented,
4000 we must purge records of copying it into pseudos without
4001 hard registers. */
32131a9c
RK
4002 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4003 if (REG_NOTE_KIND (x) == REG_INC)
4004 {
4005 /* See if this pseudo reg was reloaded in this insn.
4006 If so, its last-reload info is still valid
4007 because it is based on this insn's reload. */
4008 for (i = 0; i < n_reloads; i++)
eceef4c9 4009 if (rld[i].out == XEXP (x, 0))
32131a9c
RK
4010 break;
4011
08fb99fa 4012 if (i == n_reloads)
84832317 4013 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
32131a9c
RK
4014 }
4015#endif
4016 }
4017 /* A reload reg's contents are unknown after a label. */
4b4bf941 4018 if (LABEL_P (insn))
e6e52be0 4019 CLEAR_HARD_REG_SET (reg_reloaded_valid);
32131a9c
RK
4020
4021 /* Don't assume a reload reg is still good after a call insn
e3e9336f
DJ
4022 if it is a call-used reg, or if it contains a value that will
4023 be partially clobbered by the call. */
4b4bf941 4024 else if (CALL_P (insn))
e3e9336f 4025 {
8e2e89f7 4026 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
e3e9336f
DJ
4027 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4028 }
32131a9c 4029 }
ff154f78
MM
4030
4031 /* Clean up. */
4032 free (reg_last_reload_reg);
4033 free (reg_has_output_reload);
32131a9c
RK
4034}
4035
4036/* Discard all record of any value reloaded from X,
4037 or reloaded in X from someplace else;
4038 unless X is an output reload reg of the current insn.
4039
4040 X may be a hard reg (the reload reg)
4041 or it may be a pseudo reg that was reloaded from. */
4042
4043static void
0c20a65f
AJ
4044forget_old_reloads_1 (rtx x, rtx ignored ATTRIBUTE_UNUSED,
4045 void *data ATTRIBUTE_UNUSED)
32131a9c 4046{
770ae6cc
RK
4047 unsigned int regno;
4048 unsigned int nr;
0a2e51a9 4049
ddef6bc7
JJ
4050 /* note_stores does give us subregs of hard regs,
4051 subreg_regno_offset will abort if it is not a hard reg. */
0a2e51a9
RS
4052 while (GET_CODE (x) == SUBREG)
4053 {
fefac463
AH
4054 /* We ignore the subreg offset when calculating the regno,
4055 because we are using the entire underlying hard register
4056 below. */
0a2e51a9
RS
4057 x = SUBREG_REG (x);
4058 }
32131a9c 4059
f8cfc6aa 4060 if (!REG_P (x))
32131a9c
RK
4061 return;
4062
fefac463 4063 regno = REGNO (x);
32131a9c
RK
4064
4065 if (regno >= FIRST_PSEUDO_REGISTER)
4066 nr = 1;
4067 else
4068 {
770ae6cc
RK
4069 unsigned int i;
4070
66fd46b6 4071 nr = hard_regno_nregs[regno][GET_MODE (x)];
32131a9c
RK
4072 /* Storing into a spilled-reg invalidates its contents.
4073 This can happen if a block-local pseudo is allocated to that reg
4074 and it wasn't spilled because this block's total need is 0.
4075 Then some insn might have an optional reload and use this reg. */
4076 for (i = 0; i < nr; i++)
e6e52be0
R
4077 /* But don't do this if the reg actually serves as an output
4078 reload reg in the current instruction. */
4079 if (n_reloads == 0
4080 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
5d77a50c
BS
4081 {
4082 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
e3e9336f 4083 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, regno + i);
5d77a50c
BS
4084 spill_reg_store[regno + i] = 0;
4085 }
32131a9c
RK
4086 }
4087
4088 /* Since value of X has changed,
4089 forget any value previously copied from it. */
4090
4091 while (nr-- > 0)
4092 /* But don't forget a copy if this is the output reload
4093 that establishes the copy's validity. */
4094 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4095 reg_last_reload_reg[regno + nr] = 0;
4096}
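
/* Illustrative sketch, not part of reload1.c; TOY_NREGS and the toy_* names
   are invented.  It shows the invalidation idea behind forget_old_reloads_1,
   with a toy table that remembers which register currently holds a cached
   copy of each register's value.  */

#define TOY_NREGS 64

/* Entry R is the register caching R's value, or -1 for none.  */
static int toy_last_reload_reg[TOY_NREGS];

void
toy_forget_stores (unsigned regno, unsigned nregs)
{
  unsigned i;

  /* A store into regno..regno+nregs-1 makes any cached copy stale,
     so later inheritance must not reuse it.  */
  for (i = 0; i < nregs; i++)
    toy_last_reload_reg[regno + i] = -1;
}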
4097\f
32131a9c
RK
4098/* The following HARD_REG_SETs indicate when each hard register is
4099 used for a reload of various parts of the current insn. */
4100
9e3a9cf2
BS
4101/* If reg is unavailable for all reloads. */
4102static HARD_REG_SET reload_reg_unavailable;
32131a9c
RK
4103/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4104static HARD_REG_SET reload_reg_used;
546b63fb
RK
4105/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4106static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
47c8cf91
ILT
4107/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4108static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
546b63fb
RK
4109/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4110static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
47c8cf91
ILT
4111/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4112static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
546b63fb
RK
4113/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4114static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4115/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4116static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c
RK
4117/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4118static HARD_REG_SET reload_reg_used_in_op_addr;
893bc853
RK
4119/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4120static HARD_REG_SET reload_reg_used_in_op_addr_reload;
546b63fb
RK
4121/* If reg is in use for a RELOAD_FOR_INSN reload. */
4122static HARD_REG_SET reload_reg_used_in_insn;
4123/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4124static HARD_REG_SET reload_reg_used_in_other_addr;
32131a9c
RK
4125
4126/* If reg is in use as a reload reg for any sort of reload. */
4127static HARD_REG_SET reload_reg_used_at_all;
4128
be7ae2a4
RK
4129/* If reg is used as an inherited reload, we just mark the first register
4130 in the group. */
4131static HARD_REG_SET reload_reg_used_for_inherit;
4132
f1db3576
JL
4133/* Records which hard regs are used in any way, either as explicit use or
4134 by being allocated to a pseudo during any point of the current insn. */
4135static HARD_REG_SET reg_used_in_insn;
297927a8 4136
546b63fb
RK
4137/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4138 TYPE. MODE is used to indicate how many consecutive regs are
4139 actually used. */
32131a9c
RK
4140
4141static void
0c20a65f
AJ
4142mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4143 enum machine_mode mode)
32131a9c 4144{
66fd46b6 4145 unsigned int nregs = hard_regno_nregs[regno][mode];
770ae6cc 4146 unsigned int i;
32131a9c
RK
4147
4148 for (i = regno; i < nregs + regno; i++)
4149 {
546b63fb 4150 switch (type)
32131a9c
RK
4151 {
4152 case RELOAD_OTHER:
4153 SET_HARD_REG_BIT (reload_reg_used, i);
4154 break;
4155
546b63fb
RK
4156 case RELOAD_FOR_INPUT_ADDRESS:
4157 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
32131a9c
RK
4158 break;
4159
47c8cf91
ILT
4160 case RELOAD_FOR_INPADDR_ADDRESS:
4161 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4162 break;
4163
546b63fb
RK
4164 case RELOAD_FOR_OUTPUT_ADDRESS:
4165 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
32131a9c
RK
4166 break;
4167
47c8cf91
ILT
4168 case RELOAD_FOR_OUTADDR_ADDRESS:
4169 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4170 break;
4171
32131a9c
RK
4172 case RELOAD_FOR_OPERAND_ADDRESS:
4173 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4174 break;
4175
893bc853
RK
4176 case RELOAD_FOR_OPADDR_ADDR:
4177 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4178 break;
4179
546b63fb
RK
4180 case RELOAD_FOR_OTHER_ADDRESS:
4181 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4182 break;
4183
32131a9c 4184 case RELOAD_FOR_INPUT:
546b63fb 4185 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
32131a9c
RK
4186 break;
4187
4188 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4189 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4190 break;
4191
4192 case RELOAD_FOR_INSN:
4193 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
32131a9c
RK
4194 break;
4195 }
4196
4197 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4198 }
4199}
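
/* Illustrative sketch, not part of reload1.c; the toy_* names are invented.
   It shows the pattern mark_reload_reg_in_use follows, with a plain bitmask
   standing in for a HARD_REG_SET: a value in mode MODE may occupy several
   consecutive hard registers, and every one of them has to be marked busy
   for the relevant part of the insn.  */

void
toy_mark_regs (unsigned long *set, unsigned regno, unsigned nregs)
{
  unsigned i;

  /* Assumes regno + nregs does not exceed the bit width of unsigned long.  */
  for (i = regno; i < regno + nregs; i++)
    *set |= 1UL << i;
}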
4200
be7ae2a4
RK
4201/* Similarly, but show REGNO is no longer in use for a reload. */
4202
4203static void
0c20a65f
AJ
4204clear_reload_reg_in_use (unsigned int regno, int opnum,
4205 enum reload_type type, enum machine_mode mode)
be7ae2a4 4206{
66fd46b6 4207 unsigned int nregs = hard_regno_nregs[regno][mode];
770ae6cc 4208 unsigned int start_regno, end_regno, r;
be7ae2a4 4209 int i;
cb2afeb3
R
4210 /* A complication is that for some reload types, inheritance might
4211 allow multiple reloads of the same types to share a reload register.
4212 We set check_opnum if we have to check only reloads with the same
4213 operand number, and check_any if we have to check all reloads. */
4214 int check_opnum = 0;
4215 int check_any = 0;
4216 HARD_REG_SET *used_in_set;
be7ae2a4 4217
cb2afeb3 4218 switch (type)
be7ae2a4 4219 {
cb2afeb3
R
4220 case RELOAD_OTHER:
4221 used_in_set = &reload_reg_used;
4222 break;
be7ae2a4 4223
cb2afeb3
R
4224 case RELOAD_FOR_INPUT_ADDRESS:
4225 used_in_set = &reload_reg_used_in_input_addr[opnum];
4226 break;
be7ae2a4 4227
cb2afeb3
R
4228 case RELOAD_FOR_INPADDR_ADDRESS:
4229 check_opnum = 1;
4230 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4231 break;
47c8cf91 4232
cb2afeb3
R
4233 case RELOAD_FOR_OUTPUT_ADDRESS:
4234 used_in_set = &reload_reg_used_in_output_addr[opnum];
4235 break;
be7ae2a4 4236
cb2afeb3
R
4237 case RELOAD_FOR_OUTADDR_ADDRESS:
4238 check_opnum = 1;
4239 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4240 break;
47c8cf91 4241
cb2afeb3
R
4242 case RELOAD_FOR_OPERAND_ADDRESS:
4243 used_in_set = &reload_reg_used_in_op_addr;
4244 break;
be7ae2a4 4245
cb2afeb3
R
4246 case RELOAD_FOR_OPADDR_ADDR:
4247 check_any = 1;
4248 used_in_set = &reload_reg_used_in_op_addr_reload;
4249 break;
893bc853 4250
cb2afeb3
R
4251 case RELOAD_FOR_OTHER_ADDRESS:
4252 used_in_set = &reload_reg_used_in_other_addr;
4253 check_any = 1;
4254 break;
be7ae2a4 4255
cb2afeb3
R
4256 case RELOAD_FOR_INPUT:
4257 used_in_set = &reload_reg_used_in_input[opnum];
4258 break;
be7ae2a4 4259
cb2afeb3
R
4260 case RELOAD_FOR_OUTPUT:
4261 used_in_set = &reload_reg_used_in_output[opnum];
4262 break;
be7ae2a4 4263
cb2afeb3
R
4264 case RELOAD_FOR_INSN:
4265 used_in_set = &reload_reg_used_in_insn;
4266 break;
4267 default:
4268 abort ();
4269 }
4270 /* We resolve conflicts with remaining reloads of the same type by
68e82b83 4271 excluding the intervals of reload registers used by them from the
cb2afeb3
R
4272 interval of freed reload registers. Since we only keep track of
4273 one set of interval bounds, we might have to exclude somewhat
3e92902c 4274 more than what would be necessary if we used a HARD_REG_SET here.
cb2afeb3
R
4275 But this should only happen very infrequently, so there should
4276 be no reason to worry about it. */
05d10675 4277
cb2afeb3
R
4278 start_regno = regno;
4279 end_regno = regno + nregs;
4280 if (check_opnum || check_any)
4281 {
4282 for (i = n_reloads - 1; i >= 0; i--)
4283 {
eceef4c9
BS
4284 if (rld[i].when_needed == type
4285 && (check_any || rld[i].opnum == opnum)
4286 && rld[i].reg_rtx)
cb2afeb3 4287 {
770ae6cc
RK
4288 unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4289 unsigned int conflict_end
cb2afeb3 4290 = (conflict_start
66fd46b6 4291 + hard_regno_nregs[conflict_start][rld[i].mode]);
cb2afeb3
R
4292
4293 /* If there is an overlap with the first to-be-freed register,
4294 adjust the interval start. */
4295 if (conflict_start <= start_regno && conflict_end > start_regno)
4296 start_regno = conflict_end;
4297 /* Otherwise, if there is a conflict with one of the other
4298 to-be-freed registers, adjust the interval end. */
4299 if (conflict_start > start_regno && conflict_start < end_regno)
4300 end_regno = conflict_start;
4301 }
be7ae2a4
RK
4302 }
4303 }
770ae6cc
RK
4304
4305 for (r = start_regno; r < end_regno; r++)
4306 CLEAR_HARD_REG_BIT (*used_in_set, r);
be7ae2a4
RK
4307}
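
/* Illustrative sketch, not part of reload1.c; the toy_* names are invented.
   It isolates the single-interval approximation described in the comment
   above: the range of registers we want to free is trimmed against each
   conflicting reload's range, bumping the start or clipping the end, which
   may keep a few registers marked busy longer than a full HARD_REG_SET
   computation would.  */

void
toy_trim_free_interval (unsigned *free_start, unsigned *free_end,
			unsigned conflict_start, unsigned conflict_end)
{
  /* Overlap at the low end: start freeing after the conflict.  */
  if (conflict_start <= *free_start && conflict_end > *free_start)
    *free_start = conflict_end;
  /* Conflict further up: stop freeing where it begins.  */
  if (conflict_start > *free_start && conflict_start < *free_end)
    *free_end = conflict_start;
}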
4308
32131a9c 4309/* 1 if reg REGNO is free as a reload reg for a reload of the sort
546b63fb 4310 specified by OPNUM and TYPE. */
32131a9c
RK
4311
4312static int
0c20a65f 4313reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
32131a9c 4314{
546b63fb
RK
4315 int i;
4316
2edc8d65 4317 /* In use for a RELOAD_OTHER means it's not available for anything. */
9e3a9cf2
BS
4318 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4319 || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
32131a9c 4320 return 0;
546b63fb
RK
4321
4322 switch (type)
32131a9c
RK
4323 {
4324 case RELOAD_OTHER:
2edc8d65
RK
4325 /* In use for anything means we can't use it for RELOAD_OTHER. */
4326 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
224f1d71 4327 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
808ededc 4328 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
224f1d71
RK
4329 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4330 return 0;
4331
4332 for (i = 0; i < reload_n_operands; i++)
4333 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
47c8cf91 4334 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
224f1d71 4335 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 4336 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
224f1d71
RK
4337 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4338 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4339 return 0;
4340
4341 return 1;
32131a9c 4342
32131a9c 4343 case RELOAD_FOR_INPUT:
546b63fb
RK
4344 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4345 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4346 return 0;
4347
893bc853
RK
4348 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4349 return 0;
4350
546b63fb
RK
4351 /* If it is used for some other input, can't use it. */
4352 for (i = 0; i < reload_n_operands; i++)
4353 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4354 return 0;
4355
4356 /* If it is used in a later operand's address, can't use it. */
4357 for (i = opnum + 1; i < reload_n_operands; i++)
47c8cf91
ILT
4358 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4359 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
546b63fb
RK
4360 return 0;
4361
4362 return 1;
4363
4364 case RELOAD_FOR_INPUT_ADDRESS:
4365 /* Can't use a register if it is used for an input address for this
4366 operand or used as an input in an earlier one. */
47c8cf91
ILT
4367 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4368 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4369 return 0;
4370
4371 for (i = 0; i < opnum; i++)
4372 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4373 return 0;
4374
4375 return 1;
4376
4377 case RELOAD_FOR_INPADDR_ADDRESS:
4378 /* Can't use a register if it is used for an input address
05d10675
BS
4379 for this operand or used as an input in an earlier
4380 one. */
47c8cf91 4381 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
546b63fb
RK
4382 return 0;
4383
4384 for (i = 0; i < opnum; i++)
4385 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4386 return 0;
4387
4388 return 1;
4389
4390 case RELOAD_FOR_OUTPUT_ADDRESS:
4391 /* Can't use a register if it is used for an output address for this
d1d18b46
DJ
4392 operand or used as an output in this or a later operand. Note
4393 that multiple output operands are emitted in reverse order, so
4394 the conflicting ones are those with lower indices. */
546b63fb
RK
4395 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4396 return 0;
4397
d1d18b46 4398 for (i = 0; i <= opnum; i++)
546b63fb
RK
4399 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4400 return 0;
4401
4402 return 1;
4403
47c8cf91
ILT
4404 case RELOAD_FOR_OUTADDR_ADDRESS:
4405 /* Can't use a register if it is used for an output address
05d10675 4406 for this operand or used as an output in this or a
d1d18b46
DJ
4407 later operand. Note that multiple output operands are
4408 emitted in reverse order, so the conflicting ones are
4409 those with lower indices. */
47c8cf91
ILT
4410 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4411 return 0;
4412
d1d18b46 4413 for (i = 0; i <= opnum; i++)
47c8cf91
ILT
4414 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4415 return 0;
4416
4417 return 1;
4418
32131a9c 4419 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4420 for (i = 0; i < reload_n_operands; i++)
4421 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4422 return 0;
4423
4424 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4425 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4426
893bc853
RK
4427 case RELOAD_FOR_OPADDR_ADDR:
4428 for (i = 0; i < reload_n_operands; i++)
05d10675
BS
4429 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4430 return 0;
893bc853 4431
a94ce333 4432 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
893bc853 4433
32131a9c 4434 case RELOAD_FOR_OUTPUT:
546b63fb 4435 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
d1d18b46
DJ
4436 outputs, or an operand address for this or an earlier output.
4437 Note that multiple output operands are emitted in reverse order,
4438 so the conflicting ones are those with higher indices. */
546b63fb
RK
4439 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4440 return 0;
4441
4442 for (i = 0; i < reload_n_operands; i++)
4443 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4444 return 0;
4445
d1d18b46 4446 for (i = opnum; i < reload_n_operands; i++)
47c8cf91
ILT
4447 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4448 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
546b63fb
RK
4449 return 0;
4450
4451 return 1;
4452
4453 case RELOAD_FOR_INSN:
4454 for (i = 0; i < reload_n_operands; i++)
4455 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4456 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4457 return 0;
4458
4459 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4460 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4461
4462 case RELOAD_FOR_OTHER_ADDRESS:
4463 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4464 }
4465 abort ();
4466}
4467
32131a9c 4468/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4469 needed for the part of the insn specified by OPNUM and TYPE,
4470 is still available in REGNO at the end of the insn.
4471
4472 We can assume that the reload reg was already tested for availability
4473 at the time it is needed, and we should not check this again,
4474 in case the reg has already been marked in use. */
4475
4476static int
0c20a65f 4477reload_reg_reaches_end_p (unsigned int regno, int opnum, enum reload_type type)
32131a9c 4478{
4479 int i;
4480
4481 switch (type)
4482 {
4483 case RELOAD_OTHER:
4484 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4485 its value must reach the end. */
4486 return 1;
4487
4488 /* If this use is for part of the insn,
05d10675 4489 its value reaches the end if no subsequent part uses the same register.
4490 Just like the above function, don't try to do this with lots
4491 of fallthroughs. */
4492
4493 case RELOAD_FOR_OTHER_ADDRESS:
4494 /* Here we check for everything else, since these don't conflict
4495 with anything else and everything comes later. */
4496
4497 for (i = 0; i < reload_n_operands; i++)
4498 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 4499 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4500 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4501 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
47c8cf91 4502 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4503 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4504 return 0;
4505
4506 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
808ededc 4507 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4508 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4509 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4510
4511 case RELOAD_FOR_INPUT_ADDRESS:
47c8cf91 4512 case RELOAD_FOR_INPADDR_ADDRESS:
4513 /* Similar, except that we check only for this and subsequent inputs
4514 and the address of only subsequent inputs and we do not need
4515 to check for RELOAD_OTHER objects since they are known not to
4516 conflict. */
4517
4518 for (i = opnum; i < reload_n_operands; i++)
4519 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4520 return 0;
4521
4522 for (i = opnum + 1; i < reload_n_operands; i++)
4523 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4524 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4525 return 0;
4526
4527 for (i = 0; i < reload_n_operands; i++)
4528 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 4529 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4530 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4531 return 0;
4532
4533 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4534 return 0;
4535
4536 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4537 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4538 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
546b63fb 4539
32131a9c 4540 case RELOAD_FOR_INPUT:
546b63fb 4541 /* Similar to input address, except we start at the next operand for
05d10675 4542 both input and input address and we do not check for
4543 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4544 would conflict. */
4545
4546 for (i = opnum + 1; i < reload_n_operands; i++)
4547 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
47c8cf91 4548 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4549 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4550 return 0;
4551
0f41302f 4552 /* ... fall through ... */
546b63fb 4553
32131a9c 4554 case RELOAD_FOR_OPERAND_ADDRESS:
4555 /* Check outputs and their addresses. */
4556
4557 for (i = 0; i < reload_n_operands; i++)
4558 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 4559 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4560 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4561 return 0;
4562
2af88768 4563 return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
546b63fb 4564
4565 case RELOAD_FOR_OPADDR_ADDR:
4566 for (i = 0; i < reload_n_operands; i++)
4567 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 4568 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4569 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4570 return 0;
4571
4572 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4573 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4574 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
893bc853 4575
546b63fb 4576 case RELOAD_FOR_INSN:
893bc853 4577 /* These conflict with other outputs with RELOAD_OTHER. So
4578 we need only check for output addresses. */
4579
d1d18b46 4580 opnum = reload_n_operands;
546b63fb 4581
0f41302f 4582 /* ... fall through ... */
546b63fb 4583
32131a9c 4584 case RELOAD_FOR_OUTPUT:
546b63fb 4585 case RELOAD_FOR_OUTPUT_ADDRESS:
47c8cf91 4586 case RELOAD_FOR_OUTADDR_ADDRESS:
546b63fb 4587 /* We already know these can't conflict with a later output. So the
4588 only thing to check are later output addresses.
4589 Note that multiple output operands are emitted in reverse order,
4590 so the conflicting ones are those with lower indices. */
4591 for (i = 0; i < opnum; i++)
4592 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4593 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4594 return 0;
4595
4596 return 1;
4597 }
546b63fb 4598
4599 abort ();
4600}
4601\f
4602/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4603 Return 0 otherwise.
4604
4605 This function uses the same algorithm as reload_reg_free_p above. */
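/* For example, under the rules below a RELOAD_FOR_INPUT reload for operand 1
   conflicts with a RELOAD_FOR_INPUT_ADDRESS reload for operand 2 (an address
   still needed for a later input), but not with one for operand 0, whose
   address register is no longer needed once operand 1 is loaded.  */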
4606
f5963e61 4607int
0c20a65f 4608reloads_conflict (int r1, int r2)
351aa1c1 4609{
4610 enum reload_type r1_type = rld[r1].when_needed;
4611 enum reload_type r2_type = rld[r2].when_needed;
4612 int r1_opnum = rld[r1].opnum;
4613 int r2_opnum = rld[r2].opnum;
351aa1c1 4614
4615 /* RELOAD_OTHER conflicts with everything. */
4616 if (r2_type == RELOAD_OTHER)
4617 return 1;
4618
4619 /* Otherwise, check conflicts differently for each type. */
4620
4621 switch (r1_type)
4622 {
4623 case RELOAD_FOR_INPUT:
05d10675 4624 return (r2_type == RELOAD_FOR_INSN
351aa1c1 4625 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
893bc853 4626 || r2_type == RELOAD_FOR_OPADDR_ADDR
351aa1c1 4627 || r2_type == RELOAD_FOR_INPUT
4628 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4629 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4630 && r2_opnum > r1_opnum));
4631
4632 case RELOAD_FOR_INPUT_ADDRESS:
4633 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4634 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4635
4636 case RELOAD_FOR_INPADDR_ADDRESS:
4637 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4638 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4639
4640 case RELOAD_FOR_OUTPUT_ADDRESS:
4641 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
d1d18b46 4642 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
351aa1c1 4643
4644 case RELOAD_FOR_OUTADDR_ADDRESS:
4645 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
d1d18b46 4646 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
47c8cf91 4647
4648 case RELOAD_FOR_OPERAND_ADDRESS:
4649 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
a94ce333 4650 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
351aa1c1 4651
893bc853 4652 case RELOAD_FOR_OPADDR_ADDR:
05d10675 4653 return (r2_type == RELOAD_FOR_INPUT
a94ce333 4654 || r2_type == RELOAD_FOR_OPADDR_ADDR);
893bc853 4655
4656 case RELOAD_FOR_OUTPUT:
4657 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4658 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4659 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
d1d18b46 4660 && r2_opnum >= r1_opnum));
4661
4662 case RELOAD_FOR_INSN:
4663 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4664 || r2_type == RELOAD_FOR_INSN
4665 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4666
4667 case RELOAD_FOR_OTHER_ADDRESS:
4668 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4669
adab4fc5 4670 case RELOAD_OTHER:
2edc8d65 4671 return 1;
adab4fc5 4672
4673 default:
4674 abort ();
4675 }
4676}
4677\f
4678/* Indexed by reload number, 1 if incoming value
4679 inherited from previous insns. */
4680char reload_inherited[MAX_RELOADS];
4681
4682/* For an inherited reload, this is the insn the reload was inherited from,
4683 if we know it. Otherwise, this is 0. */
4684rtx reload_inheritance_insn[MAX_RELOADS];
4685
40f03658 4686/* If nonzero, this is a place to get the value of the reload,
4687 rather than using reload_in. */
4688rtx reload_override_in[MAX_RELOADS];
4689
4690/* For each reload, the hard register number of the register used,
4691 or -1 if we did not need a register for this reload. */
4692int reload_spill_index[MAX_RELOADS];
4693
4694/* Subroutine of free_for_value_p, used to check a single register.
4695 START_REGNO is the starting regno of the full reload register
4696 (possibly comprising multiple hard registers) that we are considering. */
f5470689 4697
6e684430 4698static int
4699reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
4700 enum reload_type type, rtx value, rtx out,
4701 int reloadnum, int ignore_address_reloads)
4702{
4703 int time1;
4704 /* Set if we see an input reload that must not share its reload register
4705 with any new earlyclobber, but might otherwise share the reload
4706 register with an output or input-output reload. */
4707 int check_earlyclobber = 0;
6e684430 4708 int i;
4709 int copy = 0;
4710
9e3a9cf2 4711 if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4712 return 0;
4713
4714 if (out == const0_rtx)
4715 {
4716 copy = 1;
4717 out = NULL_RTX;
4718 }
4719
4720 /* We use some pseudo 'time' value to check if the lifetime of the
4721 new register use would overlap with that of a previous reload
4722 that is not read-only or uses a different value.
4723 The 'time' used doesn't have to be linear in any shape or form, just
4724 monotonic.
4725 Some reload types use different 'buckets' for each operand.
4726 So there are MAX_RECOG_OPERANDS different time values for each
4727 such reload type.
4728 We compute TIME1 as the time when the register for the prospective
4729 new reload ceases to be live, and TIME2 for each existing
4730 reload as the time when the reload register of that reload
4731 becomes live.
4732 Where there is little to be gained by exact lifetime calculations,
4733 we just make conservative assumptions, i.e. a longer lifetime;
4734 this is done in the 'default:' cases. */
4735 switch (type)
4736 {
4737 case RELOAD_FOR_OTHER_ADDRESS:
203588e7 4738 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
c2b4b171 4739 time1 = copy ? 0 : 1;
6e684430 4740 break;
4741 case RELOAD_OTHER:
4742 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
4743 break;
4744 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
4745 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2 ,
4746 respectively, to the time values for these, we get distinct time
4747 values. To get distinct time values for each operand, we have to
4748 multiply opnum by at least three. We round that up to four because
4749 multiply by four is often cheaper. */
6e684430 4750 case RELOAD_FOR_INPADDR_ADDRESS:
dfe96118 4751 time1 = opnum * 4 + 2;
4752 break;
4753 case RELOAD_FOR_INPUT_ADDRESS:
4754 time1 = opnum * 4 + 3;
4755 break;
4756 case RELOAD_FOR_INPUT:
4757 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
4758 executes (inclusive). */
4759 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
6e684430 4760 break;
cb2afeb3 4761 case RELOAD_FOR_OPADDR_ADDR:
4762 /* opnum * 4 + 4
4763 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
4764 time1 = MAX_RECOG_OPERANDS * 4 + 1;
4765 break;
4766 case RELOAD_FOR_OPERAND_ADDRESS:
4767 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
4768 is executed. */
4769 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
4770 break;
4771 case RELOAD_FOR_OUTADDR_ADDRESS:
4772 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
6e684430 4773 break;
6e684430 4774 case RELOAD_FOR_OUTPUT_ADDRESS:
dfe96118 4775 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
4776 break;
4777 default:
dfe96118 4778 time1 = MAX_RECOG_OPERANDS * 5 + 5;
4779 }
4780
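/* As an illustration of the scheme above: a prospective RELOAD_FOR_INPUT_ADDRESS
   reload for operand 2 gets TIME1 == 2 * 4 + 3 == 11, while an existing
   RELOAD_FOR_INPUT reload of the same operand gets TIME2 == 2 * 4 + 4 == 12
   below; since TIME1 < TIME2 the lifetimes do not overlap, so the register may
   be shared as far as this pair of reloads is concerned.  */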
4781 for (i = 0; i < n_reloads; i++)
4782 {
eceef4c9 4783 rtx reg = rld[i].reg_rtx;
f8cfc6aa 4784 if (reg && REG_P (reg)
6e684430 4785 && ((unsigned) regno - true_regnum (reg)
66fd46b6 4786 <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
f5470689 4787 && i != reloadnum)
6e684430 4788 {
4789 rtx other_input = rld[i].in;
4790
4791 /* If the other reload loads the same input value, that
4792 will not cause a conflict only if it's loading it into
4793 the same register. */
4794 if (true_regnum (reg) != start_regno)
4795 other_input = NULL_RTX;
4796 if (! other_input || ! rtx_equal_p (other_input, value)
eceef4c9 4797 || rld[i].out || out)
6e684430 4798 {
09a308fe 4799 int time2;
eceef4c9 4800 switch (rld[i].when_needed)
4801 {
4802 case RELOAD_FOR_OTHER_ADDRESS:
4803 time2 = 0;
4804 break;
4805 case RELOAD_FOR_INPADDR_ADDRESS:
4806 /* find_reloads makes sure that a
4807 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
4808 by at most one - the first -
4809 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS . If the
4810 address reload is inherited, the address address reload
4811 goes away, so we can ignore this conflict. */
4812 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
4813 && ignore_address_reloads
4814 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
4815 Then the address address is still needed to store
4816 back the new address. */
eceef4c9 4817 && ! rld[reloadnum].out)
cb2afeb3 4818 continue;
4819 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
4820 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
4821 reloads go away. */
eceef4c9 4822 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
dfe96118
R
4823 && ignore_address_reloads
4824 /* Unless we are reloading an auto_inc expression. */
eceef4c9 4825 && ! rld[reloadnum].out)
dfe96118 4826 continue;
eceef4c9 4827 time2 = rld[i].opnum * 4 + 2;
4828 break;
4829 case RELOAD_FOR_INPUT_ADDRESS:
eceef4c9 4830 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
dfe96118 4831 && ignore_address_reloads
eceef4c9 4832 && ! rld[reloadnum].out)
dfe96118 4833 continue;
eceef4c9 4834 time2 = rld[i].opnum * 4 + 3;
4835 break;
4836 case RELOAD_FOR_INPUT:
eceef4c9 4837 time2 = rld[i].opnum * 4 + 4;
09a308fe 4838 check_earlyclobber = 1;
f5470689 4839 break;
eceef4c9 4840 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
05d10675 4841 == MAX_RECOG_OPERANDS * 4 */
cb2afeb3 4842 case RELOAD_FOR_OPADDR_ADDR:
4843 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
4844 && ignore_address_reloads
eceef4c9 4845 && ! rld[reloadnum].out)
cb2afeb3 4846 continue;
dfe96118 4847 time2 = MAX_RECOG_OPERANDS * 4 + 1;
4848 break;
4849 case RELOAD_FOR_OPERAND_ADDRESS:
dfe96118 4850 time2 = MAX_RECOG_OPERANDS * 4 + 2;
09a308fe 4851 check_earlyclobber = 1;
4852 break;
4853 case RELOAD_FOR_INSN:
4854 time2 = MAX_RECOG_OPERANDS * 4 + 3;
cb2afeb3 4855 break;
f5470689 4856 case RELOAD_FOR_OUTPUT:
4857 /* All RELOAD_FOR_OUTPUT reloads become live just after the
4858 instruction is executed. */
dfe96118 4859 time2 = MAX_RECOG_OPERANDS * 4 + 4;
f5470689 4860 break;
4861 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
4862 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
4863 value. */
cb2afeb3 4864 case RELOAD_FOR_OUTADDR_ADDRESS:
4865 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
4866 && ignore_address_reloads
eceef4c9 4867 && ! rld[reloadnum].out)
cb2afeb3 4868 continue;
eceef4c9 4869 time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
dfe96118 4870 break;
f5470689 4871 case RELOAD_FOR_OUTPUT_ADDRESS:
eceef4c9 4872 time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
4873 break;
4874 case RELOAD_OTHER:
4875 /* If there is no conflict in the input part, handle this
4876 like an output reload. */
304a22dd 4877 if (! rld[i].in || rtx_equal_p (other_input, value))
f5470689 4878 {
dfe96118 4879 time2 = MAX_RECOG_OPERANDS * 4 + 4;
57850c85 4880 /* Earlyclobbered outputs must conflict with inputs. */
4881 if (earlyclobber_operand_p (rld[i].out))
4882 time2 = MAX_RECOG_OPERANDS * 4 + 3;
1d7254c5 4883
4884 break;
4885 }
4886 time2 = 1;
4887 /* RELOAD_OTHER might be live beyond instruction execution,
4888 but this is not obvious when we set time2 = 1. So check
4889 here if there might be a problem with the new reload
4890 clobbering the register used by the RELOAD_OTHER. */
4891 if (out)
4892 return 0;
4893 break;
f5470689 4894 default:
dfe96118 4895 return 0;
f5470689 4896 }
25963977 4897 if ((time1 >= time2
eceef4c9 4898 && (! rld[i].in || rld[i].out
304a22dd 4899 || ! rtx_equal_p (other_input, value)))
eceef4c9 4900 || (out && rld[reloadnum].out_reg
701d55e8 4901 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
f5470689 4902 return 0;
6e684430 4903 }
4904 }
4905 }
4906
4907 /* Earlyclobbered outputs must conflict with inputs. */
4908 if (check_earlyclobber && out && earlyclobber_operand_p (out))
4909 return 0;
4910
4911 return 1;
4912}
4913
4914/* Return 1 if the value in reload reg REGNO, as used by a reload
4915 needed for the part of the insn specified by OPNUM and TYPE,
4916 may be used to load VALUE into it.
4917
4918 MODE is the mode in which the register is used, this is needed to
4919 determine how many hard regs to test.
4920
4921 Other read-only reloads with the same value do not conflict
40f03658 4922 unless OUT is nonzero and these other reloads have to live while
4923 output reloads live.
4924 If OUT is CONST0_RTX, this is a special case: it means that the
4925 test should not be for using register REGNO as reload register, but
4926 for copying from register REGNO into the reload register.
4927
4928 RELOADNUM is the number of the reload we want to load this value for;
4929 a reload does not conflict with itself.
4930
4931 When IGNORE_ADDRESS_RELOADS is set, we cannot have conflicts with
4932 reloads that load an address for the very reload we are considering.
4933
4934 The caller has to make sure that there is no conflict with the return
4935 register. */
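/* For instance, allocate_reload_reg below uses this predicate to check whether
   a candidate spill register can be used to load rld[r].in for reload R without
   clobbering a value that some other reload of the same insn still needs.  */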
4936
4937static int
4938free_for_value_p (int regno, enum machine_mode mode, int opnum,
4939 enum reload_type type, rtx value, rtx out, int reloadnum,
4940 int ignore_address_reloads)
c02cad8f 4941{
66fd46b6 4942 int nregs = hard_regno_nregs[regno][mode];
c02cad8f 4943 while (nregs-- > 0)
4944 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
4945 value, out, reloadnum,
4946 ignore_address_reloads))
4947 return 0;
4948 return 1;
4949}
4950
4951/* Return nonzero if the rtx X is invariant over the current function. */
4952/* ??? Actually, the places where we use this expect exactly what
4953 * is tested here, and not everything that is function invariant. In
4954 * particular, the frame pointer and arg pointer are special cased;
4955 * pic_offset_table_rtx is not, and this will cause aborts when we
4956 * go to spill these things to memory. */
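/* For example, (plus (reg fp) (const_int 8)) and any CONSTANT_P rtx are
   accepted here, whereas an ordinary pseudo register is not.  */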
4957
4958static int
4959function_invariant_p (rtx x)
4960{
4961 if (CONSTANT_P (x))
4962 return 1;
4963 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
4964 return 1;
4965 if (GET_CODE (x) == PLUS
4966 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
4967 && CONSTANT_P (XEXP (x, 1)))
4968 return 1;
4969 return 0;
4970}
4971
4972/* Determine whether the reload reg X overlaps any rtx'es used for
4973 overriding inheritance. Return nonzero if so. */
4974
4975static int
0c20a65f 4976conflicts_with_override (rtx x)
4977{
4978 int i;
4979 for (i = 0; i < n_reloads; i++)
4980 if (reload_override_in[i]
4981 && reg_overlap_mentioned_p (x, reload_override_in[i]))
4982 return 1;
4983 return 0;
4984}
4985\f
4986/* Give an error message saying we failed to find a reload for INSN,
4987 and clear out reload R. */
4988static void
0c20a65f 4989failed_reload (rtx insn, int r)
4990{
4991 if (asm_noperands (PATTERN (insn)) < 0)
4992 /* It's the compiler's fault. */
1f978f5f 4993 fatal_insn ("could not find a spill register", insn);
4994
4995 /* It's the user's fault; the operand's mode and constraint
4996 don't match. Disable this reload so we don't crash in final. */
4997 error_for_asm (insn,
4998 "`asm' operand constraint incompatible with operand size");
4999 rld[r].in = 0;
5000 rld[r].out = 0;
5001 rld[r].reg_rtx = 0;
5002 rld[r].optional = 1;
5003 rld[r].secondary_p = 1;
5004}
5005
5006/* I is the index in SPILL_REG_RTX of the reload register we are to allocate
5007 for reload R. If it's valid, get an rtx for it. Return nonzero if
5008 successful. */
5009static int
0c20a65f 5010set_reload_reg (int i, int r)
5011{
5012 int regno;
5013 rtx reg = spill_reg_rtx[i];
5014
5015 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5016 spill_reg_rtx[i] = reg
5017 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5018
5019 regno = true_regnum (reg);
5020
5021 /* Detect when the reload reg can't hold the reload mode.
5022 This used to be one `if', but the Sequent compiler can't handle that. */
5023 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5024 {
5025 enum machine_mode test_mode = VOIDmode;
5026 if (rld[r].in)
5027 test_mode = GET_MODE (rld[r].in);
5028 /* If rld[r].in has VOIDmode, it means we will load it
5029 in whatever mode the reload reg has: to wit, rld[r].mode.
5030 We have already tested that for validity. */
5031 /* Aside from that, we need to test that the expressions
5032 to reload from or into have modes which are valid for this
5033 reload register. Otherwise the reload insns would be invalid. */
5034 if (! (rld[r].in != 0 && test_mode != VOIDmode
5035 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5036 if (! (rld[r].out != 0
5037 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5038 {
5039 /* The reg is OK. */
5040 last_spill_reg = i;
5041
5042 /* Mark as in use for this insn the reload regs we use
5043 for this. */
5044 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5045 rld[r].when_needed, rld[r].mode);
5046
5047 rld[r].reg_rtx = reg;
5048 reload_spill_index[r] = spill_regs[i];
5049 return 1;
5050 }
5051 }
5052 return 0;
5053}
5054
32131a9c 5055/* Find a spill register to use as a reload register for reload R.
40f03658 5056 LAST_RELOAD is nonzero if this is the last reload for the insn being
5057 processed.
5058
eceef4c9 5059 Set rld[R].reg_rtx to the register allocated.
32131a9c 5060
5061 We return 1 if successful, or 0 if we couldn't find a spill reg and
5062 we didn't change anything. */
5063
5064static int
5065allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
5066 int last_reload)
32131a9c 5067{
67e61fe7 5068 int i, pass, count;
5069
5070 /* If we put this reload ahead, thinking it is a group,
5071 then insist on finding a group. Otherwise we can grab a
a8fdc208 5072 reg that some other reload needs.
5073 (That can happen when we have a 68000 DATA_OR_FP_REG
5074 which is a group of data regs or one fp reg.)
5075 We need not be so restrictive if there are no more reloads
5076 for this insn.
5077
5078 ??? Really it would be nicer to have smarter handling
5079 for that kind of reg class, where a problem like this is normal.
5080 Perhaps those classes should be avoided for reloading
5081 by use of more alternatives. */
5082
8ec450a4 5083 int force_group = rld[r].nregs > 1 && ! last_reload;
5084
5085 /* If we want a single register and haven't yet found one,
5086 take any reg in the right class and not in use.
5087 If we want a consecutive group, here is where we look for it.
5088
5089 We use two passes so we can first look for reload regs to
5090 reuse, which are already in use for other reloads in this insn,
5091 and only then use additional registers.
5092 I think that maximizing reuse is needed to make sure we don't
5093 run out of reload regs. Suppose we have three reloads, and
5094 reloads A and B can share regs. These need two regs.
5095 Suppose A and B are given different regs.
5096 That leaves none for C. */
5097 for (pass = 0; pass < 2; pass++)
5098 {
5099 /* I is the index in spill_regs.
5100 We advance it round-robin between insns to use all spill regs
5101 equally, so that inherited reloads have a chance
5102 of leapfrogging each other. */
5103
5104 i = last_spill_reg;
05d10675 5105
a5339699 5106 for (count = 0; count < n_spills; count++)
32131a9c 5107 {
eceef4c9 5108 int class = (int) rld[r].class;
03acd8f8 5109 int regnum;
32131a9c 5110
5111 i++;
5112 if (i >= n_spills)
5113 i -= n_spills;
5114 regnum = spill_regs[i];
32131a9c 5115
5116 if ((reload_reg_free_p (regnum, rld[r].opnum,
5117 rld[r].when_needed)
5118 || (rld[r].in
5119 /* We check reload_reg_used to make sure we
5120 don't clobber the return register. */
03acd8f8 5121 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5122 && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
5123 rld[r].when_needed, rld[r].in,
5124 rld[r].out, r, 1)))
03acd8f8 5125 && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
8ec450a4 5126 && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5127 /* Look first for regs to share, then for unshared. But
5128 don't share regs used for inherited reloads; they are
5129 the ones we want to preserve. */
5130 && (pass
5131 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
03acd8f8 5132 regnum)
be7ae2a4 5133 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
03acd8f8 5134 regnum))))
32131a9c 5135 {
66fd46b6 5136 int nr = hard_regno_nregs[regnum][rld[r].mode];
5137 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5138 (on 68000) got us two FP regs. If NR is 1,
5139 we would reject both of them. */
5140 if (force_group)
67e61fe7 5141 nr = rld[r].nregs;
5142 /* If we need only one reg, we have already won. */
5143 if (nr == 1)
5144 {
5145 /* But reject a single reg if we demand a group. */
5146 if (force_group)
5147 continue;
5148 break;
5149 }
5150 /* Otherwise check that as many consecutive regs as we need
5151 are available here. */
5152 while (nr > 1)
5153 {
5154 int regno = regnum + nr - 1;
5155 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5156 && spill_reg_order[regno] >= 0
5157 && reload_reg_free_p (regno, rld[r].opnum,
5158 rld[r].when_needed)))
5159 break;
5160 nr--;
5161 }
5162 if (nr == 1)
5163 break;
5164 }
5165 }
5166
5167 /* If we found something on pass 1, omit pass 2. */
5168 if (count < n_spills)
5169 break;
5170 }
1d7254c5 5171
32131a9c 5172 /* We should have found a spill register by now. */
f5d8c9f4 5173 if (count >= n_spills)
5174 return 0;
5175
5176 /* I is the index in SPILL_REG_RTX of the reload register we are to
5177 allocate. Get an rtx for it and find its register number. */
32131a9c 5178
f5d8c9f4 5179 return set_reload_reg (i, r);
5180}
5181\f
5182/* Initialize all the tables needed to allocate reload registers.
5183 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5184 is the array we use to restore the reg_rtx field for every reload. */
efc9bd41 5185
32131a9c 5186static void
0c20a65f 5187choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
32131a9c 5188{
67e61fe7 5189 int i;
32131a9c 5190
5191 for (i = 0; i < n_reloads; i++)
5192 rld[i].reg_rtx = save_reload_reg_rtx[i];
32131a9c 5193
961192e1 5194 memset (reload_inherited, 0, MAX_RELOADS);
5195 memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
5196 memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
5197
5198 CLEAR_HARD_REG_SET (reload_reg_used);
5199 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
32131a9c 5200 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
893bc853 5201 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5202 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5203 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
32131a9c 5204
5205 CLEAR_HARD_REG_SET (reg_used_in_insn);
5206 {
5207 HARD_REG_SET tmp;
239a0f5b 5208 REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
f1db3576 5209 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
239a0f5b 5210 REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
f1db3576 5211 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5212 compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
5213 compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
f1db3576 5214 }
efc9bd41 5215
5216 for (i = 0; i < reload_n_operands; i++)
5217 {
5218 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5219 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5220 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
47c8cf91 5221 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
546b63fb 5222 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
47c8cf91 5223 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
546b63fb 5224 }
32131a9c 5225
9e3a9cf2 5226 COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
05d10675 5227
67e61fe7 5228 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
32131a9c 5229
5230 for (i = 0; i < n_reloads; i++)
5231 /* If we have already decided to use a certain register,
5232 don't use it in another way. */
5233 if (rld[i].reg_rtx)
5234 mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5235 rld[i].when_needed, rld[i].mode);
5236}
32131a9c 5237
5238/* Assign hard reg targets for the pseudo-registers we must reload
5239 into hard regs for this insn.
5240 Also output the instructions to copy them in and out of the hard regs.
5241
5242 For machines with register classes, we are responsible for
5243 finding a reload reg in the proper class. */
5244
5245static void
0c20a65f 5246choose_reload_regs (struct insn_chain *chain)
5247{
5248 rtx insn = chain->insn;
b3694847 5249 int i, j;
770ae6cc 5250 unsigned int max_group_size = 1;
67e61fe7 5251 enum reg_class group_class = NO_REGS;
f5d8c9f4 5252 int pass, win, inheritance;
5253
5254 rtx save_reload_reg_rtx[MAX_RELOADS];
32131a9c 5255
5256 /* In order to be certain of getting the registers we need,
5257 we must sort the reloads into order of increasing register class.
5258 Then our grabbing of reload registers will parallel the process
a8fdc208 5259 that provided the reload registers.
5260
5261 Also note whether any of the reloads wants a consecutive group of regs.
5262 If so, record the maximum size of the group desired and what
5263 register class contains all the groups needed by this insn. */
5264
5265 for (j = 0; j < n_reloads; j++)
5266 {
5267 reload_order[j] = j;
5268 reload_spill_index[j] = -1;
5269
8ec450a4 5270 if (rld[j].nregs > 1)
32131a9c 5271 {
8ec450a4 5272 max_group_size = MAX (rld[j].nregs, max_group_size);
770ae6cc 5273 group_class
8e2e89f7 5274 = reg_class_superunion[(int) rld[j].class][(int) group_class];
5275 }
5276
eceef4c9 5277 save_reload_reg_rtx[j] = rld[j].reg_rtx;
5278 }
5279
5280 if (n_reloads > 1)
5281 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5282
5283 /* If -O, try first with inheritance, then turning it off.
5284 If not -O, don't do inheritance.
5285 Using inheritance when not optimizing leads to paradoxes
5286 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5287 because one side of the comparison might be inherited. */
f5d8c9f4 5288 win = 0;
58b1581b 5289 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
32131a9c 5290 {
5291 choose_reload_regs_init (chain, save_reload_reg_rtx);
5292
5293 /* Process the reloads in order of preference just found.
5294 Beyond this point, subregs can be found in reload_reg_rtx.
5295
5296 This used to look for an existing reloaded home for all of the
5297 reloads, and only then perform any new reloads. But that could lose
5298 if the reloads were done out of reg-class order because a later
5299 reload with a looser constraint might have an old home in a register
5300 needed by an earlier reload with a tighter constraint.
5301
5302 To solve this, we make two passes over the reloads, in the order
5303 described above. In the first pass we try to inherit a reload
5304 from a previous insn. If there is a later reload that needs a
5305 class that is a proper subset of the class being processed, we must
5306 also allocate a spill register during the first pass.
5307
5308 Then make a second pass over the reloads to allocate any reloads
5309 that haven't been given registers yet. */
5310
5311 for (j = 0; j < n_reloads; j++)
5312 {
b3694847 5313 int r = reload_order[j];
8593b745 5314 rtx search_equiv = NULL_RTX;
5315
5316 /* Ignore reloads that got marked inoperative. */
5317 if (rld[r].out == 0 && rld[r].in == 0
5318 && ! rld[r].secondary_p)
5319 continue;
5320
b29514ee 5321 /* If find_reloads chose to use reload_in or reload_out as a reload
5322 register, we don't need to choose one. Otherwise, try even if it
5323 found one since we might save an insn if we find the value lying
5324 around.
5325 Try also when reload_in is a pseudo without a hard reg. */
5326 if (rld[r].in != 0 && rld[r].reg_rtx != 0
5327 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
5328 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
3c0cb5de 5329 && !MEM_P (rld[r].in)
eceef4c9 5330 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
5331 continue;
5332
5333#if 0 /* No longer needed for correct operation.
5334 It might give better code, or might not; worth an experiment? */
5335 /* If this is an optional reload, we can't inherit from earlier insns
5336 until we are sure that any non-optional reloads have been allocated.
5337 The following code takes advantage of the fact that optional reloads
5338 are at the end of reload_order. */
eceef4c9 5339 if (rld[r].optional != 0)
32131a9c 5340 for (i = 0; i < j; i++)
5341 if ((rld[reload_order[i]].out != 0
5342 || rld[reload_order[i]].in != 0
5343 || rld[reload_order[i]].secondary_p)
5344 && ! rld[reload_order[i]].optional
5345 && rld[reload_order[i]].reg_rtx == 0)
f5d8c9f4 5346 allocate_reload_reg (chain, reload_order[i], 0);
5347#endif
5348
5349 /* First see if this pseudo is already available as reloaded
5350 for a previous insn. We cannot try to inherit for reloads
5351 that are smaller than the maximum number of registers needed
5352 for groups unless the register we would allocate cannot be used
5353 for the groups.
5354
5355 We could check here to see if this is a secondary reload for
5356 an object that is already in a register of the desired class.
5357 This would avoid the need for the secondary reload register.
5358 But this is complex because we can't easily determine what
5359 objects might want to be loaded via this reload. So let a
5360 register be allocated here. In `emit_reload_insns' we suppress
5361 one of the loads in the case described above. */
5362
5363 if (inheritance)
5364 {
ddef6bc7 5365 int byte = 0;
b3694847 5366 int regno = -1;
6a651371 5367 enum machine_mode mode = VOIDmode;
32131a9c 5368
eceef4c9 5369 if (rld[r].in == 0)
32131a9c 5370 ;
f8cfc6aa 5371 else if (REG_P (rld[r].in))
db660765 5372 {
5373 regno = REGNO (rld[r].in);
5374 mode = GET_MODE (rld[r].in);
db660765 5375 }
f8cfc6aa 5376 else if (REG_P (rld[r].in_reg))
db660765 5377 {
5378 regno = REGNO (rld[r].in_reg);
5379 mode = GET_MODE (rld[r].in_reg);
db660765 5380 }
eceef4c9 5381 else if (GET_CODE (rld[r].in_reg) == SUBREG
f8cfc6aa 5382 && REG_P (SUBREG_REG (rld[r].in_reg)))
b60a8416 5383 {
ddef6bc7 5384 byte = SUBREG_BYTE (rld[r].in_reg);
eceef4c9 5385 regno = REGNO (SUBREG_REG (rld[r].in_reg));
cb2afeb3 5386 if (regno < FIRST_PSEUDO_REGISTER)
ddef6bc7 5387 regno = subreg_regno (rld[r].in_reg);
eceef4c9 5388 mode = GET_MODE (rld[r].in_reg);
5389 }
5390#ifdef AUTO_INC_DEC
5391 else if ((GET_CODE (rld[r].in_reg) == PRE_INC
5392 || GET_CODE (rld[r].in_reg) == PRE_DEC
5393 || GET_CODE (rld[r].in_reg) == POST_INC
5394 || GET_CODE (rld[r].in_reg) == POST_DEC)
f8cfc6aa 5395 && REG_P (XEXP (rld[r].in_reg, 0)))
cb2afeb3 5396 {
5397 regno = REGNO (XEXP (rld[r].in_reg, 0));
5398 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
5399 rld[r].out = rld[r].in;
b60a8416 5400 }
cb2afeb3 5401#endif
5402#if 0
5403 /* This won't work, since REGNO can be a pseudo reg number.
5404 Also, it takes much more hair to keep track of all the things
5405 that can invalidate an inherited reload of part of a pseudoreg. */
eceef4c9 5406 else if (GET_CODE (rld[r].in) == SUBREG
f8cfc6aa 5407 && REG_P (SUBREG_REG (rld[r].in)))
ddef6bc7 5408 regno = subreg_regno (rld[r].in);
5409#endif
5410
5411 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5412 {
eceef4c9 5413 enum reg_class class = rld[r].class, last_class;
cb2afeb3 5414 rtx last_reg = reg_last_reload_reg[regno];
02188693 5415 enum machine_mode need_mode;
05d10675 5416
5417 i = REGNO (last_reg);
5418 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
cb2afeb3 5419 last_class = REGNO_REG_CLASS (i);
02188693 5420
ddef6bc7 5421 if (byte == 0)
5422 need_mode = mode;
5423 else
5424 need_mode
ddef6bc7 5425 = smallest_mode_for_size (GET_MODE_SIZE (mode) + byte,
ce701d1b 5426 GET_MODE_CLASS (mode));
02188693 5427
c9d8a813 5428 if (
5429#ifdef CANNOT_CHANGE_MODE_CLASS
5430 (!REG_CANNOT_CHANGE_MODE_P (i, GET_MODE (last_reg),
5431 need_mode)
1fe668e5 5432 &&
cff9f8d5 5433#endif
c9d8a813 5434 (GET_MODE_SIZE (GET_MODE (last_reg))
02188693 5435 >= GET_MODE_SIZE (need_mode))
5436#ifdef CANNOT_CHANGE_MODE_CLASS
5437 )
c9d8a813 5438#endif
cb2afeb3 5439 && reg_reloaded_contents[i] == regno
e6e52be0 5440 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
8ec450a4 5441 && HARD_REGNO_MODE_OK (i, rld[r].mode)
5442 && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
5443 /* Even if we can't use this register as a reload
5444 register, we might use it for reload_override_in,
5445 if copying it to the desired class is cheap
5446 enough. */
e56b4594 5447 || ((REGISTER_MOVE_COST (mode, last_class, class)
5448 < MEMORY_MOVE_COST (mode, class, 1))
5449#ifdef SECONDARY_INPUT_RELOAD_CLASS
5450 && (SECONDARY_INPUT_RELOAD_CLASS (class, mode,
5451 last_reg)
5452 == NO_REGS)
5453#endif
5454#ifdef SECONDARY_MEMORY_NEEDED
5455 && ! SECONDARY_MEMORY_NEEDED (last_class, class,
5456 mode)
5457#endif
5458 ))
5459
8ec450a4 5460 && (rld[r].nregs == max_group_size
32131a9c 5461 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
e6e52be0 5462 i))
5463 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
5464 rld[r].when_needed, rld[r].in,
5465 const0_rtx, r, 1))
5466 {
5467 /* If a group is needed, verify that all the subsequent
0f41302f 5468 registers still have their values intact. */
66fd46b6 5469 int nr = hard_regno_nregs[i][rld[r].mode];
5470 int k;
5471
5472 for (k = 1; k < nr; k++)
e6e52be0
R
5473 if (reg_reloaded_contents[i + k] != regno
5474 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
5475 break;
5476
5477 if (k == nr)
5478 {
c74fa651 5479 int i1;
eb4d554e 5480 int bad_for_class;
c74fa651 5481
5482 last_reg = (GET_MODE (last_reg) == mode
5483 ? last_reg : gen_rtx_REG (mode, i));
5484
5485 bad_for_class = 0;
5486 for (k = 0; k < nr; k++)
5487 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5488 i+k);
5489
5490 /* We found a register that contains the
5491 value we need. If this register is the
5492 same as an `earlyclobber' operand of the
5493 current insn, just mark it as a place to
5494 reload from since we can't use it as the
5495 reload register itself. */
5496
5497 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5498 if (reg_overlap_mentioned_for_reload_p
5499 (reg_last_reload_reg[regno],
5500 reload_earlyclobbers[i1]))
5501 break;
5502
8908158d 5503 if (i1 != n_earlyclobbers
5504 || ! (free_for_value_p (i, rld[r].mode,
5505 rld[r].opnum,
5506 rld[r].when_needed, rld[r].in,
5507 rld[r].out, r, 1))
e6e52be0 5508 /* Don't use it if we'd clobber a pseudo reg. */
f1db3576 5509 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
eceef4c9 5510 && rld[r].out
e6e52be0 5511 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
0c7f2259 5512 /* Don't clobber the frame pointer. */
1d7254c5 5513 || (i == HARD_FRAME_POINTER_REGNUM
2f460a0a 5514 && frame_pointer_needed
1d7254c5 5515 && rld[r].out)
5516 /* Don't really use the inherited spill reg
5517 if we need it wider than we've got it. */
8ec450a4 5518 || (GET_MODE_SIZE (rld[r].mode)
b29514ee 5519 > GET_MODE_SIZE (mode))
eb4d554e 5520 || bad_for_class
cb2afeb3 5521
5522 /* If find_reloads chose reload_out as reload
5523 register, stay with it - that leaves the
5524 inherited register for subsequent reloads. */
eceef4c9 5525 || (rld[r].out && rld[r].reg_rtx
67e61fe7 5526 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
cb2afeb3 5527 {
5528 if (! rld[r].optional)
5529 {
5530 reload_override_in[r] = last_reg;
5531 reload_inheritance_insn[r]
5532 = reg_reloaded_insn[i];
5533 }
cb2afeb3 5534 }
5535 else
5536 {
54c40e68 5537 int k;
5538 /* We can use this as a reload reg. */
5539 /* Mark the register as in use for this part of
5540 the insn. */
e6e52be0 5541 mark_reload_reg_in_use (i,
5542 rld[r].opnum,
5543 rld[r].when_needed,
8ec450a4 5544 rld[r].mode);
eceef4c9 5545 rld[r].reg_rtx = last_reg;
5546 reload_inherited[r] = 1;
5547 reload_inheritance_insn[r]
5548 = reg_reloaded_insn[i];
5549 reload_spill_index[r] = i;
5550 for (k = 0; k < nr; k++)
5551 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
e6e52be0 5552 i + k);
c74fa651 5553 }
5554 }
5555 }
5556 }
5557 }
5558
5559 /* Here's another way to see if the value is already lying around. */
5560 if (inheritance
eceef4c9 5561 && rld[r].in != 0
32131a9c 5562 && ! reload_inherited[r]
5563 && rld[r].out == 0
5564 && (CONSTANT_P (rld[r].in)
5565 || GET_CODE (rld[r].in) == PLUS
f8cfc6aa 5566 || REG_P (rld[r].in)
3c0cb5de 5567 || MEM_P (rld[r].in))
8ec450a4 5568 && (rld[r].nregs == max_group_size
5569 || ! reg_classes_intersect_p (rld[r].class, group_class)))
5570 search_equiv = rld[r].in;
5571 /* If this is an output reload from a simple move insn, look
5572 if an equivalence for the input is available. */
eceef4c9 5573 else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
5574 {
5575 rtx set = single_set (insn);
5576
5577 if (set
eceef4c9 5578 && rtx_equal_p (rld[r].out, SET_DEST (set))
5579 && CONSTANT_P (SET_SRC (set)))
5580 search_equiv = SET_SRC (set);
5581 }
5582
5583 if (search_equiv)
32131a9c 5584 {
b3694847 5585 rtx equiv
eceef4c9 5586 = find_equiv_reg (search_equiv, insn, rld[r].class,
9714cf43 5587 -1, NULL, 0, rld[r].mode);
f428f252 5588 int regno = 0;
5589
5590 if (equiv != 0)
5591 {
f8cfc6aa 5592 if (REG_P (equiv))
5593 regno = REGNO (equiv);
5594 else if (GET_CODE (equiv) == SUBREG)
5595 {
5596 /* This must be a SUBREG of a hard register.
5597 Make a new REG since this might be used in an
5598 address and not all machines support SUBREGs
5599 there. */
ddef6bc7 5600 regno = subreg_regno (equiv);
8ec450a4 5601 equiv = gen_rtx_REG (rld[r].mode, regno);
5602 }
5603 else
5604 abort ();
5605 }
5606
5607 /* If we found a spill reg, reject it unless it is free
5608 and of the desired class. */
5609 if (equiv != 0)
5610 {
5611 int regs_used = 0;
5612 int bad_for_class = 0;
5613 int max_regno = regno + rld[r].nregs;
5614
5615 for (i = regno; i < max_regno; i++)
5616 {
5617 regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
5618 i);
0c20a65f 5619 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5620 i);
5621 }
5622
5623 if ((regs_used
5624 && ! free_for_value_p (regno, rld[r].mode,
5625 rld[r].opnum, rld[r].when_needed,
5626 rld[r].in, rld[r].out, r, 1))
5627 || bad_for_class)
5628 equiv = 0;
5629 }
32131a9c 5630
8ec450a4 5631 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
5632 equiv = 0;
5633
5634 /* We found a register that contains the value we need.
5635 If this register is the same as an `earlyclobber' operand
5636 of the current insn, just mark it as a place to reload from
5637 since we can't use it as the reload register itself. */
5638
5639 if (equiv != 0)
5640 for (i = 0; i < n_earlyclobbers; i++)
5641 if (reg_overlap_mentioned_for_reload_p (equiv,
5642 reload_earlyclobbers[i]))
32131a9c 5643 {
5644 if (! rld[r].optional)
5645 reload_override_in[r] = equiv;
5646 equiv = 0;
5647 break;
5648 }
5649
5650 /* If the equiv register we have found is explicitly clobbered
5651 in the current insn, it depends on the reload type if we
5652 can use it, use it for reload_override_in, or not at all.
5653 In particular, we then can't use EQUIV for a
5654 RELOAD_FOR_OUTPUT_ADDRESS reload. */
32131a9c 5655
9532e31f 5656 if (equiv != 0)
174fa2c4 5657 {
5658 if (regno_clobbered_p (regno, insn, rld[r].mode, 0))
5659 switch (rld[r].when_needed)
5660 {
5661 case RELOAD_FOR_OTHER_ADDRESS:
5662 case RELOAD_FOR_INPADDR_ADDRESS:
5663 case RELOAD_FOR_INPUT_ADDRESS:
5664 case RELOAD_FOR_OPADDR_ADDR:
5665 break;
5666 case RELOAD_OTHER:
5667 case RELOAD_FOR_INPUT:
5668 case RELOAD_FOR_OPERAND_ADDRESS:
5669 if (! rld[r].optional)
5670 reload_override_in[r] = equiv;
5671 /* Fall through. */
5672 default:
5673 equiv = 0;
5674 break;
5675 }
5676 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
5677 switch (rld[r].when_needed)
5678 {
5679 case RELOAD_FOR_OTHER_ADDRESS:
5680 case RELOAD_FOR_INPADDR_ADDRESS:
5681 case RELOAD_FOR_INPUT_ADDRESS:
5682 case RELOAD_FOR_OPADDR_ADDR:
5683 case RELOAD_FOR_OPERAND_ADDRESS:
5684 case RELOAD_FOR_INPUT:
5685 break;
5686 case RELOAD_OTHER:
5687 if (! rld[r].optional)
5688 reload_override_in[r] = equiv;
5689 /* Fall through. */
5690 default:
5691 equiv = 0;
5692 break;
5693 }
5694 }
5695
5696 /* If we found an equivalent reg, say no code need be generated
5697 to load it, and use it as our reload reg. */
5698 if (equiv != 0
5699 && (regno != HARD_FRAME_POINTER_REGNUM
2f460a0a 5700 || !frame_pointer_needed))
32131a9c 5701 {
66fd46b6 5702 int nr = hard_regno_nregs[regno][rld[r].mode];
100338df 5703 int k;
eceef4c9 5704 rld[r].reg_rtx = equiv;
32131a9c 5705 reload_inherited[r] = 1;
100338df 5706
5707 /* If reg_reloaded_valid is not set for this register,
5708 there might be a stale spill_reg_store lying around.
5709 We must clear it, since otherwise emit_reload_insns
5710 might delete the store. */
5711 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
5712 spill_reg_store[regno] = NULL_RTX;
5713 /* If any of the hard registers in EQUIV are spill
5714 registers, mark them as in use for this insn. */
5715 for (k = 0; k < nr; k++)
be7ae2a4 5716 {
5717 i = spill_reg_order[regno + k];
5718 if (i >= 0)
5719 {
5720 mark_reload_reg_in_use (regno, rld[r].opnum,
5721 rld[r].when_needed,
8ec450a4 5722 rld[r].mode);
5723 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5724 regno + k);
5725 }
be7ae2a4 5726 }
5727 }
5728 }
5729
5730 /* If we found a register to use already, or if this is an optional
5731 reload, we are done. */
eceef4c9 5732 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
5733 continue;
5734
5735#if 0
5736 /* No longer needed for correct operation. Might or might
5737 not give better code on the average. Want to experiment? */
5738
5739 /* See if there is a later reload that has a class different from our
5740 class that intersects our class or that requires less register
5741 than our reload. If so, we must allocate a register to this
5742 reload now, since that reload might inherit a previous reload
5743 and take the only available register in our class. Don't do this
5744 for optional reloads since they will force all previous reloads
5745 to be allocated. Also don't do this for reloads that have been
5746 turned off. */
5747
5748 for (i = j + 1; i < n_reloads; i++)
5749 {
5750 int s = reload_order[i];
5751
5752 if ((rld[s].in == 0 && rld[s].out == 0
5753 && ! rld[s].secondary_p)
5754 || rld[s].optional)
5755 continue;
5756
5757 if ((rld[s].class != rld[r].class
5758 && reg_classes_intersect_p (rld[r].class,
5759 rld[s].class))
8ec450a4 5760 || rld[s].nregs < rld[r].nregs)
05d10675 5761 break;
5762 }
5763
5764 if (i == n_reloads)
5765 continue;
5766
f5d8c9f4 5767 allocate_reload_reg (chain, r, j == n_reloads - 1);
5768#endif
5769 }
5770
5771 /* Now allocate reload registers for anything non-optional that
5772 didn't get one yet. */
5773 for (j = 0; j < n_reloads; j++)
5774 {
b3694847 5775 int r = reload_order[j];
5776
5777 /* Ignore reloads that got marked inoperative. */
eceef4c9 5778 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
5779 continue;
5780
5781 /* Skip reloads that already have a register allocated or are
0f41302f 5782 optional. */
eceef4c9 5783 if (rld[r].reg_rtx != 0 || rld[r].optional)
5784 continue;
5785
f5d8c9f4 5786 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
5787 break;
5788 }
5789
5790 /* If that loop got all the way, we have won. */
5791 if (j == n_reloads)
5792 {
5793 win = 1;
5794 break;
5795 }
32131a9c 5796
32131a9c 5797 /* Loop around and try without any inheritance. */
5798 }
5799
5800 if (! win)
5801 {
5802 /* First undo everything done by the failed attempt
5803 to allocate with inheritance. */
5804 choose_reload_regs_init (chain, save_reload_reg_rtx);
5805
5806 /* Some sanity tests to verify that the reloads found in the first
5807 pass are identical to the ones we have now. */
5808 if (chain->n_reloads != n_reloads)
5809 abort ();
5810
5811 for (i = 0; i < n_reloads; i++)
5812 {
5813 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
5814 continue;
5815 if (chain->rld[i].when_needed != rld[i].when_needed)
5816 abort ();
5817 for (j = 0; j < n_spills; j++)
5818 if (spill_regs[j] == chain->rld[i].regno)
5819 if (! set_reload_reg (j, i))
5820 failed_reload (chain->insn, i);
5821 }
5822 }
5823
5824 /* If we thought we could inherit a reload, because it seemed that
5825 nothing else wanted the same reload register earlier in the insn,
5826 verify that assumption, now that all reloads have been assigned.
5827 Likewise for reloads where reload_override_in has been set. */
32131a9c 5828
5829 /* If doing expensive optimizations, do one preliminary pass that doesn't
5830 cancel any inheritance, but removes reloads that have been needed only
5831 for reloads that we know can be inherited. */
5832 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
32131a9c 5833 {
cb2afeb3 5834 for (j = 0; j < n_reloads; j++)
029b38ff 5835 {
b3694847 5836 int r = reload_order[j];
cb2afeb3 5837 rtx check_reg;
5838 if (reload_inherited[r] && rld[r].reg_rtx)
5839 check_reg = rld[r].reg_rtx;
cb2afeb3 5840 else if (reload_override_in[r]
f8cfc6aa 5841 && (REG_P (reload_override_in[r])
05d10675 5842 || GET_CODE (reload_override_in[r]) == SUBREG))
cb2afeb3
R
5843 check_reg = reload_override_in[r];
5844 else
5845 continue;
5846 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
5847 rld[r].opnum, rld[r].when_needed, rld[r].in,
5848 (reload_inherited[r]
5849 ? rld[r].out : const0_rtx),
5850 r, 1))
029b38ff 5851 {
5852 if (pass)
5853 continue;
5854 reload_inherited[r] = 0;
5855 reload_override_in[r] = 0;
029b38ff 5856 }
5857 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
5858 reload_override_in, then we do not need its related
5859 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
5860 likewise for other reload types.
5861 We handle this by removing a reload when its only replacement
5862 is mentioned in reload_in of the reload we are going to inherit.
5863 Auto_inc expressions are a special case; even if the input is
5864 inherited, we still need the address for the output. We can
fe92fe26 5865 recognize them because they have RELOAD_OUT set to RELOAD_IN.
eaec9b3d 5866 If we succeed in removing some reload and we are doing a preliminary
5867 pass just to remove such reloads, make another pass, since the
5868 removal of one reload might allow us to inherit another one. */
5869 else if (rld[r].in
5870 && rld[r].out != rld[r].in
5871 && remove_address_replacements (rld[r].in) && pass)
cb2afeb3 5872 pass = 2;
5873 }
5874 }
5875
5876 /* Now that reload_override_in is known valid,
5877 actually override reload_in. */
5878 for (j = 0; j < n_reloads; j++)
5879 if (reload_override_in[j])
eceef4c9 5880 rld[j].in = reload_override_in[j];
32131a9c 5881
272d0bee 5882 /* If this reload won't be done because it has been canceled or is
5883 optional and not inherited, clear reload_reg_rtx so other
5884 routines (such as subst_reloads) don't get confused. */
5885 for (j = 0; j < n_reloads; j++)
5886 if (rld[j].reg_rtx != 0
5887 && ((rld[j].optional && ! reload_inherited[j])
5888 || (rld[j].in == 0 && rld[j].out == 0
5889 && ! rld[j].secondary_p)))
be7ae2a4 5890 {
eceef4c9 5891 int regno = true_regnum (rld[j].reg_rtx);
5892
5893 if (spill_reg_order[regno] >= 0)
eceef4c9 5894 clear_reload_reg_in_use (regno, rld[j].opnum,
8ec450a4 5895 rld[j].when_needed, rld[j].mode);
eceef4c9 5896 rld[j].reg_rtx = 0;
c0029be5 5897 reload_spill_index[j] = -1;
be7ae2a4 5898 }
5899
5900 /* Record which pseudos and which spill regs have output reloads. */
5901 for (j = 0; j < n_reloads; j++)
5902 {
b3694847 5903 int r = reload_order[j];
5904
5905 i = reload_spill_index[r];
5906
e6e52be0 5907 /* I is nonneg if this reload uses a register.
eceef4c9 5908 If rld[r].reg_rtx is 0, this is an optional reload
32131a9c 5909 that we opted to ignore. */
f8cfc6aa 5910 if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
eceef4c9 5911 && rld[r].reg_rtx != 0)
32131a9c 5912 {
b3694847 5913 int nregno = REGNO (rld[r].out_reg);
5914 int nr = 1;
5915
5916 if (nregno < FIRST_PSEUDO_REGISTER)
66fd46b6 5917 nr = hard_regno_nregs[nregno][rld[r].mode];
5918
5919 while (--nr >= 0)
5920 reg_has_output_reload[nregno + nr] = 1;
5921
5922 if (i >= 0)
32131a9c 5923 {
66fd46b6 5924 nr = hard_regno_nregs[i][rld[r].mode];
372e033b 5925 while (--nr >= 0)
e6e52be0 5926 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
32131a9c
RK
5927 }
5928
5929 if (rld[r].when_needed != RELOAD_OTHER
5930 && rld[r].when_needed != RELOAD_FOR_OUTPUT
5931 && rld[r].when_needed != RELOAD_FOR_INSN)
32131a9c
RK
5932 abort ();
5933 }
5934 }
5935}
cb2afeb3
R
5936
5937/* Deallocate the reload register for reload R. This is called from
5938 remove_address_replacements. */
1d813780 5939
cb2afeb3 5940void
0c20a65f 5941deallocate_reload_reg (int r)
cb2afeb3
R
5942{
5943 int regno;
5944
eceef4c9 5945 if (! rld[r].reg_rtx)
cb2afeb3 5946 return;
eceef4c9
BS
5947 regno = true_regnum (rld[r].reg_rtx);
5948 rld[r].reg_rtx = 0;
cb2afeb3 5949 if (spill_reg_order[regno] >= 0)
eceef4c9 5950 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
8ec450a4 5951 rld[r].mode);
cb2afeb3
R
5952 reload_spill_index[r] = -1;
5953}
32131a9c 5954\f
40f03658 5955/* If SMALL_REGISTER_CLASSES is nonzero, we may not have merged two
546b63fb
RK
5956 reloads of the same item for fear that we might not have enough reload
5957 registers. However, normally they will get the same reload register
05d10675 5958 and hence actually need not be loaded twice.
546b63fb
RK
5959
5960 Here we check for the most common case of this phenomenon: when we have
5961 a number of reloads for the same object, each of which were allocated
5962 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5963 reload, and is not modified in the insn itself. If we find such,
5964 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5965 This will not increase the number of spill registers needed and will
5966 prevent redundant code. */
5967
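/* As an illustration (not tied to any particular port): if two input
   reloads were both assigned (reg:SI 3) as their reload register and
   both load the same value, say (mem:SI (reg:SI 100)), the loop below
   merges them into a single RELOAD_OTHER reload of that value, so it
   is loaded only once.  */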
546b63fb 5968static void
0c20a65f 5969merge_assigned_reloads (rtx insn)
546b63fb
RK
5970{
5971 int i, j;
5972
5973 /* Scan all the reloads looking for ones that only load values and
5974 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5975 assigned and not modified by INSN. */
5976
5977 for (i = 0; i < n_reloads; i++)
5978 {
d668e863
R
5979 int conflicting_input = 0;
5980 int max_input_address_opnum = -1;
5981 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
5982
eceef4c9
BS
5983 if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
5984 || rld[i].out != 0 || rld[i].reg_rtx == 0
5985 || reg_set_p (rld[i].reg_rtx, insn))
546b63fb
RK
5986 continue;
5987
5988 /* Look at all other reloads. Ensure that the only use of this
5989 reload_reg_rtx is in a reload that just loads the same value
5990 as we do. Note that any secondary reloads must be of the identical
5991 class since the values, modes, and result registers are the
5992 same, so we need not do anything with any secondary reloads. */
5993
5994 for (j = 0; j < n_reloads; j++)
5995 {
eceef4c9
BS
5996 if (i == j || rld[j].reg_rtx == 0
5997 || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
5998 rld[i].reg_rtx))
546b63fb
RK
5999 continue;
6000
eceef4c9
BS
6001 if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6002 && rld[j].opnum > max_input_address_opnum)
6003 max_input_address_opnum = rld[j].opnum;
d668e863 6004
546b63fb 6005 /* If the reload regs aren't exactly the same (e.g., different modes)
d668e863
R
6006 or if the values are different, we can't merge this reload.
6007 But if it is an input reload, we might still merge
6008 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
546b63fb 6009
eceef4c9
BS
6010 if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6011 || rld[j].out != 0 || rld[j].in == 0
6012 || ! rtx_equal_p (rld[i].in, rld[j].in))
d668e863 6013 {
eceef4c9
BS
6014 if (rld[j].when_needed != RELOAD_FOR_INPUT
6015 || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6016 || rld[i].opnum > rld[j].opnum)
6017 && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
d668e863
R
6018 break;
6019 conflicting_input = 1;
eceef4c9
BS
6020 if (min_conflicting_input_opnum > rld[j].opnum)
6021 min_conflicting_input_opnum = rld[j].opnum;
d668e863 6022 }
546b63fb
RK
6023 }
6024
6025 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6026 we, in fact, found any matching reloads. */
6027
d668e863
R
6028 if (j == n_reloads
6029 && max_input_address_opnum <= min_conflicting_input_opnum)
546b63fb
RK
6030 {
6031 for (j = 0; j < n_reloads; j++)
eceef4c9
BS
6032 if (i != j && rld[j].reg_rtx != 0
6033 && rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
d668e863 6034 && (! conflicting_input
eceef4c9
BS
6035 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6036 || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
546b63fb 6037 {
eceef4c9
BS
6038 rld[i].when_needed = RELOAD_OTHER;
6039 rld[j].in = 0;
efdb3590 6040 reload_spill_index[j] = -1;
546b63fb
RK
6041 transfer_replacements (i, j);
6042 }
6043
6044 /* If this is now RELOAD_OTHER, look for any reloads that load
6045 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6046 if they were for inputs, RELOAD_OTHER for outputs. Note that
6047 this test is equivalent to looking for reloads for this operand
6048 number. */
dec0798e
R
6049 /* We must take special care when there are two or more reloads to
6050 be merged and a RELOAD_FOR_OUTPUT_ADDRESS reload that loads the
6051 same value or a part of it; we must not change its type if there
6052 is a conflicting input. */
546b63fb 6053
eceef4c9 6054 if (rld[i].when_needed == RELOAD_OTHER)
546b63fb 6055 for (j = 0; j < n_reloads; j++)
eceef4c9 6056 if (rld[j].in != 0
91667711 6057 && rld[j].when_needed != RELOAD_OTHER
dec0798e
R
6058 && rld[j].when_needed != RELOAD_FOR_OTHER_ADDRESS
6059 && (! conflicting_input
6060 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6061 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
eceef4c9
BS
6062 && reg_overlap_mentioned_for_reload_p (rld[j].in,
6063 rld[i].in))
c15c18c5
JW
6064 {
6065 int k;
6066
6067 rld[j].when_needed
6068 = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6069 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6070 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6071
6072 /* Check to see if we accidentally converted two reloads
8a5db2b4
JW
6073 that use the same reload register with different inputs
6074 to the same type. If so, the resulting code won't work,
6075 so abort. */
c15c18c5
JW
6076 if (rld[j].reg_rtx)
6077 for (k = 0; k < j; k++)
6078 if (rld[k].in != 0 && rld[k].reg_rtx != 0
6079 && rld[k].when_needed == rld[j].when_needed
8a5db2b4
JW
6080 && rtx_equal_p (rld[k].reg_rtx, rld[j].reg_rtx)
6081 && ! rtx_equal_p (rld[k].in, rld[j].in))
c15c18c5
JW
6082 abort ();
6083 }
546b63fb
RK
6084 }
6085 }
05d10675 6086}
546b63fb 6087\f
367b1cf5
BS
6088/* These arrays are filled by emit_reload_insns and its subroutines. */
6089static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6090static rtx other_input_address_reload_insns = 0;
6091static rtx other_input_reload_insns = 0;
6092static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6093static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6094static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6095static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6096static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6097static rtx operand_reload_insns = 0;
6098static rtx other_operand_reload_insns = 0;
6099static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6100
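/* Each of the sequences above is built with push_to_sequence/get_insns
   by the emit_*_reload_insns routines below and is later spliced around
   the insn being reloaded by emit_reload_insns, in the order documented
   there.  */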
6101/* Values to be put in spill_reg_store are put here first. */
6102static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6103static HARD_REG_SET reg_reloaded_died;
6104
6105/* Generate insns to perform reload RL, which is for the insn in CHAIN and
6106 has the number J. OLD contains the value to be used as input. */
770ae6cc 6107
32131a9c 6108static void
0c20a65f
AJ
6109emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
6110 rtx old, int j)
32131a9c 6111{
7609e720 6112 rtx insn = chain->insn;
b3694847 6113 rtx reloadreg = rl->reg_rtx;
367b1cf5
BS
6114 rtx oldequiv_reg = 0;
6115 rtx oldequiv = 0;
6116 int special = 0;
6117 enum machine_mode mode;
6118 rtx *where;
6119
6120 /* Determine the mode to reload in.
6121 This is very tricky because we have three to choose from.
6122 There is the mode the insn operand wants (rl->inmode).
6123 There is the mode of the reload register RELOADREG.
6124 There is the intrinsic mode of the operand, which we could find
6125 by stripping some SUBREGs.
6126 It turns out that RELOADREG's mode is irrelevant:
6127 we can change that arbitrarily.
6128
6129 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6130 then the reload reg may not support QImode moves, so use SImode.
6131 If foo is in memory due to spilling a pseudo reg, this is safe,
6132 because the QImode value is in the least significant part of a
6133 slot big enough for a SImode. If foo is some other sort of
6134 memory reference, then it is impossible to reload this case,
6135 so previous passes had better make sure this never happens.
6136
6137 Then consider a one-word union which has SImode and one of its
6138 members is a float, being fetched as (SUBREG:SF union:SI).
6139 We must fetch that as SFmode because we could be loading into
6140 a float-only register. In this case OLD's mode is correct.
6141
6142 Consider an immediate integer: it has VOIDmode. Here we need
6143 to get a mode from something else.
6144
6145 In some cases, there is a fourth mode, the operand's
6146 containing mode. If the insn specifies a containing mode for
6147 this operand, it overrides all others.
6148
6149 I am not sure whether the algorithm here is always right,
6150 but it does the right things in those cases. */
6151
6152 mode = GET_MODE (old);
6153 if (mode == VOIDmode)
6154 mode = rl->inmode;
7609e720 6155
367b1cf5
BS
6156#ifdef SECONDARY_INPUT_RELOAD_CLASS
6157 /* If we need a secondary register for this operation, see if
6158 the value is already in a register in that class. Don't
6159 do this if the secondary register will be used as a scratch
6160 register. */
6161
6162 if (rl->secondary_in_reload >= 0
6163 && rl->secondary_in_icode == CODE_FOR_nothing
6164 && optimize)
6165 oldequiv
6166 = find_equiv_reg (old, insn,
6167 rld[rl->secondary_in_reload].class,
9714cf43 6168 -1, NULL, 0, mode);
367b1cf5 6169#endif
e6e52be0 6170
367b1cf5
BS
6171 /* If reloading from memory, see if there is a register
6172 that already holds the same value. If so, reload from there.
6173 We can pass 0 as the reload_reg_p argument because
6174 any other reload has either already been emitted,
6175 in which case find_equiv_reg will see the reload-insn,
6176 or has yet to be emitted, in which case it doesn't matter
6177 because we will use this equiv reg right away. */
6178
6179 if (oldequiv == 0 && optimize
3c0cb5de 6180 && (MEM_P (old)
f8cfc6aa 6181 || (REG_P (old)
367b1cf5
BS
6182 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6183 && reg_renumber[REGNO (old)] < 0)))
9714cf43 6184 oldequiv = find_equiv_reg (old, insn, ALL_REGS, -1, NULL, 0, mode);
367b1cf5
BS
6185
6186 if (oldequiv)
6187 {
770ae6cc 6188 unsigned int regno = true_regnum (oldequiv);
367b1cf5
BS
6189
6190 /* Don't use OLDEQUIV if any other reload changes it at an
6191 earlier stage of this insn or at this stage. */
c02cad8f
BS
6192 if (! free_for_value_p (regno, rl->mode, rl->opnum, rl->when_needed,
6193 rl->in, const0_rtx, j, 0))
367b1cf5
BS
6194 oldequiv = 0;
6195
6196 /* If it is no cheaper to copy from OLDEQUIV into the
6197 reload register than it would be to move from memory,
6198 don't use it. Likewise, if we need a secondary register
6d2f8887 6199 or memory. */
367b1cf5
BS
6200
6201 if (oldequiv != 0
fc555370 6202 && (((enum reg_class) REGNO_REG_CLASS (regno) != rl->class
e56b4594 6203 && (REGISTER_MOVE_COST (mode, REGNO_REG_CLASS (regno),
367b1cf5
BS
6204 rl->class)
6205 >= MEMORY_MOVE_COST (mode, rl->class, 1)))
6206#ifdef SECONDARY_INPUT_RELOAD_CLASS
6207 || (SECONDARY_INPUT_RELOAD_CLASS (rl->class,
6208 mode, oldequiv)
6209 != NO_REGS)
6210#endif
6211#ifdef SECONDARY_MEMORY_NEEDED
6212 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno),
6213 rl->class,
6214 mode)
6215#endif
6216 ))
6217 oldequiv = 0;
6218 }
32131a9c 6219
367b1cf5
BS
6220 /* delete_output_reload is only invoked properly if old contains
6221 the original pseudo register. Since this is replaced with a
6222 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6223 find the pseudo in RELOAD_IN_REG. */
6224 if (oldequiv == 0
6225 && reload_override_in[j]
f8cfc6aa 6226 && REG_P (rl->in_reg))
367b1cf5
BS
6227 {
6228 oldequiv = old;
6229 old = rl->in_reg;
6230 }
6231 if (oldequiv == 0)
6232 oldequiv = old;
f8cfc6aa 6233 else if (REG_P (oldequiv))
367b1cf5
BS
6234 oldequiv_reg = oldequiv;
6235 else if (GET_CODE (oldequiv) == SUBREG)
6236 oldequiv_reg = SUBREG_REG (oldequiv);
6237
6238 /* If we are reloading from a register that was recently stored in
6239 with an output-reload, see if we can prove there was
6240 actually no need to store the old value in it. */
6241
f8cfc6aa 6242 if (optimize && REG_P (oldequiv)
367b1cf5
BS
6243 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6244 && spill_reg_store[REGNO (oldequiv)]
f8cfc6aa 6245 && REG_P (old)
367b1cf5
BS
6246 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6247 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6248 rl->out_reg)))
6249 delete_output_reload (insn, j, REGNO (oldequiv));
6250
6251 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6252 then load RELOADREG from OLDEQUIV. Note that we cannot use
6253 gen_lowpart_common since it can do the wrong thing when
6254 RELOADREG has a multi-word mode. Note that RELOADREG
6255 must always be a REG here. */
6256
6257 if (GET_MODE (reloadreg) != mode)
f12448c8 6258 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
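 /* reload_adjust_reg_for_mode returns RELOADREG's hard register
    (possibly adjusted for word ordering) as an rtx in MODE; e.g. a
    reload register chosen as (reg:SI 3) would be used here as
    (reg:QI 3) if MODE were QImode (register numbers are only
    illustrative).  */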
367b1cf5
BS
6259 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6260 oldequiv = SUBREG_REG (oldequiv);
6261 if (GET_MODE (oldequiv) != VOIDmode
6262 && mode != GET_MODE (oldequiv))
ddef6bc7 6263 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
367b1cf5
BS
6264
6265 /* Switch to the right place to emit the reload insns. */
6266 switch (rl->when_needed)
6267 {
6268 case RELOAD_OTHER:
6269 where = &other_input_reload_insns;
6270 break;
6271 case RELOAD_FOR_INPUT:
6272 where = &input_reload_insns[rl->opnum];
6273 break;
6274 case RELOAD_FOR_INPUT_ADDRESS:
6275 where = &input_address_reload_insns[rl->opnum];
6276 break;
6277 case RELOAD_FOR_INPADDR_ADDRESS:
6278 where = &inpaddr_address_reload_insns[rl->opnum];
6279 break;
6280 case RELOAD_FOR_OUTPUT_ADDRESS:
6281 where = &output_address_reload_insns[rl->opnum];
6282 break;
6283 case RELOAD_FOR_OUTADDR_ADDRESS:
6284 where = &outaddr_address_reload_insns[rl->opnum];
6285 break;
6286 case RELOAD_FOR_OPERAND_ADDRESS:
6287 where = &operand_reload_insns;
6288 break;
6289 case RELOAD_FOR_OPADDR_ADDR:
6290 where = &other_operand_reload_insns;
6291 break;
6292 case RELOAD_FOR_OTHER_ADDRESS:
6293 where = &other_input_address_reload_insns;
6294 break;
6295 default:
6296 abort ();
6297 }
546b63fb 6298
367b1cf5 6299 push_to_sequence (*where);
32131a9c 6300
367b1cf5
BS
6301 /* Auto-increment addresses must be reloaded in a special way. */
6302 if (rl->out && ! rl->out_reg)
32131a9c 6303 {
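 /* For these reloads RL->out is RL->in itself (see the auto_inc
    comment earlier in this file): the incremented location must be
    updated as well as copied into RELOADREG.  inc_for_reload emits
    that sequence, and its return value is recorded in
    new_spill_reg_store below.  */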
367b1cf5
BS
6304 /* We are not going to bother supporting the case where an
6305 incremented register can't be copied directly from
6306 OLDEQUIV since this seems highly unlikely. */
6307 if (rl->secondary_in_reload >= 0)
6308 abort ();
32131a9c 6309
367b1cf5
BS
6310 if (reload_inherited[j])
6311 oldequiv = reloadreg;
cb2afeb3 6312
367b1cf5 6313 old = XEXP (rl->in_reg, 0);
32131a9c 6314
f8cfc6aa 6315 if (optimize && REG_P (oldequiv)
367b1cf5
BS
6316 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6317 && spill_reg_store[REGNO (oldequiv)]
f8cfc6aa 6318 && REG_P (old)
367b1cf5
BS
6319 && (dead_or_set_p (insn,
6320 spill_reg_stored_to[REGNO (oldequiv)])
6321 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6322 old)))
6323 delete_output_reload (insn, j, REGNO (oldequiv));
6324
6325 /* Prevent normal processing of this reload. */
6326 special = 1;
6327 /* Output a special code sequence for this case. */
6328 new_spill_reg_store[REGNO (reloadreg)]
6329 = inc_for_reload (reloadreg, oldequiv, rl->out,
6330 rl->inc);
6331 }
32131a9c 6332
367b1cf5
BS
6333 /* If we are reloading a pseudo-register that was set by the previous
6334 insn, see if we can get rid of that pseudo-register entirely
6335 by redirecting the previous insn into our reload register. */
6336
f8cfc6aa 6337 else if (optimize && REG_P (old)
367b1cf5
BS
6338 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6339 && dead_or_set_p (insn, old)
6340 /* This is unsafe if some other reload
6341 uses the same reg first. */
ff6534ad 6342 && ! conflicts_with_override (reloadreg)
c02cad8f
BS
6343 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
6344 rl->when_needed, old, rl->out, j, 0))
367b1cf5
BS
6345 {
6346 rtx temp = PREV_INSN (insn);
4b4bf941 6347 while (temp && NOTE_P (temp))
367b1cf5
BS
6348 temp = PREV_INSN (temp);
6349 if (temp
4b4bf941 6350 && NONJUMP_INSN_P (temp)
367b1cf5
BS
6351 && GET_CODE (PATTERN (temp)) == SET
6352 && SET_DEST (PATTERN (temp)) == old
6353 /* Make sure we can access insn_operand_constraint. */
6354 && asm_noperands (PATTERN (temp)) < 0
367b1cf5
BS
6355 /* This is unsafe if the operand occurs more than once in the
6356 current insn. Perhaps some occurrences aren't reloaded. */
10d1bb36 6357 && count_occurrences (PATTERN (insn), old, 0) == 1)
367b1cf5 6358 {
10d1bb36 6359 rtx old = SET_DEST (PATTERN (temp));
367b1cf5
BS
6360 /* Store into the reload register instead of the pseudo. */
6361 SET_DEST (PATTERN (temp)) = reloadreg;
6362
10d1bb36
JH
6363 /* Verify that resulting insn is valid. */
6364 extract_insn (temp);
6365 if (constrain_operands (1))
32131a9c 6366 {
10d1bb36
JH
6367 /* If the previous insn is an output reload, the source is
6368 a reload register, and its spill_reg_store entry will
6369 contain the previous destination. This is now
6370 invalid. */
f8cfc6aa 6371 if (REG_P (SET_SRC (PATTERN (temp)))
10d1bb36
JH
6372 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
6373 {
6374 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6375 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6376 }
32131a9c 6377
10d1bb36
JH
6378 /* If these are the only uses of the pseudo reg,
6379 pretend for GDB it lives in the reload reg we used. */
6380 if (REG_N_DEATHS (REGNO (old)) == 1
6381 && REG_N_SETS (REGNO (old)) == 1)
6382 {
6383 reg_renumber[REGNO (old)] = REGNO (rl->reg_rtx);
6384 alter_reg (REGNO (old), -1);
6385 }
6386 special = 1;
6387 }
6388 else
cb2afeb3 6389 {
10d1bb36 6390 SET_DEST (PATTERN (temp)) = old;
32131a9c 6391 }
367b1cf5
BS
6392 }
6393 }
32131a9c 6394
367b1cf5 6395 /* We can't do that, so output an insn to load RELOADREG. */
32131a9c 6396
367b1cf5
BS
6397#ifdef SECONDARY_INPUT_RELOAD_CLASS
6398 /* If we have a secondary reload, pick up the secondary register
6399 and icode, if any. If OLDEQUIV and OLD are different or
6400 if this is an in-out reload, recompute whether or not we
6401 still need a secondary register and what the icode should
6402 be. If we still need a secondary register and the class or
6403 icode is different, go back to reloading from OLD if using
6404 OLDEQUIV means that we got the wrong type of register. We
6405 cannot have different class or icode due to an in-out reload
6406 because we don't make such reloads when both the input and
6407 output need secondary reload registers. */
6408
07875628 6409 if (! special && rl->secondary_in_reload >= 0)
367b1cf5
BS
6410 {
6411 rtx second_reload_reg = 0;
6412 int secondary_reload = rl->secondary_in_reload;
6413 rtx real_oldequiv = oldequiv;
6414 rtx real_old = old;
6415 rtx tmp;
6416 enum insn_code icode;
6417
6418 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6419 and similarly for OLD.
6420 See comments in get_secondary_reload in reload.c. */
6421 /* If it is a pseudo that cannot be replaced with its
6422 equivalent MEM, we must fall back to reload_in, which
6423 will have all the necessary substitutions registered.
6424 Likewise for a pseudo that can't be replaced with its
6425 equivalent constant.
6426
6427 Take extra care for subregs of such pseudos. Note that
6428 we cannot use reg_equiv_mem in this case because it is
6429 not in the right mode. */
6430
6431 tmp = oldequiv;
6432 if (GET_CODE (tmp) == SUBREG)
6433 tmp = SUBREG_REG (tmp);
f8cfc6aa 6434 if (REG_P (tmp)
367b1cf5
BS
6435 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6436 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6437 || reg_equiv_constant[REGNO (tmp)] != 0))
6438 {
6439 if (! reg_equiv_mem[REGNO (tmp)]
6440 || num_not_at_initial_offset
6441 || GET_CODE (oldequiv) == SUBREG)
6442 real_oldequiv = rl->in;
6443 else
6444 real_oldequiv = reg_equiv_mem[REGNO (tmp)];
6445 }
32131a9c 6446
367b1cf5
BS
6447 tmp = old;
6448 if (GET_CODE (tmp) == SUBREG)
6449 tmp = SUBREG_REG (tmp);
f8cfc6aa 6450 if (REG_P (tmp)
367b1cf5
BS
6451 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6452 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6453 || reg_equiv_constant[REGNO (tmp)] != 0))
6454 {
6455 if (! reg_equiv_mem[REGNO (tmp)]
6456 || num_not_at_initial_offset
6457 || GET_CODE (old) == SUBREG)
6458 real_old = rl->in;
6459 else
6460 real_old = reg_equiv_mem[REGNO (tmp)];
6461 }
6462
6463 second_reload_reg = rld[secondary_reload].reg_rtx;
6464 icode = rl->secondary_in_icode;
6465
6466 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6467 || (rl->in != 0 && rl->out != 0))
6468 {
6469 enum reg_class new_class
6470 = SECONDARY_INPUT_RELOAD_CLASS (rl->class,
6471 mode, real_oldequiv);
6472
6473 if (new_class == NO_REGS)
6474 second_reload_reg = 0;
6475 else
32131a9c 6476 {
367b1cf5
BS
6477 enum insn_code new_icode;
6478 enum machine_mode new_mode;
6479
6480 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6481 REGNO (second_reload_reg)))
6482 oldequiv = old, real_oldequiv = real_old;
6483 else
32131a9c 6484 {
367b1cf5
BS
6485 new_icode = reload_in_optab[(int) mode];
6486 if (new_icode != CODE_FOR_nothing
6487 && ((insn_data[(int) new_icode].operand[0].predicate
6488 && ! ((*insn_data[(int) new_icode].operand[0].predicate)
6489 (reloadreg, mode)))
6490 || (insn_data[(int) new_icode].operand[1].predicate
6491 && ! ((*insn_data[(int) new_icode].operand[1].predicate)
6492 (real_oldequiv, mode)))))
6493 new_icode = CODE_FOR_nothing;
6494
6495 if (new_icode == CODE_FOR_nothing)
6496 new_mode = mode;
6497 else
6498 new_mode = insn_data[(int) new_icode].operand[2].mode;
d30e8ef0 6499
367b1cf5 6500 if (GET_MODE (second_reload_reg) != new_mode)
32131a9c 6501 {
367b1cf5
BS
6502 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6503 new_mode))
6504 oldequiv = old, real_oldequiv = real_old;
6505 else
6506 second_reload_reg
f12448c8
AO
6507 = reload_adjust_reg_for_mode (second_reload_reg,
6508 new_mode);
32131a9c 6509 }
32131a9c
RK
6510 }
6511 }
367b1cf5 6512 }
32131a9c 6513
367b1cf5
BS
6514 /* If we still need a secondary reload register, check
6515 to see if it is being used as a scratch or intermediate
6516 register and generate code appropriately. If we need
6517 a scratch register, use REAL_OLDEQUIV since the form of
6518 the insn may depend on the actual address if it is
6519 a MEM. */
546b63fb 6520
367b1cf5
BS
6521 if (second_reload_reg)
6522 {
6523 if (icode != CODE_FOR_nothing)
32131a9c 6524 {
367b1cf5
BS
6525 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6526 second_reload_reg));
07875628 6527 special = 1;
367b1cf5
BS
6528 }
6529 else
6530 {
6531 /* See if we need a scratch register to load the
6532 intermediate register (a tertiary reload). */
6533 enum insn_code tertiary_icode
6534 = rld[secondary_reload].secondary_in_icode;
1554c2c6 6535
367b1cf5
BS
6536 if (tertiary_icode != CODE_FOR_nothing)
6537 {
6538 rtx third_reload_reg
6539 = rld[rld[secondary_reload].secondary_in_reload].reg_rtx;
1554c2c6 6540
367b1cf5
BS
6541 emit_insn ((GEN_FCN (tertiary_icode)
6542 (second_reload_reg, real_oldequiv,
6543 third_reload_reg)));
6544 }
6545 else
6546 gen_reload (second_reload_reg, real_oldequiv,
6547 rl->opnum,
6548 rl->when_needed);
32131a9c 6549
367b1cf5
BS
6550 oldequiv = second_reload_reg;
6551 }
6552 }
6553 }
6554#endif
32131a9c 6555
07875628 6556 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
367b1cf5
BS
6557 {
6558 rtx real_oldequiv = oldequiv;
6559
f8cfc6aa 6560 if ((REG_P (oldequiv)
367b1cf5
BS
6561 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6562 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
6563 || reg_equiv_constant[REGNO (oldequiv)] != 0))
6564 || (GET_CODE (oldequiv) == SUBREG
f8cfc6aa 6565 && REG_P (SUBREG_REG (oldequiv))
367b1cf5
BS
6566 && (REGNO (SUBREG_REG (oldequiv))
6567 >= FIRST_PSEUDO_REGISTER)
6568 && ((reg_equiv_memory_loc
6569 [REGNO (SUBREG_REG (oldequiv))] != 0)
6570 || (reg_equiv_constant
716120a7
JJ
6571 [REGNO (SUBREG_REG (oldequiv))] != 0)))
6572 || (CONSTANT_P (oldequiv)
019d2e99
AS
6573 && (PREFERRED_RELOAD_CLASS (oldequiv,
6574 REGNO_REG_CLASS (REGNO (reloadreg)))
6575 == NO_REGS)))
367b1cf5
BS
6576 real_oldequiv = rl->in;
6577 gen_reload (reloadreg, real_oldequiv, rl->opnum,
6578 rl->when_needed);
6579 }
32131a9c 6580
94bd63e5
AH
6581 if (flag_non_call_exceptions)
6582 copy_eh_notes (insn, get_insns ());
6583
367b1cf5
BS
6584 /* End this sequence. */
6585 *where = get_insns ();
6586 end_sequence ();
a6a2274a 6587
367b1cf5
BS
6588 /* Update reload_override_in so that delete_address_reloads_1
6589 can see the actual register usage. */
6590 if (oldequiv_reg)
6591 reload_override_in[j] = oldequiv;
6592}
32131a9c 6593
367b1cf5
BS
6594/* Generate insns for the output reload RL, which is for the insn described
6595 by CHAIN and has the number J. */
6596static void
0c20a65f
AJ
6597emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
6598 int j)
367b1cf5
BS
6599{
6600 rtx reloadreg = rl->reg_rtx;
6601 rtx insn = chain->insn;
6602 int special = 0;
6603 rtx old = rl->out;
6604 enum machine_mode mode = GET_MODE (old);
6605 rtx p;
32131a9c 6606
367b1cf5
BS
6607 if (rl->when_needed == RELOAD_OTHER)
6608 start_sequence ();
6609 else
6610 push_to_sequence (output_reload_insns[rl->opnum]);
32131a9c 6611
367b1cf5
BS
6612 /* Determine the mode to reload in.
6613 See comments above (for input reloading). */
32131a9c 6614
367b1cf5
BS
6615 if (mode == VOIDmode)
6616 {
6617 /* VOIDmode should never happen for an output. */
6618 if (asm_noperands (PATTERN (insn)) < 0)
6619 /* It's the compiler's fault. */
6620 fatal_insn ("VOIDmode on an output", insn);
6621 error_for_asm (insn, "output operand is constant in `asm'");
6622 /* Prevent crash--use something we know is valid. */
6623 mode = word_mode;
6624 old = gen_rtx_REG (mode, REGNO (reloadreg));
6625 }
546b63fb 6626
367b1cf5 6627 if (GET_MODE (reloadreg) != mode)
f12448c8 6628 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
32131a9c 6629
367b1cf5 6630#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
32131a9c 6631
367b1cf5
BS
6632 /* If we need two reload regs, set RELOADREG to the intermediate
6633 one, since it will be stored into OLD. We might need a secondary
6634 register only for an input reload, so check again here. */
32131a9c 6635
367b1cf5
BS
6636 if (rl->secondary_out_reload >= 0)
6637 {
6638 rtx real_old = old;
cb2afeb3 6639
f8cfc6aa 6640 if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
367b1cf5
BS
6641 && reg_equiv_mem[REGNO (old)] != 0)
6642 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 6643
367b1cf5
BS
6644 if ((SECONDARY_OUTPUT_RELOAD_CLASS (rl->class,
6645 mode, real_old)
6646 != NO_REGS))
b60a8416 6647 {
367b1cf5
BS
6648 rtx second_reloadreg = reloadreg;
6649 reloadreg = rld[rl->secondary_out_reload].reg_rtx;
32131a9c 6650
367b1cf5
BS
6651 /* See if RELOADREG is to be used as a scratch register
6652 or as an intermediate register. */
6653 if (rl->secondary_out_icode != CODE_FOR_nothing)
6654 {
6655 emit_insn ((GEN_FCN (rl->secondary_out_icode)
6656 (real_old, second_reloadreg, reloadreg)));
6657 special = 1;
6658 }
6659 else
6660 {
6661 /* See if we need both a scratch and intermediate reload
6662 register. */
32131a9c 6663
367b1cf5
BS
6664 int secondary_reload = rl->secondary_out_reload;
6665 enum insn_code tertiary_icode
6666 = rld[secondary_reload].secondary_out_icode;
32131a9c 6667
367b1cf5 6668 if (GET_MODE (reloadreg) != mode)
f12448c8 6669 reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);
cb2afeb3 6670
367b1cf5
BS
6671 if (tertiary_icode != CODE_FOR_nothing)
6672 {
6673 rtx third_reloadreg
6674 = rld[rld[secondary_reload].secondary_out_reload].reg_rtx;
6675 rtx tem;
6676
6677 /* Copy the primary reload reg to the secondary reload reg
6678 (note that these have been swapped above), then copy the
78adc5a0 6679 secondary reload reg to OLD using our insn. */
367b1cf5
BS
6680
6681 /* If REAL_OLD is a paradoxical SUBREG, remove it
6682 and try to put the opposite SUBREG on
6683 RELOADREG. */
6684 if (GET_CODE (real_old) == SUBREG
6685 && (GET_MODE_SIZE (GET_MODE (real_old))
6686 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6687 && 0 != (tem = gen_lowpart_common
6688 (GET_MODE (SUBREG_REG (real_old)),
6689 reloadreg)))
6690 real_old = SUBREG_REG (real_old), reloadreg = tem;
6691
6692 gen_reload (reloadreg, second_reloadreg,
6693 rl->opnum, rl->when_needed);
6694 emit_insn ((GEN_FCN (tertiary_icode)
6695 (real_old, reloadreg, third_reloadreg)));
6696 special = 1;
6697 }
05d10675 6698
367b1cf5
BS
6699 else
6700 /* Copy between the reload regs here and then to
6701 OUT later. */
cb2afeb3 6702
367b1cf5
BS
6703 gen_reload (reloadreg, second_reloadreg,
6704 rl->opnum, rl->when_needed);
a7911cd2 6705 }
367b1cf5
BS
6706 }
6707 }
32131a9c
RK
6708#endif
6709
367b1cf5
BS
6710 /* Output the last reload insn. */
6711 if (! special)
6712 {
6713 rtx set;
6714
6715 /* Don't output the last reload if OLD is not the dest of
1d7254c5 6716 INSN and is in the src and is clobbered by INSN. */
367b1cf5 6717 if (! flag_expensive_optimizations
f8cfc6aa 6718 || !REG_P (old)
367b1cf5
BS
6719 || !(set = single_set (insn))
6720 || rtx_equal_p (old, SET_DEST (set))
6721 || !reg_mentioned_p (old, SET_SRC (set))
9532e31f 6722 || !regno_clobbered_p (REGNO (old), insn, rl->mode, 0))
367b1cf5
BS
6723 gen_reload (old, reloadreg, rl->opnum,
6724 rl->when_needed);
6725 }
32131a9c 6726
367b1cf5
BS
6727 /* Look at all insns we emitted, just to be safe. */
6728 for (p = get_insns (); p; p = NEXT_INSN (p))
2c3c49de 6729 if (INSN_P (p))
367b1cf5
BS
6730 {
6731 rtx pat = PATTERN (p);
546b63fb 6732
367b1cf5
BS
6733 /* If this output reload doesn't come from a spill reg,
6734 clear any memory of reloaded copies of the pseudo reg.
6735 If this output reload comes from a spill reg,
6736 reg_has_output_reload will make this do nothing. */
6737 note_stores (pat, forget_old_reloads_1, NULL);
cb2afeb3 6738
367b1cf5
BS
6739 if (reg_mentioned_p (rl->reg_rtx, pat))
6740 {
6741 rtx set = single_set (insn);
6742 if (reload_spill_index[j] < 0
6743 && set
6744 && SET_SRC (set) == rl->reg_rtx)
6745 {
6746 int src = REGNO (SET_SRC (set));
32131a9c 6747
367b1cf5
BS
6748 reload_spill_index[j] = src;
6749 SET_HARD_REG_BIT (reg_is_output_reload, src);
6750 if (find_regno_note (insn, REG_DEAD, src))
6751 SET_HARD_REG_BIT (reg_reloaded_died, src);
6752 }
6753 if (REGNO (rl->reg_rtx) < FIRST_PSEUDO_REGISTER)
6754 {
6755 int s = rl->secondary_out_reload;
6756 set = single_set (p);
6757 /* If this reload copies only to the secondary reload
6758 register, the secondary reload does the actual
6759 store. */
6760 if (s >= 0 && set == NULL_RTX)
1d7254c5
KH
6761 /* We can't tell what function the secondary reload
6762 has and where the actual store to the pseudo is
6763 made; leave new_spill_reg_store alone. */
6764 ;
367b1cf5
BS
6765 else if (s >= 0
6766 && SET_SRC (set) == rl->reg_rtx
6767 && SET_DEST (set) == rld[s].reg_rtx)
6768 {
6769 /* Usually the next instruction will be the
6770 secondary reload insn; if we can confirm
6771 that it is, setting new_spill_reg_store to
6772 that insn will allow an extra optimization. */
6773 rtx s_reg = rld[s].reg_rtx;
6774 rtx next = NEXT_INSN (p);
6775 rld[s].out = rl->out;
6776 rld[s].out_reg = rl->out_reg;
6777 set = single_set (next);
6778 if (set && SET_SRC (set) == s_reg
6779 && ! new_spill_reg_store[REGNO (s_reg)])
6780 {
6781 SET_HARD_REG_BIT (reg_is_output_reload,
6782 REGNO (s_reg));
6783 new_spill_reg_store[REGNO (s_reg)] = next;
6784 }
6785 }
6786 else
6787 new_spill_reg_store[REGNO (rl->reg_rtx)] = p;
6788 }
6789 }
6790 }
32131a9c 6791
367b1cf5
BS
6792 if (rl->when_needed == RELOAD_OTHER)
6793 {
2f937369 6794 emit_insn (other_output_reload_insns[rl->opnum]);
367b1cf5
BS
6795 other_output_reload_insns[rl->opnum] = get_insns ();
6796 }
6797 else
6798 output_reload_insns[rl->opnum] = get_insns ();
32131a9c 6799
94bd63e5
AH
6800 if (flag_non_call_exceptions)
6801 copy_eh_notes (insn, get_insns ());
6802
1d7254c5 6803 end_sequence ();
367b1cf5 6804}
32131a9c 6805
367b1cf5
BS
6806/* Do input reloading for reload RL, which is for the insn described by CHAIN
6807 and has the number J. */
6808static void
0c20a65f 6809do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
367b1cf5 6810{
367b1cf5 6811 rtx insn = chain->insn;
3c0cb5de 6812 rtx old = (rl->in && MEM_P (rl->in)
367b1cf5
BS
6813 ? rl->in_reg : rl->in);
6814
6815 if (old != 0
6816 /* AUTO_INC reloads need to be handled even if inherited. We got an
6817 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
6818 && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
6819 && ! rtx_equal_p (rl->reg_rtx, old)
6820 && rl->reg_rtx != 0)
1d813780 6821 emit_input_reload_insns (chain, rld + j, old, j);
32131a9c 6822
367b1cf5
BS
6823 /* When inheriting a wider reload, we have a MEM in rl->in,
6824 e.g. inheriting a SImode output reload for
6825 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
6826 if (optimize && reload_inherited[j] && rl->in
3c0cb5de
JQ
6827 && MEM_P (rl->in)
6828 && MEM_P (rl->in_reg)
367b1cf5
BS
6829 && reload_spill_index[j] >= 0
6830 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
4977bab6 6831 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
32131a9c 6832
367b1cf5
BS
6833 /* If we are reloading a register that was recently stored in with an
6834 output-reload, see if we can prove there was
6835 actually no need to store the old value in it. */
32131a9c 6836
367b1cf5
BS
6837 if (optimize
6838 && (reload_inherited[j] || reload_override_in[j])
6839 && rl->reg_rtx
f8cfc6aa 6840 && REG_P (rl->reg_rtx)
367b1cf5
BS
6841 && spill_reg_store[REGNO (rl->reg_rtx)] != 0
6842#if 0
6843 /* There doesn't seem to be any reason to restrict this to pseudos
6844 and doing so loses in the case where we are copying from a
6845 register of the wrong class. */
6846 && (REGNO (spill_reg_stored_to[REGNO (rl->reg_rtx)])
6847 >= FIRST_PSEUDO_REGISTER)
6848#endif
6849 /* The insn might have already some references to stackslots
6850 replaced by MEMs, while reload_out_reg still names the
6851 original pseudo. */
6852 && (dead_or_set_p (insn,
6853 spill_reg_stored_to[REGNO (rl->reg_rtx)])
6854 || rtx_equal_p (spill_reg_stored_to[REGNO (rl->reg_rtx)],
6855 rl->out_reg)))
6856 delete_output_reload (insn, j, REGNO (rl->reg_rtx));
6857}
32131a9c 6858
367b1cf5
BS
6859/* Do output reloading for reload RL, which is for the insn described by
6860 CHAIN and has the number J.
6861 ??? At some point we need to support handling output reloads of
6862 JUMP_INSNs or insns that set cc0. */
6863static void
0c20a65f 6864do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
367b1cf5
BS
6865{
6866 rtx note, old;
6867 rtx insn = chain->insn;
6868 /* If this is an output reload that stores something that is
6869 not loaded in this same reload, see if we can eliminate a previous
6870 store. */
6871 rtx pseudo = rl->out_reg;
6872
6873 if (pseudo
159d5964 6874 && optimize
f8cfc6aa 6875 && REG_P (pseudo)
367b1cf5
BS
6876 && ! rtx_equal_p (rl->in_reg, pseudo)
6877 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
6878 && reg_last_reload_reg[REGNO (pseudo)])
6879 {
6880 int pseudo_no = REGNO (pseudo);
6881 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
6882
6883 /* We don't need to test full validity of last_regno for
6884 inherit here; we only want to know if the store actually
6885 matches the pseudo. */
60ef417d
GK
6886 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
6887 && reg_reloaded_contents[last_regno] == pseudo_no
367b1cf5
BS
6888 && spill_reg_store[last_regno]
6889 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
6890 delete_output_reload (insn, j, last_regno);
6891 }
5e03c156 6892
367b1cf5
BS
6893 old = rl->out_reg;
6894 if (old == 0
6895 || rl->reg_rtx == old
6896 || rl->reg_rtx == 0)
6897 return;
32131a9c 6898
367b1cf5
BS
6899 /* An output operand that dies right away does need a reload,
6900 but need not be copied from it. Show the new location in the
6901 REG_UNUSED note. */
f8cfc6aa 6902 if ((REG_P (old) || GET_CODE (old) == SCRATCH)
367b1cf5
BS
6903 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6904 {
6905 XEXP (note, 0) = rl->reg_rtx;
6906 return;
6907 }
6908 /* Likewise for a SUBREG of an operand that dies. */
6909 else if (GET_CODE (old) == SUBREG
f8cfc6aa 6910 && REG_P (SUBREG_REG (old))
367b1cf5
BS
6911 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6912 SUBREG_REG (old))))
6913 {
6914 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6915 rl->reg_rtx);
6916 return;
6917 }
6918 else if (GET_CODE (old) == SCRATCH)
6919 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6920 but we don't want to make an output reload. */
6921 return;
1554c2c6 6922
367b1cf5 6923 /* If this is a JUMP_INSN, we can't support output reloads yet. */
4b4bf941 6924 if (JUMP_P (insn))
367b1cf5 6925 abort ();
5e03c156 6926
367b1cf5
BS
6927 emit_output_reload_insns (chain, rld + j, j);
6928}
1554c2c6 6929
b5ba341f
RS
6930/* Reload number R reloads from or to a group of hard registers starting at
6931 register REGNO. Return true if it can be treated for inheritance purposes
6932 like a group of reloads, each one reloading a single hard register.
6933 The caller has already checked that the spill register and REGNO use
6934 the same number of registers to store the reload value. */
6935
6936static bool
cf9c6ca5 6937inherit_piecemeal_p (int r ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED)
b5ba341f
RS
6938{
6939#ifdef CANNOT_CHANGE_MODE_CLASS
6940 return (!REG_CANNOT_CHANGE_MODE_P (reload_spill_index[r],
6941 GET_MODE (rld[r].reg_rtx),
6942 reg_raw_mode[reload_spill_index[r]])
6943 && !REG_CANNOT_CHANGE_MODE_P (regno,
6944 GET_MODE (rld[r].reg_rtx),
6945 reg_raw_mode[regno]));
6946#else
6947 return true;
6948#endif
6949}
6950
367b1cf5 6951/* Output insns to reload values in and out of the chosen reload regs. */
32131a9c 6952
367b1cf5 6953static void
0c20a65f 6954emit_reload_insns (struct insn_chain *chain)
367b1cf5
BS
6955{
6956 rtx insn = chain->insn;
32131a9c 6957
b3694847 6958 int j;
e6e52be0 6959
367b1cf5 6960 CLEAR_HARD_REG_SET (reg_reloaded_died);
e6e52be0 6961
367b1cf5
BS
6962 for (j = 0; j < reload_n_operands; j++)
6963 input_reload_insns[j] = input_address_reload_insns[j]
6964 = inpaddr_address_reload_insns[j]
6965 = output_reload_insns[j] = output_address_reload_insns[j]
6966 = outaddr_address_reload_insns[j]
6967 = other_output_reload_insns[j] = 0;
6968 other_input_address_reload_insns = 0;
6969 other_input_reload_insns = 0;
6970 operand_reload_insns = 0;
6971 other_operand_reload_insns = 0;
32131a9c 6972
850aac53 6973 /* Dump reloads into the dump file. */
c263766c 6974 if (dump_file)
850aac53 6975 {
c263766c
RH
6976 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
6977 debug_reload_to_stream (dump_file);
850aac53
JL
6978 }
6979
367b1cf5
BS
6980 /* Now output the instructions to copy the data into and out of the
6981 reload registers. Do these in the order that the reloads were reported,
6982 since reloads of base and index registers precede reloads of operands
6983 and the operands may need the base and index registers reloaded. */
32131a9c 6984
367b1cf5
BS
6985 for (j = 0; j < n_reloads; j++)
6986 {
6987 if (rld[j].reg_rtx
6988 && REGNO (rld[j].reg_rtx) < FIRST_PSEUDO_REGISTER)
6989 new_spill_reg_store[REGNO (rld[j].reg_rtx)] = 0;
d7e0324f 6990
367b1cf5
BS
6991 do_input_reload (chain, rld + j, j);
6992 do_output_reload (chain, rld + j, j);
32131a9c
RK
6993 }
6994
546b63fb
RK
6995 /* Now write all the insns we made for reloads in the order expected by
6996 the allocation functions. Prior to the insn being reloaded, we write
6997 the following reloads:
6998
6999 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7000
2edc8d65 7001 RELOAD_OTHER reloads.
546b63fb 7002
47c8cf91
ILT
7003 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7004 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7005 RELOAD_FOR_INPUT reload for the operand.
546b63fb 7006
893bc853
RK
7007 RELOAD_FOR_OPADDR_ADDRS reloads.
7008
546b63fb
RK
7009 RELOAD_FOR_OPERAND_ADDRESS reloads.
7010
7011 After the insn being reloaded, we write the following:
7012
47c8cf91
ILT
7013 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7014 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7015 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7016 reloads for the operand. The RELOAD_OTHER output reloads are
7017 output in descending order by reload number. */
546b63fb 7018
dd3adcf8
DJ
7019 emit_insn_before_sameloc (other_input_address_reload_insns, insn);
7020 emit_insn_before_sameloc (other_input_reload_insns, insn);
546b63fb
RK
7021
7022 for (j = 0; j < reload_n_operands; j++)
7023 {
dd3adcf8
DJ
7024 emit_insn_before_sameloc (inpaddr_address_reload_insns[j], insn);
7025 emit_insn_before_sameloc (input_address_reload_insns[j], insn);
7026 emit_insn_before_sameloc (input_reload_insns[j], insn);
546b63fb
RK
7027 }
7028
dd3adcf8
DJ
7029 emit_insn_before_sameloc (other_operand_reload_insns, insn);
7030 emit_insn_before_sameloc (operand_reload_insns, insn);
546b63fb
RK
7031
7032 for (j = 0; j < reload_n_operands; j++)
7033 {
dd3adcf8
DJ
7034 rtx x = emit_insn_after_sameloc (outaddr_address_reload_insns[j], insn);
7035 x = emit_insn_after_sameloc (output_address_reload_insns[j], x);
7036 x = emit_insn_after_sameloc (output_reload_insns[j], x);
7037 emit_insn_after_sameloc (other_output_reload_insns[j], x);
546b63fb
RK
7038 }
7039
32131a9c
RK
7040 /* For all the spill regs newly reloaded in this instruction,
7041 record what they were reloaded from, so subsequent instructions
d445b551
RK
7042 can inherit the reloads.
7043
7044 Update spill_reg_store for the reloads of this insn.
e9e79d69 7045 Copy the elements that were updated in the loop above. */
32131a9c
RK
7046
7047 for (j = 0; j < n_reloads; j++)
7048 {
b3694847
SS
7049 int r = reload_order[j];
7050 int i = reload_spill_index[r];
32131a9c 7051
78a2bc08 7052 /* If this is a non-inherited input reload from a pseudo, we must
05d10675
BS
7053 clear any memory of a previous store to the same pseudo. Only do
7054 something if there will not be an output reload for the pseudo
7055 being reloaded. */
eceef4c9 7056 if (rld[r].in_reg != 0
05d10675
BS
7057 && ! (reload_inherited[r] || reload_override_in[r]))
7058 {
eceef4c9 7059 rtx reg = rld[r].in_reg;
78a2bc08 7060
05d10675 7061 if (GET_CODE (reg) == SUBREG)
78a2bc08 7062 reg = SUBREG_REG (reg);
05d10675 7063
f8cfc6aa 7064 if (REG_P (reg)
78a2bc08
R
7065 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7066 && ! reg_has_output_reload[REGNO (reg)])
7067 {
7068 int nregno = REGNO (reg);
7069
7070 if (reg_last_reload_reg[nregno])
05d10675
BS
7071 {
7072 int last_regno = REGNO (reg_last_reload_reg[nregno]);
78a2bc08 7073
05d10675 7074 if (reg_reloaded_contents[last_regno] == nregno)
78a2bc08 7075 spill_reg_store[last_regno] = 0;
05d10675 7076 }
78a2bc08
R
7077 }
7078 }
05d10675 7079
e6e52be0 7080 /* I is nonneg if this reload used a register.
eceef4c9 7081 If rld[r].reg_rtx is 0, this is an optional reload
51f0c3b7 7082 that we opted to ignore. */
d445b551 7083
eceef4c9 7084 if (i >= 0 && rld[r].reg_rtx != 0)
32131a9c 7085 {
66fd46b6 7086 int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
32131a9c 7087 int k;
51f0c3b7
JW
7088 int part_reaches_end = 0;
7089 int all_reaches_end = 1;
32131a9c 7090
51f0c3b7
JW
7091 /* For a multi register reload, we need to check if all or part
7092 of the value lives to the end. */
32131a9c
RK
7093 for (k = 0; k < nr; k++)
7094 {
eceef4c9
BS
7095 if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7096 rld[r].when_needed))
51f0c3b7
JW
7097 part_reaches_end = 1;
7098 else
7099 all_reaches_end = 0;
32131a9c
RK
7100 }
7101
51f0c3b7
JW
7102 /* Ignore reloads that don't reach the end of the insn in
7103 their entirety. */
7104 if (all_reaches_end)
32131a9c 7105 {
51f0c3b7
JW
7106 /* First, clear out memory of what used to be in this spill reg.
7107 If consecutive registers are used, clear them all. */
d08ea79f 7108
32131a9c 7109 for (k = 0; k < nr; k++)
e3e9336f 7110 {
e6e52be0 7111 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
e3e9336f
DJ
7112 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
7113 }
d08ea79f 7114
51f0c3b7 7115 /* Maybe the spill reg contains a copy of reload_out. */
eceef4c9 7116 if (rld[r].out != 0
f8cfc6aa 7117 && (REG_P (rld[r].out)
cb2afeb3 7118#ifdef AUTO_INC_DEC
eceef4c9 7119 || ! rld[r].out_reg
cb2afeb3 7120#endif
f8cfc6aa 7121 || REG_P (rld[r].out_reg)))
51f0c3b7 7122 {
f8cfc6aa 7123 rtx out = (REG_P (rld[r].out)
eceef4c9
BS
7124 ? rld[r].out
7125 : rld[r].out_reg
7126 ? rld[r].out_reg
7127/* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
b3694847 7128 int nregno = REGNO (out);
51f0c3b7 7129 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
66fd46b6
JH
7130 : hard_regno_nregs[nregno]
7131 [GET_MODE (rld[r].reg_rtx)]);
b5ba341f 7132 bool piecemeal;
51f0c3b7
JW
7133
7134 spill_reg_store[i] = new_spill_reg_store[i];
cb2afeb3 7135 spill_reg_stored_to[i] = out;
eceef4c9 7136 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
51f0c3b7 7137
b5ba341f
RS
7138 piecemeal = (nregno < FIRST_PSEUDO_REGISTER
7139 && nr == nnr
7140 && inherit_piecemeal_p (r, nregno));
7141
51f0c3b7 7142 /* If NREGNO is a hard register, it may occupy more than
05d10675 7143 one register. If it does, say what is in the
51f0c3b7
JW
7144 rest of the registers assuming that both registers
7145 agree on how many words the object takes. If not,
7146 invalidate the subsequent registers. */
7147
7148 if (nregno < FIRST_PSEUDO_REGISTER)
7149 for (k = 1; k < nnr; k++)
7150 reg_last_reload_reg[nregno + k]
b5ba341f 7151 = (piecemeal
39d31de8 7152 ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
51f0c3b7
JW
7153 : 0);
7154
7155 /* Now do the inverse operation. */
7156 for (k = 0; k < nr; k++)
7157 {
e6e52be0
R
7158 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7159 reg_reloaded_contents[i + k]
b5ba341f 7160 = (nregno >= FIRST_PSEUDO_REGISTER || !piecemeal
51f0c3b7
JW
7161 ? nregno
7162 : nregno + k);
e6e52be0
R
7163 reg_reloaded_insn[i + k] = insn;
7164 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
e3e9336f
DJ
7165 if (HARD_REGNO_CALL_PART_CLOBBERED (i + k, GET_MODE (out)))
7166 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
51f0c3b7
JW
7167 }
7168 }
d08ea79f 7169
51f0c3b7
JW
7170 /* Maybe the spill reg contains a copy of reload_in. Only do
7171 something if there will not be an output reload for
7172 the register being reloaded. */
eceef4c9
BS
7173 else if (rld[r].out_reg == 0
7174 && rld[r].in != 0
f8cfc6aa 7175 && ((REG_P (rld[r].in)
eceef4c9
BS
7176 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER
7177 && ! reg_has_output_reload[REGNO (rld[r].in)])
f8cfc6aa 7178 || (REG_P (rld[r].in_reg)
eceef4c9
BS
7179 && ! reg_has_output_reload[REGNO (rld[r].in_reg)]))
7180 && ! reg_set_p (rld[r].reg_rtx, PATTERN (insn)))
51f0c3b7 7181 {
b3694847 7182 int nregno;
51f0c3b7 7183 int nnr;
e3e9336f 7184 rtx in;
b5ba341f 7185 bool piecemeal;
d445b551 7186
f8cfc6aa 7187 if (REG_P (rld[r].in)
eceef4c9 7188 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
e3e9336f 7189 in = rld[r].in;
f8cfc6aa 7190 else if (REG_P (rld[r].in_reg))
e3e9336f 7191 in = rld[r].in_reg;
cb2afeb3 7192 else
e3e9336f
DJ
7193 in = XEXP (rld[r].in_reg, 0);
7194 nregno = REGNO (in);
d08ea79f 7195
51f0c3b7 7196 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
66fd46b6
JH
7197 : hard_regno_nregs[nregno]
7198 [GET_MODE (rld[r].reg_rtx)]);
05d10675 7199
eceef4c9 7200 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
51f0c3b7 7201
b5ba341f
RS
7202 piecemeal = (nregno < FIRST_PSEUDO_REGISTER
7203 && nr == nnr
7204 && inherit_piecemeal_p (r, nregno));
7205
51f0c3b7
JW
7206 if (nregno < FIRST_PSEUDO_REGISTER)
7207 for (k = 1; k < nnr; k++)
7208 reg_last_reload_reg[nregno + k]
b5ba341f 7209 = (piecemeal
39d31de8 7210 ? regno_reg_rtx[REGNO (rld[r].reg_rtx) + k]
51f0c3b7
JW
7211 : 0);
7212
7213 /* Unless we inherited this reload, show we haven't
cb2afeb3
R
7214 recently done a store.
7215 Previous stores of inherited auto_inc expressions
7216 also have to be discarded. */
7217 if (! reload_inherited[r]
eceef4c9 7218 || (rld[r].out && ! rld[r].out_reg))
51f0c3b7
JW
7219 spill_reg_store[i] = 0;
7220
7221 for (k = 0; k < nr; k++)
7222 {
e6e52be0
R
7223 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7224 reg_reloaded_contents[i + k]
b5ba341f 7225 = (nregno >= FIRST_PSEUDO_REGISTER || !piecemeal
51f0c3b7
JW
7226 ? nregno
7227 : nregno + k);
e6e52be0
R
7228 reg_reloaded_insn[i + k] = insn;
7229 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
e3e9336f
DJ
7230 if (HARD_REGNO_CALL_PART_CLOBBERED (i + k, GET_MODE (in)))
7231 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered, i + k);
51f0c3b7
JW
7232 }
7233 }
7234 }
d445b551 7235
51f0c3b7
JW
7236 /* However, if part of the reload reaches the end, then we must
7237 invalidate the old info for the part that survives to the end. */
7238 else if (part_reaches_end)
7239 {
546b63fb 7240 for (k = 0; k < nr; k++)
e6e52be0 7241 if (reload_reg_reaches_end_p (i + k,
eceef4c9
BS
7242 rld[r].opnum,
7243 rld[r].when_needed))
e6e52be0 7244 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
32131a9c
RK
7245 }
7246 }
7247
7248 /* The following if-statement was #if 0'd in 1.34 (or before...).
7249 It's reenabled in 1.35 because supposedly nothing else
7250 deals with this problem. */
7251
7252 /* If a register gets output-reloaded from a non-spill register,
7253 that invalidates any previous reloaded copy of it.
7254 But forget_old_reloads_1 won't get to see it, because
7255 it thinks only about the original insn. So invalidate it here. */
eceef4c9 7256 if (i < 0 && rld[r].out != 0
f8cfc6aa 7257 && (REG_P (rld[r].out)
3c0cb5de 7258 || (MEM_P (rld[r].out)
f8cfc6aa 7259 && REG_P (rld[r].out_reg))))
32131a9c 7260 {
f8cfc6aa 7261 rtx out = (REG_P (rld[r].out)
eceef4c9 7262 ? rld[r].out : rld[r].out_reg);
b3694847 7263 int nregno = REGNO (out);
c7093272 7264 if (nregno >= FIRST_PSEUDO_REGISTER)
cb2afeb3 7265 {
6a651371 7266 rtx src_reg, store_insn = NULL_RTX;
cb2afeb3
R
7267
7268 reg_last_reload_reg[nregno] = 0;
7269
7270 /* If we can find a hard register that is stored, record
7271 the storing insn so that we may delete this insn with
7272 delete_output_reload. */
eceef4c9 7273 src_reg = rld[r].reg_rtx;
cb2afeb3
R
7274
7275 /* If this is an optional reload, try to find the source reg
7276 from an input reload. */
7277 if (! src_reg)
7278 {
7279 rtx set = single_set (insn);
eceef4c9 7280 if (set && SET_DEST (set) == rld[r].out)
cb2afeb3
R
7281 {
7282 int k;
7283
7284 src_reg = SET_SRC (set);
7285 store_insn = insn;
7286 for (k = 0; k < n_reloads; k++)
7287 {
eceef4c9 7288 if (rld[k].in == src_reg)
cb2afeb3 7289 {
eceef4c9 7290 src_reg = rld[k].reg_rtx;
cb2afeb3
R
7291 break;
7292 }
7293 }
7294 }
7295 }
7296 else
7297 store_insn = new_spill_reg_store[REGNO (src_reg)];
f8cfc6aa 7298 if (src_reg && REG_P (src_reg)
cb2afeb3
R
7299 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7300 {
7301 int src_regno = REGNO (src_reg);
66fd46b6 7302 int nr = hard_regno_nregs[src_regno][rld[r].mode];
cb2afeb3
R
7303 /* Where to find a death note varies with
7304 PRESERVE_DEATH_INFO_REGNO_P. The condition is not
7305 necessarily checked exactly in the code that moves
7306 notes, so just check both locations. */
7307 rtx note = find_regno_note (insn, REG_DEAD, src_regno);
1558b970 7308 if (! note && store_insn)
cb2afeb3
R
7309 note = find_regno_note (store_insn, REG_DEAD, src_regno);
7310 while (nr-- > 0)
7311 {
7312 spill_reg_store[src_regno + nr] = store_insn;
7313 spill_reg_stored_to[src_regno + nr] = out;
7314 reg_reloaded_contents[src_regno + nr] = nregno;
7315 reg_reloaded_insn[src_regno + nr] = store_insn;
00f9f1bc 7316 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
cb2afeb3 7317 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
e3e9336f
DJ
7318 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + nr,
7319 GET_MODE (src_reg)))
7320 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
7321 src_regno + nr);
cb2afeb3
R
7322 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7323 if (note)
7324 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7325 else
7326 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7327 }
7328 reg_last_reload_reg[nregno] = src_reg;
9532c14f
UW
7329 /* We have to set reg_has_output_reload here, or else
7330 forget_old_reloads_1 will clear reg_last_reload_reg
7331 right away. */
7332 reg_has_output_reload[nregno] = 1;
cb2afeb3
R
7333 }
7334 }
c7093272
RK
7335 else
7336 {
66fd46b6 7337 int num_regs = hard_regno_nregs[nregno][GET_MODE (rld[r].out)];
36281332 7338
c7093272
RK
7339 while (num_regs-- > 0)
7340 reg_last_reload_reg[nregno + num_regs] = 0;
7341 }
32131a9c
RK
7342 }
7343 }
e6e52be0 7344 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
32131a9c
RK
7345}
7346\f
5e03c156
RK
7347/* Emit code to perform a reload from IN (which may be a reload register) to
7348 OUT (which may also be a reload register). IN or OUT is from operand
05d10675 7349 OPNUM with reload type TYPE.
546b63fb 7350
3c3eeea6 7351 Returns first insn emitted. */
32131a9c
RK
7352
7353rtx
0c20a65f 7354gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
32131a9c 7355{
546b63fb 7356 rtx last = get_last_insn ();
7a5b18b0
RK
7357 rtx tem;
7358
7359 /* If IN is a paradoxical SUBREG, remove it and try to put the
7360 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7361 if (GET_CODE (in) == SUBREG
7362 && (GET_MODE_SIZE (GET_MODE (in))
7363 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7364 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7365 in = SUBREG_REG (in), out = tem;
7366 else if (GET_CODE (out) == SUBREG
eceef4c9
BS
7367 && (GET_MODE_SIZE (GET_MODE (out))
7368 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7369 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7a5b18b0 7370 out = SUBREG_REG (out), in = tem;
32131a9c 7371
a8fdc208 7372 /* How to do this reload can get quite tricky. Normally, we are being
32131a9c
RK
7373 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7374 register that didn't get a hard register. In that case we can just
7375 call emit_move_insn.
7376
a7fd196c
JW
7377 We can also be asked to reload a PLUS that adds a register or a MEM to
7378 another register, constant or MEM. This can occur during frame pointer
7379 elimination and while reloading addresses. This case is handled by
7380 trying to emit a single insn to perform the add. If it is not valid,
7381 we use a two insn sequence.
32131a9c
RK
7382
7383 Finally, we could be called to handle an 'o' constraint by putting
7384 an address into a register. In that case, we first try to do this
7385 with a named pattern of "reload_load_address". If no such pattern
7386 exists, we just emit a SET insn and hope for the best (it will normally
7387 be valid on machines that use 'o').
7388
7389 This entire process is made complex because reload will never
7390 process the insns we generate here and so we must ensure that
7391 they will fit their constraints and also by the fact that parts of
7392 IN might be being reloaded separately and replaced with spill registers.
7393 Because of this, we are, in some sense, just guessing the right approach
7394 here. The one listed above seems to work.
7395
7396 ??? At some point, this whole thing needs to be rethought. */
7397
7398 if (GET_CODE (in) == PLUS
f8cfc6aa 7399 && (REG_P (XEXP (in, 0))
5c6b1bd2 7400 || GET_CODE (XEXP (in, 0)) == SUBREG
3c0cb5de 7401 || MEM_P (XEXP (in, 0)))
f8cfc6aa 7402 && (REG_P (XEXP (in, 1))
5c6b1bd2 7403 || GET_CODE (XEXP (in, 1)) == SUBREG
a7fd196c 7404 || CONSTANT_P (XEXP (in, 1))
3c0cb5de 7405 || MEM_P (XEXP (in, 1))))
32131a9c 7406 {
a7fd196c
JW
7407 /* We need to compute the sum of a register or a MEM and another
7408 register, constant, or MEM, and put it into the reload
3002e160
JW
7409 register. The best possible way of doing this is if the machine
7410 has a three-operand ADD insn that accepts the required operands.
32131a9c
RK
7411
7412 The simplest approach is to try to generate such an insn and see if it
7413 is recognized and matches its constraints. If so, it can be used.
7414
7415 It might be better not to actually emit the insn unless it is valid,
0009eff2 7416 but we need to pass the insn as an operand to `recog' and
0eadeb15 7417 `extract_insn' and it is simpler to emit and then delete the insn if
0009eff2 7418 not valid than to dummy things up. */
a8fdc208 7419
af929c62 7420 rtx op0, op1, tem, insn;
32131a9c 7421 int code;
a8fdc208 7422
af929c62
RK
7423 op0 = find_replacement (&XEXP (in, 0));
7424 op1 = find_replacement (&XEXP (in, 1));
7425
32131a9c
RK
7426 /* Since constraint checking is strict, commutativity won't be
7427 checked, so we need to do that here to avoid spurious failure
7428 if the add instruction is two-address and the second operand
7429 of the add is the same as the reload reg, which is frequently
7430 the case. If the insn would be A = B + A, rearrange it so
0f41302f 7431 it will be A = A + B as constrain_operands expects. */
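   /* Editorial illustration (not from the GCC sources): if OUT is hard
      register 3 and IN is (plus:SI (reg:SI 7) (reg:SI 3)), the insn
          (set (reg:SI 3) (plus:SI (reg:SI 7) (reg:SI 3)))
      would spuriously fail strict checking against a two-address add,
      so the operands are swapped to give
          (set (reg:SI 3) (plus:SI (reg:SI 3) (reg:SI 7))).  */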
a8fdc208 7432
f8cfc6aa 7433 if (REG_P (XEXP (in, 1))
5e03c156 7434 && REGNO (out) == REGNO (XEXP (in, 1)))
af929c62
RK
7435 tem = op0, op0 = op1, op1 = tem;
7436
7437 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
38a448ca 7438 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
32131a9c 7439
38a448ca 7440 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
32131a9c
RK
7441 code = recog_memoized (insn);
7442
7443 if (code >= 0)
7444 {
0eadeb15 7445 extract_insn (insn);
32131a9c
RK
7446 /* We want constrain operands to treat this insn strictly in
7447 its validity determination, i.e., the way it would after reload
7448 has completed. */
0eadeb15 7449 if (constrain_operands (1))
32131a9c
RK
7450 return insn;
7451 }
7452
546b63fb 7453 delete_insns_since (last);
32131a9c
RK
7454
7455 /* If that failed, we must use a conservative two-insn sequence.
09522f21
FS
7456
7457 Use a move to copy one operand into the reload register. Prefer
7458 to reload a constant, MEM or pseudo since the move patterns can
7459 handle an arbitrary operand. If OP1 is not a constant, MEM or
7460 pseudo and OP1 is not a valid operand for an add instruction, then
7461 reload OP1.
7462
7463 After reloading one of the operands into the reload register, add
7464 the reload register to the output register.
32131a9c
RK
7465
7466 If there is another way to do this for a specific machine, a
7467 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7468 we emit below. */
7469
09522f21
FS
7470 code = (int) add_optab->handlers[(int) GET_MODE (out)].insn_code;
7471
3c0cb5de 7472 if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
f8cfc6aa 7473 || (REG_P (op1)
09522f21
FS
7474 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
7475 || (code != CODE_FOR_nothing
a995e389
RH
7476 && ! ((*insn_data[code].operand[2].predicate)
7477 (op1, insn_data[code].operand[2].mode))))
af929c62 7478 tem = op0, op0 = op1, op1 = tem;
32131a9c 7479
5c6b1bd2 7480 gen_reload (out, op0, opnum, type);
39b56c2a 7481
5e03c156 7482 /* If OP0 and OP1 are the same, we can use OUT for OP1.
39b56c2a
RK
7483 This fixes a problem on the 32K where the stack pointer cannot
7484 be used as an operand of an add insn. */
7485
7486 if (rtx_equal_p (op0, op1))
5e03c156 7487 op1 = out;
39b56c2a 7488
5e03c156 7489 insn = emit_insn (gen_add2_insn (out, op1));
c77c9766
RK
7490
7491 /* If that failed, copy the address register to the reload register.
0f41302f 7492 Then add the constant to the reload register. */
c77c9766
RK
7493
7494 code = recog_memoized (insn);
7495
7496 if (code >= 0)
7497 {
0eadeb15 7498 extract_insn (insn);
c77c9766
RK
7499 /* We want constrain operands to treat this insn strictly in
7500 its validity determination, i.e., the way it would after reload
7501 has completed. */
0eadeb15 7502 if (constrain_operands (1))
4117a96b
R
7503 {
7504 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
7505 REG_NOTES (insn)
9e6a5703 7506 = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
4117a96b
R
7507 return insn;
7508 }
c77c9766
RK
7509 }
7510
7511 delete_insns_since (last);
7512
5c6b1bd2 7513 gen_reload (out, op1, opnum, type);
4117a96b 7514 insn = emit_insn (gen_add2_insn (out, op0));
9e6a5703 7515 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
32131a9c
RK
7516 }
7517
0dadecf6
RK
7518#ifdef SECONDARY_MEMORY_NEEDED
7519 /* If we need a memory location to do the move, do it that way. */
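   /* Editorial illustration (assumption about a hypothetical target): if,
      say, general and floating-point registers cannot be copied directly,
      SECONDARY_MEMORY_NEEDED routes the copy through a stack slot: the two
      gen_reload calls below first store IN into the memory location
      returned by get_secondary_mem and then load OUT from that location.  */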
f8cfc6aa 7520 else if ((REG_P (in) || GET_CODE (in) == SUBREG)
344b78b8 7521 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
f8cfc6aa 7522 && (REG_P (out) || GET_CODE (out) == SUBREG)
344b78b8
JH
7523 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
7524 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
7525 REGNO_REG_CLASS (reg_or_subregno (out)),
5e03c156 7526 GET_MODE (out)))
0dadecf6
RK
7527 {
7528 /* Get the memory to use and rewrite both registers to its mode. */
5e03c156 7529 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
0dadecf6 7530
5e03c156 7531 if (GET_MODE (loc) != GET_MODE (out))
38a448ca 7532 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
0dadecf6
RK
7533
7534 if (GET_MODE (loc) != GET_MODE (in))
38a448ca 7535 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
0dadecf6 7536
5c6b1bd2
RK
7537 gen_reload (loc, in, opnum, type);
7538 gen_reload (out, loc, opnum, type);
0dadecf6
RK
7539 }
7540#endif
7541
32131a9c 7542 /* If IN is a simple operand, use gen_move_insn. */
ec8e098d 7543 else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
5e03c156 7544 emit_insn (gen_move_insn (out, in));
32131a9c
RK
7545
7546#ifdef HAVE_reload_load_address
7547 else if (HAVE_reload_load_address)
5e03c156 7548 emit_insn (gen_reload_load_address (out, in));
32131a9c
RK
7549#endif
7550
5e03c156 7551 /* Otherwise, just write (set OUT IN) and hope for the best. */
32131a9c 7552 else
38a448ca 7553 emit_insn (gen_rtx_SET (VOIDmode, out, in));
32131a9c
RK
7554
7555 /* Return the first insn emitted.
546b63fb 7556 We cannot just return get_last_insn, because there may have
32131a9c
RK
7557 been multiple instructions emitted. Also note that gen_move_insn may
7558 emit more than one insn itself, so we cannot assume that there is one
7559 insn emitted per emit_insn_before call. */
7560
546b63fb 7561 return last ? NEXT_INSN (last) : get_insns ();
32131a9c
RK
7562}
7563\f
cda94cbb
RH
7564/* Delete a previously made output-reload whose result we now believe
7565 is not needed. First we double-check.
32131a9c
RK
7566
7567 INSN is the insn now being processed.
cb2afeb3
R
7568 LAST_RELOAD_REG is the hard register number for which we want to delete
7569 the last output reload.
7570 J is the reload-number that originally used REG. The caller has made
7571 certain that reload J doesn't use REG any longer for input. */
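/* Editorial illustration (details invented): suppose the last output reload
   for hard register 3 stored it back into pseudo 100, whose home is a stack
   slot.  If pseudo 100 is not referenced again between that store and INSN
   except through inherited reloads, the store is dead; and if the pseudo
   turns out to be replaced entirely by reload registers, alter_reg below
   lets us drop its stack slot as well.  */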
32131a9c
RK
7572
7573static void
0c20a65f 7574delete_output_reload (rtx insn, int j, int last_reload_reg)
32131a9c 7575{
cb2afeb3
R
7576 rtx output_reload_insn = spill_reg_store[last_reload_reg];
7577 rtx reg = spill_reg_stored_to[last_reload_reg];
7578 int k;
7579 int n_occurrences;
7580 int n_inherited = 0;
b3694847 7581 rtx i1;
cb2afeb3 7582 rtx substed;
05d10675 7583
068f5dea
JH
7584 /* It is possible that this reload was only used to set another reload
7585 that we eliminated earlier, and thus this instruction was deleted too. */
7586 if (INSN_DELETED_P (output_reload_insn))
7587 return;
7588
32131a9c
RK
7589 /* Get the raw pseudo-register referred to. */
7590
32131a9c
RK
7591 while (GET_CODE (reg) == SUBREG)
7592 reg = SUBREG_REG (reg);
cb2afeb3
R
7593 substed = reg_equiv_memory_loc[REGNO (reg)];
7594
7595 /* This is unsafe if the operand occurs more often in the current
7596 insn than it is inherited. */
7597 for (k = n_reloads - 1; k >= 0; k--)
7598 {
eceef4c9 7599 rtx reg2 = rld[k].in;
cb2afeb3
R
7600 if (! reg2)
7601 continue;
3c0cb5de 7602 if (MEM_P (reg2) || reload_override_in[k])
eceef4c9 7603 reg2 = rld[k].in_reg;
cb2afeb3 7604#ifdef AUTO_INC_DEC
eceef4c9
BS
7605 if (rld[k].out && ! rld[k].out_reg)
7606 reg2 = XEXP (rld[k].in_reg, 0);
cb2afeb3
R
7607#endif
7608 while (GET_CODE (reg2) == SUBREG)
7609 reg2 = SUBREG_REG (reg2);
7610 if (rtx_equal_p (reg2, reg))
2eb6dac7
AS
7611 {
7612 if (reload_inherited[k] || reload_override_in[k] || k == j)
7613 {
cb2afeb3 7614 n_inherited++;
eceef4c9 7615 reg2 = rld[k].out_reg;
2eb6dac7
AS
7616 if (! reg2)
7617 continue;
7618 while (GET_CODE (reg2) == SUBREG)
7619 reg2 = XEXP (reg2, 0);
7620 if (rtx_equal_p (reg2, reg))
7621 n_inherited++;
7622 }
7623 else
7624 return;
7625 }
cb2afeb3 7626 }
4b983fdc 7627 n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
cb2afeb3 7628 if (substed)
5d7ef82a
BS
7629 n_occurrences += count_occurrences (PATTERN (insn),
7630 eliminate_regs (substed, 0,
7631 NULL_RTX), 0);
cb2afeb3
R
7632 if (n_occurrences > n_inherited)
7633 return;
32131a9c
RK
7634
7635 /* If the pseudo-reg we are reloading is no longer referenced
7636 anywhere between the store into it and here,
7637 and no jumps or labels intervene, then the value can get
7638 here through the reload reg alone.
7639 Otherwise, give up--return. */
7640 for (i1 = NEXT_INSN (output_reload_insn);
7641 i1 != insn; i1 = NEXT_INSN (i1))
7642 {
4b4bf941 7643 if (LABEL_P (i1) || JUMP_P (i1))
32131a9c 7644 return;
4b4bf941 7645 if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
32131a9c 7646 && reg_mentioned_p (reg, PATTERN (i1)))
aa6498c2 7647 {
cb2afeb3
R
7648 /* If this is USE in front of INSN, we only have to check that
7649 there are no more references than accounted for by inheritance. */
4b4bf941 7650 while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
aa6498c2 7651 {
cb2afeb3 7652 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
aa6498c2
R
7653 i1 = NEXT_INSN (i1);
7654 }
cb2afeb3 7655 if (n_occurrences <= n_inherited && i1 == insn)
aa6498c2
R
7656 break;
7657 return;
7658 }
32131a9c
RK
7659 }
7660
cda94cbb 7661 /* We will be deleting the insn. Remove the spill reg information. */
66fd46b6 7662 for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
cda94cbb
RH
7663 {
7664 spill_reg_store[last_reload_reg + k] = 0;
7665 spill_reg_stored_to[last_reload_reg + k] = 0;
7666 }
7667
aa6498c2 7668 /* The caller has already checked that REG dies or is set in INSN.
cda94cbb 7669 It has also checked that we are optimizing, and thus some
14b493d6 7670 inaccuracies in the debugging information are acceptable.
cda94cbb
RH
7671 So we could just delete output_reload_insn. But in some cases
7672 we can improve the debugging information without sacrificing
7673 optimization - maybe even improving the code: See if the pseudo
7674 reg has been completely replaced with reload regs. If so, delete
7675 the store insn and forget we had a stack slot for the pseudo. */
eceef4c9 7676 if (rld[j].out != rld[j].in
aa6498c2 7677 && REG_N_DEATHS (REGNO (reg)) == 1
a3a24aa6 7678 && REG_N_SETS (REGNO (reg)) == 1
aa6498c2
R
7679 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7680 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
32131a9c
RK
7681 {
7682 rtx i2;
7683
cda94cbb
RH
7684 /* We know that it was used only between here and the beginning of
7685 the current basic block. (We also know that the last use before
7686 INSN was the output reload we are thinking of deleting, but never
7687 mind that.) Search that range; see if any ref remains. */
32131a9c
RK
7688 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7689 {
d445b551
RK
7690 rtx set = single_set (i2);
7691
32131a9c
RK
7692 /* Uses which just store in the pseudo don't count,
7693 since if they are the only uses, they are dead. */
d445b551 7694 if (set != 0 && SET_DEST (set) == reg)
32131a9c 7695 continue;
4b4bf941
JQ
7696 if (LABEL_P (i2)
7697 || JUMP_P (i2))
32131a9c 7698 break;
4b4bf941 7699 if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
32131a9c 7700 && reg_mentioned_p (reg, PATTERN (i2)))
aa6498c2
R
7701 {
7702 /* Some other ref remains; just delete the output reload we
7703 know to be dead. */
cb2afeb3 7704 delete_address_reloads (output_reload_insn, insn);
ca6c03ca 7705 delete_insn (output_reload_insn);
aa6498c2
R
7706 return;
7707 }
32131a9c
RK
7708 }
7709
cda94cbb
RH
7710 /* Delete the now-dead stores into this pseudo. Note that this
7711 loop also takes care of deleting output_reload_insn. */
32131a9c
RK
7712 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7713 {
d445b551
RK
7714 rtx set = single_set (i2);
7715
7716 if (set != 0 && SET_DEST (set) == reg)
5507b94b 7717 {
cb2afeb3 7718 delete_address_reloads (i2, insn);
ca6c03ca 7719 delete_insn (i2);
5507b94b 7720 }
4b4bf941
JQ
7721 if (LABEL_P (i2)
7722 || JUMP_P (i2))
32131a9c
RK
7723 break;
7724 }
7725
cda94cbb 7726 /* For the debugging info, say the pseudo lives in this reload reg. */
eceef4c9 7727 reg_renumber[REGNO (reg)] = REGNO (rld[j].reg_rtx);
32131a9c
RK
7728 alter_reg (REGNO (reg), -1);
7729 }
cda94cbb
RH
7730 else
7731 {
7732 delete_address_reloads (output_reload_insn, insn);
7733 delete_insn (output_reload_insn);
7734 }
cb2afeb3
R
7735}
7736
7737/* We are going to delete DEAD_INSN. Recursively delete loads of
7738 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
7739 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
7740static void
0c20a65f 7741delete_address_reloads (rtx dead_insn, rtx current_insn)
cb2afeb3
R
7742{
7743 rtx set = single_set (dead_insn);
7744 rtx set2, dst, prev, next;
7745 if (set)
7746 {
7747 rtx dst = SET_DEST (set);
3c0cb5de 7748 if (MEM_P (dst))
cb2afeb3
R
7749 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
7750 }
7751 /* If we deleted the store from a reloaded post_{in,de}c expression,
7752 we can delete the matching adds. */
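  /* Editorial illustration (register numbers invented): a reloaded
     (post_inc:SI (reg:SI 4)) typically leaves
         prev:      (set (reg:SI 2) (plus:SI (reg:SI 2) (const_int 4)))
         dead_insn: the store being deleted
         next:      (set (reg:SI 2) (plus:SI (reg:SI 2) (const_int -4)))
     where reg 2 is the reload register; once the store is gone the two
     adds cancel, so both are deleted below.  */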
7753 prev = PREV_INSN (dead_insn);
7754 next = NEXT_INSN (dead_insn);
7755 if (! prev || ! next)
7756 return;
7757 set = single_set (next);
7758 set2 = single_set (prev);
7759 if (! set || ! set2
7760 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
7761 || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
7762 || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
7763 return;
7764 dst = SET_DEST (set);
7765 if (! rtx_equal_p (dst, SET_DEST (set2))
7766 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
7767 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
7768 || (INTVAL (XEXP (SET_SRC (set), 1))
1d7254c5 7769 != -INTVAL (XEXP (SET_SRC (set2), 1))))
cb2afeb3 7770 return;
53c17031
JH
7771 delete_related_insns (prev);
7772 delete_related_insns (next);
cb2afeb3
R
7773}
7774
7775/* Subfunction of delete_address_reloads: process registers found in X. */
7776static void
0c20a65f 7777delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
cb2afeb3
R
7778{
7779 rtx prev, set, dst, i2;
7780 int i, j;
7781 enum rtx_code code = GET_CODE (x);
7782
7783 if (code != REG)
7784 {
1d7254c5 7785 const char *fmt = GET_RTX_FORMAT (code);
cb2afeb3
R
7786 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7787 {
7788 if (fmt[i] == 'e')
7789 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
7790 else if (fmt[i] == 'E')
7791 {
1d7254c5 7792 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
cb2afeb3
R
7793 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
7794 current_insn);
7795 }
7796 }
7797 return;
7798 }
7799
7800 if (spill_reg_order[REGNO (x)] < 0)
7801 return;
aa6498c2 7802
cb2afeb3
R
7803 /* Scan backwards for the insn that sets X. This might be quite a way
7804 back due to inheritance. */
7805 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
7806 {
7807 code = GET_CODE (prev);
7808 if (code == CODE_LABEL || code == JUMP_INSN)
7809 return;
ec8e098d 7810 if (!INSN_P (prev))
cb2afeb3
R
7811 continue;
7812 if (reg_set_p (x, PATTERN (prev)))
7813 break;
7814 if (reg_referenced_p (x, PATTERN (prev)))
7815 return;
7816 }
7817 if (! prev || INSN_UID (prev) < reload_first_uid)
7818 return;
7819 /* Check that PREV only sets the reload register. */
7820 set = single_set (prev);
7821 if (! set)
7822 return;
7823 dst = SET_DEST (set);
f8cfc6aa 7824 if (!REG_P (dst)
cb2afeb3
R
7825 || ! rtx_equal_p (dst, x))
7826 return;
7827 if (! reg_set_p (dst, PATTERN (dead_insn)))
7828 {
7829 /* Check if DST was used in a later insn -
7830 it might have been inherited. */
7831 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
7832 {
4b4bf941 7833 if (LABEL_P (i2))
cb2afeb3 7834 break;
2c3c49de 7835 if (! INSN_P (i2))
cb2afeb3
R
7836 continue;
7837 if (reg_referenced_p (dst, PATTERN (i2)))
7838 {
7839 /* If there is a reference to the register in the current insn,
7840 it might be loaded in a non-inherited reload. If no other
7841 reload uses it, that means the register is set before
7842 referenced. */
7843 if (i2 == current_insn)
7844 {
7845 for (j = n_reloads - 1; j >= 0; j--)
eceef4c9 7846 if ((rld[j].reg_rtx == dst && reload_inherited[j])
cb2afeb3
R
7847 || reload_override_in[j] == dst)
7848 return;
7849 for (j = n_reloads - 1; j >= 0; j--)
eceef4c9 7850 if (rld[j].in && rld[j].reg_rtx == dst)
cb2afeb3
R
7851 break;
7852 if (j >= 0)
7853 break;
7854 }
7855 return;
7856 }
4b4bf941 7857 if (JUMP_P (i2))
cb2afeb3 7858 break;
cb2afeb3 7859 /* If DST is still live at CURRENT_INSN, check if it is used for
3900dc09
R
7860 any reload. Note that even if CURRENT_INSN sets DST, we still
7861 have to check the reloads. */
cb2afeb3
R
7862 if (i2 == current_insn)
7863 {
7864 for (j = n_reloads - 1; j >= 0; j--)
eceef4c9 7865 if ((rld[j].reg_rtx == dst && reload_inherited[j])
cb2afeb3
R
7866 || reload_override_in[j] == dst)
7867 return;
7868 /* ??? We can't finish the loop here, because dst might be
7869 allocated to a pseudo in this block if no reload in this
14b493d6 7870 block needs any of the classes containing DST - see
cb2afeb3
R
7871 spill_hard_reg. There is no easy way to tell this, so we
7872 have to scan till the end of the basic block. */
7873 }
3900dc09
R
7874 if (reg_set_p (dst, PATTERN (i2)))
7875 break;
cb2afeb3
R
7876 }
7877 }
7878 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
7879 reg_reloaded_contents[REGNO (dst)] = -1;
ca6c03ca 7880 delete_insn (prev);
32131a9c 7881}
32131a9c 7882\f
a8fdc208 7883/* Output reload-insns to reload VALUE into RELOADREG.
858a47b1 7884 VALUE is an autoincrement or autodecrement RTX whose operand
32131a9c
RK
7885 is a register or memory location;
7886 so reloading involves incrementing that location.
cb2afeb3 7887 IN is either identical to VALUE, or some cheaper place to reload from.
32131a9c
RK
7888
7889 INC_AMOUNT is the number to increment or decrement by (always positive).
cb2afeb3 7890 This cannot be deduced from VALUE.
32131a9c 7891
cb2afeb3
R
7892 Return the instruction that stores into RELOADREG. */
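   /* Editorial illustration (register numbers invented): for
      VALUE == (post_inc:SI (reg:SI 5)) and INC_AMOUNT == 4, when the
      increment cannot be performed directly on (reg:SI 5), the emitted
      sequence is roughly
          (set RELOADREG (reg:SI 5))
          (set RELOADREG (plus:SI RELOADREG (const_int 4)))
          (set (reg:SI 5) RELOADREG)          <- the store returned
          (set RELOADREG (plus:SI RELOADREG (const_int -4)))
      leaving RELOADREG holding the original, pre-increment value for the
      insn being reloaded.  */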
7893
7894static rtx
0c20a65f 7895inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
32131a9c
RK
7896{
7897 /* REG or MEM to be copied and incremented. */
7898 rtx incloc = XEXP (value, 0);
7899 /* Nonzero if increment after copying. */
7900 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
546b63fb 7901 rtx last;
0009eff2
RK
7902 rtx inc;
7903 rtx add_insn;
7904 int code;
cb2afeb3
R
7905 rtx store;
7906 rtx real_in = in == value ? XEXP (in, 0) : in;
32131a9c
RK
7907
7908 /* No hard register is equivalent to this register after
40f03658 7909 the inc/dec operation. If REG_LAST_RELOAD_REG were nonzero,
32131a9c
RK
7910 we could inc/dec that register as well (maybe even using it for
7911 the source), but I'm not sure it's worth worrying about. */
f8cfc6aa 7912 if (REG_P (incloc))
32131a9c
RK
7913 reg_last_reload_reg[REGNO (incloc)] = 0;
7914
7915 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
1d7254c5 7916 inc_amount = -inc_amount;
32131a9c 7917
fb3821f7 7918 inc = GEN_INT (inc_amount);
0009eff2
RK
7919
7920 /* If this is post-increment, first copy the location to the reload reg. */
cb2afeb3
R
7921 if (post && real_in != reloadreg)
7922 emit_insn (gen_move_insn (reloadreg, real_in));
0009eff2 7923
cb2afeb3
R
7924 if (in == value)
7925 {
7926 /* See if we can directly increment INCLOC. Use a method similar to
7927 that in gen_reload. */
0009eff2 7928
cb2afeb3
R
7929 last = get_last_insn ();
7930 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
7931 gen_rtx_PLUS (GET_MODE (incloc),
7932 incloc, inc)));
05d10675 7933
cb2afeb3
R
7934 code = recog_memoized (add_insn);
7935 if (code >= 0)
32131a9c 7936 {
0eadeb15
BS
7937 extract_insn (add_insn);
7938 if (constrain_operands (1))
cb2afeb3
R
7939 {
7940 /* If this is a pre-increment and we have incremented the value
7941 where it lives, copy the incremented value to RELOADREG to
7942 be used as an address. */
0009eff2 7943
cb2afeb3
R
7944 if (! post)
7945 emit_insn (gen_move_insn (reloadreg, incloc));
546b63fb 7946
cb2afeb3
R
7947 return add_insn;
7948 }
32131a9c 7949 }
cb2afeb3 7950 delete_insns_since (last);
32131a9c 7951 }
0009eff2 7952
0009eff2
RK
7953 /* If we couldn't do the increment directly, we must do it in RELOADREG.
7954 The way we do this depends on whether this is pre- or post-increment.
7955 For pre-increment, copy INCLOC to the reload register, increment it
7956 there, then save back. */
7957
7958 if (! post)
7959 {
cb2afeb3
R
7960 if (in != reloadreg)
7961 emit_insn (gen_move_insn (reloadreg, real_in));
546b63fb 7962 emit_insn (gen_add2_insn (reloadreg, inc));
cb2afeb3 7963 store = emit_insn (gen_move_insn (incloc, reloadreg));
0009eff2 7964 }
32131a9c
RK
7965 else
7966 {
0009eff2
RK
7967 /* Postincrement.
7968 Because this might be a jump insn or a compare, and because RELOADREG
7969 may not be available after the insn in an input reload, we must do
7970 the incrementation before the insn being reloaded for.
7971
cb2afeb3 7972 We have already copied IN to RELOADREG. Increment the copy in
0009eff2
RK
7973 RELOADREG, save that back, then decrement RELOADREG so it has
7974 the original value. */
7975
546b63fb 7976 emit_insn (gen_add2_insn (reloadreg, inc));
cb2afeb3 7977 store = emit_insn (gen_move_insn (incloc, reloadreg));
546b63fb 7978 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
32131a9c 7979 }
0009eff2 7980
cb2afeb3 7981 return store;
32131a9c
RK
7982}
7983\f
2dfa9a87
MH
7984#ifdef AUTO_INC_DEC
7985static void
0c20a65f 7986add_auto_inc_notes (rtx insn, rtx x)
2dfa9a87
MH
7987{
7988 enum rtx_code code = GET_CODE (x);
6f7d635c 7989 const char *fmt;
2dfa9a87
MH
7990 int i, j;
7991
7992 if (code == MEM && auto_inc_p (XEXP (x, 0)))
7993 {
7994 REG_NOTES (insn)
7995 = gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
7996 return;
7997 }
7998
7999 /* Scan all the operand sub-expressions. */
8000 fmt = GET_RTX_FORMAT (code);
8001 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8002 {
8003 if (fmt[i] == 'e')
8004 add_auto_inc_notes (insn, XEXP (x, i));
8005 else if (fmt[i] == 'E')
8006 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8007 add_auto_inc_notes (insn, XVECEXP (x, i, j));
8008 }
8009}
8010#endif
94bd63e5
AH
8011
8012/* Copy EH notes from an insn to its reloads. */
8013static void
0c20a65f 8014copy_eh_notes (rtx insn, rtx x)
94bd63e5
AH
8015{
8016 rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
8017 if (eh_note)
8018 {
8019 for (; x != 0; x = NEXT_INSN (x))
8020 {
8021 if (may_trap_p (PATTERN (x)))
a6a2274a 8022 REG_NOTES (x)
94bd63e5
AH
8023 = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
8024 REG_NOTES (x));
8025 }
8026 }
8027}
8028
f1330226
JH
8029/* This is used by the reload pass, which emits some instructions after
8030 abnormal calls, moving the basic block end, when it really wants to
8031 emit them on the edge.  Look for abnormal call edges, find the proper
8032 call backward, and fix the damage.
a6a2274a 8033
f1330226 8034 Instructions that throw exceptions internally are handled similarly. */
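/* Editorial illustration (invented example): if reload emitted a spill store
   after a call that can throw,
       (call_insn ...)                 <- where BB_END ought to be
       (set (mem ...) (reg ...))       <- emitted by reload past the block end
   the loop below trims BB_END back to the call, removes the trailing insns
   from the insn chain, and re-inserts them on the fallthru edge with
   insert_insn_on_edge, so they execute only on the normal-return path.  */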
068473ec 8035void
0c20a65f 8036fixup_abnormal_edges (void)
f1330226 8037{
f1330226 8038 bool inserted = false;
e0082a72 8039 basic_block bb;
f1330226 8040
e0082a72 8041 FOR_EACH_BB (bb)
f1330226 8042 {
f1330226
JH
8043 edge e;
8044
09da1532 8045 /* Look for cases we are interested in - calls or instructions causing
f1330226
JH
8046 exceptions. */
8047 for (e = bb->succ; e; e = e->succ_next)
8048 {
8049 if (e->flags & EDGE_ABNORMAL_CALL)
8050 break;
8051 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
8052 == (EDGE_ABNORMAL | EDGE_EH))
8053 break;
8054 }
4b4bf941 8055 if (e && !CALL_P (BB_END (bb))
a813c111 8056 && !can_throw_internal (BB_END (bb)))
f1330226 8057 {
a813c111 8058 rtx insn = BB_END (bb), stop = NEXT_INSN (BB_END (bb));
f1330226
JH
8059 rtx next;
8060 for (e = bb->succ; e; e = e->succ_next)
8061 if (e->flags & EDGE_FALLTHRU)
8062 break;
39f95a2c
JH
8063 /* Get past the new insns generated. Allow notes, as the insns may
8064 already have been deleted. */
4b4bf941 8065 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
39f95a2c 8066 && !can_throw_internal (insn)
a813c111 8067 && insn != BB_HEAD (bb))
f1330226 8068 insn = PREV_INSN (insn);
4b4bf941 8069 if (!CALL_P (insn) && !can_throw_internal (insn))
f1330226 8070 abort ();
a813c111 8071 BB_END (bb) = insn;
f1330226
JH
8072 inserted = true;
8073 insn = NEXT_INSN (insn);
0c4992b0 8074 while (insn && insn != stop)
f1330226
JH
8075 {
8076 next = NEXT_INSN (insn);
0c4992b0
JH
8077 if (INSN_P (insn))
8078 {
53c17031 8079 delete_insn (insn);
f8ed1958 8080
ed8d2920
MM
8081 /* Sometimes there's still the return value USE.
8082 If it's placed after a trapping call (i.e. that
8083 call is the last insn anyway), we have no fallthru
8084 edge. Simply delete this use and don't try to insert
14b493d6 8085 on the non-existent edge. */
ed8d2920
MM
8086 if (GET_CODE (PATTERN (insn)) != USE)
8087 {
ed8d2920
MM
8088 /* We're not deleting it, we're moving it. */
8089 INSN_DELETED_P (insn) = 0;
8090 PREV_INSN (insn) = NULL_RTX;
8091 NEXT_INSN (insn) = NULL_RTX;
f8ed1958 8092
ed8d2920
MM
8093 insert_insn_on_edge (insn, e);
8094 }
0c4992b0 8095 }
f1330226
JH
8096 insn = next;
8097 }
8098 }
8099 }
83fd323c
JH
8100 /* We've possibly turned single trapping insn into multiple ones. */
8101 if (flag_non_call_exceptions)
8102 {
8103 sbitmap blocks;
8104 blocks = sbitmap_alloc (last_basic_block);
8105 sbitmap_ones (blocks);
8106 find_many_sub_basic_blocks (blocks);
8107 }
f1330226
JH
8108 if (inserted)
8109 commit_edge_insertions ();
8110}