/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 88, 89, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"

#include "machmode.h"
#include "hard-reg-set.h"
#include "rtl.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "basic-block.h"
#include "reload.h"
#include "recog.h"
#include "output.h"
#include "real.h"
#include "toplev.h"

#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif

/* This file contains the reload pass of the compiler, which is
   run after register allocation has been done.  It checks that
   each insn is valid (operands required to be in registers really
   are in registers of the proper class) and fixes up invalid ones
   by copying values temporarily into registers for the insns
   that need them.

   The results of register allocation are described by the vector
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
   can be used to find which hard reg, if any, a pseudo reg is in.

   The technique we always use is to free up a few hard regs that are
   called ``reload regs'', and for each place where a pseudo reg
   must be in a hard reg, copy it temporarily into one of the reload regs.

   Reload regs are allocated locally for every instruction that needs
   reloads.  When there are pseudos which are allocated to a register that
   has been chosen as a reload reg, such pseudos must be ``spilled''.
   This means that they go to other hard regs, or to stack slots if no other
   available hard regs can be found.  Spilling can invalidate more
   insns, requiring additional need for reloads, so we must keep checking
   until the process stabilizes.

   For machines with different classes of registers, we must keep track
   of the register class needed for each reload, and make sure that
   we allocate enough reload registers of each class.

   The file reload.c contains the code that checks one insn for
   validity and reports the reloads that it needs.  This file
   is in charge of scanning the entire rtl code, accumulating the
   reload needs, spilling, assigning reload registers to use for
   fixing up each insn, and generating the new insns to copy values
   into the reload registers.  */
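
/* In outline, the main driver (reload (), below) proceeds roughly as
   follows; this is a simplified sketch, not a precise description of
   every step:

     loop until nothing changes:
       recompute elimination offsets and pseudo equivalences
       calculate_needs_all_insns (global);  -- run find_reloads on each insn
       find_reload_regs (chain, dumpfile);  -- for every insn needing reloads
       finish_spills (global, dumpfile);    -- spill pseudos, retry allocation
     reload_as_needed (global);             -- emit the actual reload insns  */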


#ifndef REGISTER_MOVE_COST
#define REGISTER_MOVE_COST(x, y) 2
#endif
\f
/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine.)  */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;
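
/* An example (illustrative only): a pseudo that holds an incoming argument
   living on the stack typically has reg_equiv_constant[N] == 0 and a
   reg_equiv_memory_loc[N] of roughly the form
       (mem:SI (plus:SI (reg:SI FRAME_POINTER_REGNUM) (const_int 8))).
   If that address is valid, the MEM is copied to reg_equiv_mem[N]; if the
   displacement is out of range, the address is recorded in
   reg_equiv_address[N] instead.  */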

/* Widest width in which each pseudo reg is referred to (via subreg).  */
static int *reg_max_ref_width;

/* Element N is the list of insns that initialized reg N from its equivalent
   constant or memory slot.  */
static rtx *reg_equiv_init;

/* Vector to remember old contents of reg_renumber before spilling.  */
static short *reg_old_renumber;

/* During reload_as_needed, element N contains the last pseudo regno reloaded
   into hard register N.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   hard register N was last used.  Its contents are significant only
   when reg_reloaded_valid is set for this register.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
static HARD_REG_SET reg_reloaded_valid;
/* Indicate if the register was dead at the end of the reload.
   This is only valid if reg_reloaded_contents is set and valid.  */
static HARD_REG_SET reg_reloaded_dead;

/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This is the register that was stored with spill_reg_store.  This is a
   copy of reload_out / reload_out_reg when the value was stored; if
   reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.

   ?!? This is no longer accurate.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

/* This reg set indicates registers that can't be used as spill registers for
   the currently processed insn.  These are the hard registers which are live
   during the insn, but not allocated to pseudos, as well as fixed
   registers.  */
static HARD_REG_SET bad_spill_regs;

/* These are the hard registers that can't be used as spill register for any
   insn.  This includes registers used for user variables and registers that
   we can't eliminate.  A register that appears in this set also can't be used
   to retry register allocation.  */
static HARD_REG_SET bad_spill_regs_global;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `n_spills' is the number of
   elements that are actually valid; new ones are added at the end.

   Both spill_regs and spill_reg_order are used on two occasions:
   once during find_reload_regs, where they keep track of the spill registers
   for a single insn, but also during reload_as_needed where they show all
   the registers ever used by reload.  For the latter case, the information
   is calculated during finish_spills.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* This vector of reg sets indicates, for each pseudo, which hard registers
   may not be used for retrying global allocation because the register was
   formerly spilled from one of them.  If we allowed reallocating a pseudo to
   a register that it was already allocated to, reload might not
   terminate.  */
static HARD_REG_SET *pseudo_previous_regs;

/* This vector of reg sets indicates, for each pseudo, which hard
   registers may not be used for retrying global allocation because they
   are used as spill registers during one of the insns in which the
   pseudo is live.  */
static HARD_REG_SET *pseudo_forbidden_regs;

/* All hard regs that have been used as spill registers for any insn are
   marked in this set.  */
static HARD_REG_SET used_spill_regs;

/* Index of last register assigned as a spill register.  We allocate in
   a round-robin fashion.  */
static int last_spill_reg;

/* Describes order of preference for putting regs into spill_regs.
   Contains the numbers of all the hard regs, in order most preferred first.
   This order is different for each function.
   It is set up by order_regs_for_reload.
   Empty elements at the end contain -1.  */
static short potential_reload_regs[FIRST_PSEUDO_REGISTER];

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */
static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */
char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */
char double_reg_address_ok;

/* Record the stack slot for each spilled hard register.  */
static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */
static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Record which pseudos needed to be spilled.  */
static regset spilled_pseudos;

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */
int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */
int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  */
enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */
struct obstack reload_obstack;

/* Points to the beginning of the reload_obstack.  All insn_chain structures
   are allocated first.  */
char *reload_startobj;

/* The point after all insn_chain structures.  Used to quickly deallocate
   memory used while processing one insn.  */
char *reload_firstobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* List of insn_chain instructions, one for every insn that reload needs to
   examine.  */
struct insn_chain *reload_insn_chain;

#ifdef TREE_CODE
extern tree current_function_decl;
#else
extern union tree_node *current_function_decl;
#endif

/* List of all insns needing reloads.  */
static struct insn_chain *insns_need_reload;
\f
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

struct elim_table
{
  int from;                     /* Register number to be eliminated.  */
  int to;                       /* Register number used as replacement.  */
  int initial_offset;           /* Initial difference between values.  */
  int can_eliminate;            /* Non-zero if this elimination can be done.  */
  int can_eliminate_previous;   /* Value of CAN_ELIMINATE in previous scan over
                                   insns made by reload.  */
  int offset;                   /* Current offset between the two regs.  */
  int previous_offset;          /* Offset at end of previous insn.  */
  int ref_outside_mem;          /* "to" has been referenced outside a MEM.  */
  rtx from_rtx;                 /* REG rtx for the register to be eliminated.
                                   We cannot simply compare the number since
                                   we might then spuriously replace a hard
                                   register corresponding to a pseudo
                                   assigned to the reg to be eliminated.  */
  rtx to_rtx;                   /* REG rtx for the replacement.  */
};

static struct elim_table * reg_eliminate = 0;

/* This is an intermediate structure to initialize the table.  It has
   exactly the members provided by ELIMINABLE_REGS.  */
static struct elim_table_1
{
  int from;
  int to;
} reg_eliminate_1[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate_1/sizeof reg_eliminate_1[0])
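
/* As an illustration only (not taken from any particular port): a target
   that can replace both the argument pointer and the frame pointer would
   provide something like

     #define ELIMINABLE_REGS                             \
     {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },    \
      { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM },    \
      { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}

   in its target header; init_elim_table below copies each pair into
   reg_eliminate and uses CAN_ELIMINATE to decide which of them are
   actually possible in the current function.  */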

/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If non-zero, we use a new copy of each
   replacement result in any insns encountered.  */
int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;
/* And the number of registers that are equivalent to a constant that
   can be eliminated to frame_pointer / arg_pointer + constant.  */
static int num_eliminable_invariants;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the number of the label.
   The first table is an array of flags that records whether we have yet
   encountered a label and the second table is an array of arrays, one
   entry in the latter array for each elimination.  */

static char *offsets_known_at;
static int (*offsets_at)[NUM_ELIMINABLE_REGS];
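
/* For instance (illustrative): once reload () has set up these tables,
   offsets_known_at[CODE_LABEL_NUMBER (label)] is nonzero after LABEL has
   been reached, and offsets_at[CODE_LABEL_NUMBER (label)][i] then holds
   the offset recorded for the I-th elimination at that label.  */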

/* Number of labels in the current function.  */

static int num_labels;

struct hard_reg_n_uses
{
  int regno;
  unsigned int uses;
};
\f
static void maybe_fix_stack_asms PROTO((void));
static void calculate_needs_all_insns PROTO((int));
static void calculate_needs PROTO((struct insn_chain *));
static void find_reload_regs PROTO((struct insn_chain *chain, FILE *));
static void find_tworeg_group PROTO((struct insn_chain *, int, FILE *));
static void find_group PROTO((struct insn_chain *, int, FILE *));
static int possible_group_p PROTO((struct insn_chain *, int));
static void count_possible_groups PROTO((struct insn_chain *, int));
static int modes_equiv_for_class_p PROTO((enum machine_mode, enum machine_mode, enum reg_class));
static void delete_caller_save_insns PROTO((void));

static void spill_failure PROTO((rtx));
static void new_spill_reg PROTO((struct insn_chain *, int, int, int, FILE *));
static void maybe_mark_pseudo_spilled PROTO((int));
static void delete_dead_insn PROTO((rtx));
static void alter_reg PROTO((int, int));
static void set_label_offsets PROTO((rtx, rtx, int));
static int eliminate_regs_in_insn PROTO((rtx, int));
static void update_eliminable_offsets PROTO((void));
static void mark_not_eliminable PROTO((rtx, rtx));
static void set_initial_elim_offsets PROTO((void));
static void verify_initial_elim_offsets PROTO((void));
static void set_initial_label_offsets PROTO((void));
static void set_offsets_for_label PROTO((rtx));
static void init_elim_table PROTO((void));
static void update_eliminables PROTO((HARD_REG_SET *));
static void spill_hard_reg PROTO((int, FILE *, int));
static int finish_spills PROTO((int, FILE *));
static void ior_hard_reg_set PROTO((HARD_REG_SET *, HARD_REG_SET *));
static void scan_paradoxical_subregs PROTO((rtx));
static int hard_reg_use_compare PROTO((const PTR, const PTR));
static void count_pseudo PROTO((struct hard_reg_n_uses *, int));
static void order_regs_for_reload PROTO((struct insn_chain *));
static void reload_as_needed PROTO((int));
static void forget_old_reloads_1 PROTO((rtx, rtx));
static int reload_reg_class_lower PROTO((const PTR, const PTR));
static void mark_reload_reg_in_use PROTO((int, int, enum reload_type, enum machine_mode));
static void clear_reload_reg_in_use PROTO((int, int, enum reload_type, enum machine_mode));
static int reload_reg_free_p PROTO((int, int, enum reload_type));
static int reload_reg_free_for_value_p PROTO((int, int, enum reload_type, rtx, rtx, int, int));
static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
static int allocate_reload_reg PROTO((struct insn_chain *, int, int, int));
static void choose_reload_regs PROTO((struct insn_chain *));
static void merge_assigned_reloads PROTO((rtx));
static void emit_reload_insns PROTO((struct insn_chain *));
static void delete_output_reload PROTO((rtx, int, int));
static void delete_address_reloads PROTO((rtx, rtx));
static void delete_address_reloads_1 PROTO((rtx, rtx, rtx));
static rtx inc_for_reload PROTO((rtx, rtx, rtx, int));
static int constraint_accepts_reg_p PROTO((const char *, rtx));
static void reload_cse_regs_1 PROTO((rtx));
static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int));
static int reload_cse_mem_conflict_p PROTO((rtx, rtx));
static void reload_cse_invalidate_mem PROTO((rtx));
static void reload_cse_invalidate_rtx PROTO((rtx, rtx));
static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode));
static int reload_cse_noop_set_p PROTO((rtx, rtx));
static int reload_cse_simplify_set PROTO((rtx, rtx));
static int reload_cse_simplify_operands PROTO((rtx));
static void reload_cse_check_clobber PROTO((rtx, rtx));
static void reload_cse_record_set PROTO((rtx, rtx));
static void reload_combine PROTO((void));
static void reload_combine_note_use PROTO((rtx *, rtx));
static void reload_combine_note_store PROTO((rtx, rtx));
static void reload_cse_move2add PROTO((rtx));
static void move2add_note_store PROTO((rtx, rtx));
#ifdef AUTO_INC_DEC
static void add_auto_inc_notes PROTO((rtx, rtx));
#endif
\f
/* Initialize the reload pass once per compilation.  */

void
init_reload ()
{
  register int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  register rtx tem
    = gen_rtx_MEM (Pmode,
                   gen_rtx_PLUS (Pmode,
                                 gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1),
                                 GEN_INT (4)));
  spill_indirect_levels = 0;

  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx_MEM (Pmode, tem);
    }

  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx_PLUS (Pmode,
                          gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
                          gen_rtx_REG (Pmode, i));

      /* This way, we make sure that reg+reg is an offsettable address.  */
      tem = plus_constant (tem, 4);

      if (memory_address_p (QImode, tem))
        {
          double_reg_address_ok = 1;
          break;
        }
    }

  /* Initialize obstack for our rtl allocation.  */
  gcc_obstack_init (&reload_obstack);
  reload_startobj = (char *) obstack_alloc (&reload_obstack, 0);
}

/* List of insn chains that are currently unused.  */
static struct insn_chain *unused_insn_chains = 0;

/* Allocate an empty insn_chain structure.  */
struct insn_chain *
new_insn_chain ()
{
  struct insn_chain *c;

  if (unused_insn_chains == 0)
    {
      c = (struct insn_chain *)
        obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
      c->live_before = OBSTACK_ALLOC_REG_SET (&reload_obstack);
      c->live_after = OBSTACK_ALLOC_REG_SET (&reload_obstack);
    }
  else
    {
      c = unused_insn_chains;
      unused_insn_chains = c->next;
    }
  c->is_caller_save_insn = 0;
  c->need_operand_change = 0;
  c->need_reload = 0;
  c->need_elim = 0;
  return c;
}

/* Small utility function to set all regs in hard reg set TO which are
   allocated to pseudos in regset FROM.  */
void
compute_use_by_pseudos (to, from)
     HARD_REG_SET *to;
     regset from;
{
  int regno;
  EXECUTE_IF_SET_IN_REG_SET
    (from, FIRST_PSEUDO_REGISTER, regno,
     {
       int r = reg_renumber[regno];
       int nregs;
       if (r < 0)
         {
           /* reload_combine uses the information from
              BASIC_BLOCK->global_live_at_start, which might still
              contain registers that have not actually been allocated
              since they have an equivalence.  */
           if (! reload_completed)
             abort ();
         }
       else
         {
           nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (regno));
           while (nregs-- > 0)
             SET_HARD_REG_BIT (*to, r + nregs);
         }
     });
}
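
/* A minimal usage sketch (illustrative only): to collect every hard
   register that carries a live pseudo at the start of an insn chain
   element CHAIN, one could write

     HARD_REG_SET used_by_pseudos;
     CLEAR_HARD_REG_SET (used_by_pseudos);
     compute_use_by_pseudos (&used_by_pseudos, chain->live_before);

   Broadly similar calls appear in the later parts of this file.  */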
03acd8f8 564\f
1e5bd841
BS
565/* Global variables used by reload and its subroutines. */
566
1e5bd841
BS
567/* Set during calculate_needs if an insn needs register elimination. */
568static int something_needs_elimination;
cb2afeb3
R
569/* Set during calculate_needs if an insn needs an operand changed. */
570int something_needs_operands_changed;
1e5bd841 571
1e5bd841
BS
572/* Nonzero means we couldn't get enough spill regs. */
573static int failure;
574
546b63fb 575/* Main entry point for the reload pass.
32131a9c
RK
576
577 FIRST is the first insn of the function being compiled.
578
579 GLOBAL nonzero means we were called from global_alloc
580 and should attempt to reallocate any pseudoregs that we
581 displace from hard regs we will use for reloads.
582 If GLOBAL is zero, we do not have enough information to do that,
583 so any pseudo reg that is spilled must go to the stack.
584
585 DUMPFILE is the global-reg debugging dump file stream, or 0.
586 If it is nonzero, messages are written to it to describe
587 which registers are seized as reload regs, which pseudo regs
5352b11a 588 are spilled from them, and where the pseudo regs are reallocated to.
32131a9c 589
5352b11a
RS
590 Return value is nonzero if reload failed
591 and we must not do any more for this function. */
592
593int
32131a9c
RK
594reload (first, global, dumpfile)
595 rtx first;
596 int global;
597 FILE *dumpfile;
598{
03acd8f8 599 register int i;
32131a9c
RK
600 register rtx insn;
601 register struct elim_table *ep;
602
a68d4b75
BK
603 /* The two pointers used to track the true location of the memory used
604 for label offsets. */
605 char *real_known_ptr = NULL_PTR;
606 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
607
32131a9c
RK
608 /* Make sure even insns with volatile mem refs are recognizable. */
609 init_recog ();
610
1e5bd841
BS
611 failure = 0;
612
cad6f7d0
BS
613 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
614
437a710d
BS
615 /* Make sure that the last insn in the chain
616 is not something that needs reloading. */
617 emit_note (NULL_PTR, NOTE_INSN_DELETED);
618
32131a9c
RK
619 /* Enable find_equiv_reg to distinguish insns made by reload. */
620 reload_first_uid = get_max_uid ();
621
0dadecf6
RK
622#ifdef SECONDARY_MEMORY_NEEDED
623 /* Initialize the secondary memory table. */
624 clear_secondary_mem ();
625#endif
626
32131a9c 627 /* We don't have a stack slot for any spill reg yet. */
4c9a05bc
RK
628 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
629 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
32131a9c 630
a8efe40d
RK
631 /* Initialize the save area information for caller-save, in case some
632 are needed. */
633 init_save_areas ();
a8fdc208 634
32131a9c
RK
635 /* Compute which hard registers are now in use
636 as homes for pseudo registers.
637 This is done here rather than (eg) in global_alloc
638 because this point is reached even if not optimizing. */
32131a9c
RK
639 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
640 mark_home_live (i);
641
8dddd002
RK
642 /* A function that receives a nonlocal goto must save all call-saved
643 registers. */
644 if (current_function_has_nonlocal_label)
645 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
646 {
647 if (! call_used_regs[i] && ! fixed_regs[i])
648 regs_ever_live[i] = 1;
649 }
650
32131a9c
RK
651 /* Find all the pseudo registers that didn't get hard regs
652 but do have known equivalent constants or memory slots.
653 These include parameters (known equivalent to parameter slots)
654 and cse'd or loop-moved constant memory addresses.
655
656 Record constant equivalents in reg_equiv_constant
657 so they will be substituted by find_reloads.
658 Record memory equivalents in reg_mem_equiv so they can
659 be substituted eventually by altering the REG-rtx's. */
660
ad85216e
KG
661 reg_equiv_constant = (rtx *) xcalloc (max_regno, sizeof (rtx));
662 reg_equiv_memory_loc = (rtx *) xcalloc (max_regno, sizeof (rtx));
663 reg_equiv_mem = (rtx *) xcalloc (max_regno, sizeof (rtx));
664 reg_equiv_init = (rtx *) xcalloc (max_regno, sizeof (rtx));
665 reg_equiv_address = (rtx *) xcalloc (max_regno, sizeof (rtx));
666 reg_max_ref_width = (int *) xcalloc (max_regno, sizeof (int));
667 reg_old_renumber = (short *) xcalloc (max_regno, sizeof (short));
47c3ed98 668 bcopy ((PTR) reg_renumber, (PTR) reg_old_renumber, max_regno * sizeof (short));
03acd8f8
BS
669 pseudo_forbidden_regs
670 = (HARD_REG_SET *) xmalloc (max_regno * sizeof (HARD_REG_SET));
671 pseudo_previous_regs
ad85216e 672 = (HARD_REG_SET *) xcalloc (max_regno, sizeof (HARD_REG_SET));
32131a9c 673
03acd8f8 674 CLEAR_HARD_REG_SET (bad_spill_regs_global);
56f58d3a 675
32131a9c 676 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
56f58d3a
RK
677 Also find all paradoxical subregs and find largest such for each pseudo.
678 On machines with small register classes, record hard registers that
05d10675 679 are used for user variables. These can never be used for spills.
b453cb0b
RK
680 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
681 caller-saved registers must be marked live. */
32131a9c 682
2b49ee39 683 num_eliminable_invariants = 0;
32131a9c
RK
684 for (insn = first; insn; insn = NEXT_INSN (insn))
685 {
686 rtx set = single_set (insn);
687
b453cb0b
RK
688 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
689 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
690 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
691 if (! call_used_regs[i])
692 regs_ever_live[i] = 1;
693
32131a9c
RK
694 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
695 {
fb3821f7 696 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
a8efe40d
RK
697 if (note
698#ifdef LEGITIMATE_PIC_OPERAND_P
2b49ee39
R
699 && (! function_invariant_p (XEXP (note, 0))
700 || ! flag_pic
a8efe40d
RK
701 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
702#endif
703 )
32131a9c
RK
704 {
705 rtx x = XEXP (note, 0);
706 i = REGNO (SET_DEST (set));
707 if (i > LAST_VIRTUAL_REGISTER)
708 {
709 if (GET_CODE (x) == MEM)
956d6950
JL
710 {
711 /* If the operand is a PLUS, the MEM may be shared,
712 so make sure we have an unshared copy here. */
713 if (GET_CODE (XEXP (x, 0)) == PLUS)
714 x = copy_rtx (x);
715
716 reg_equiv_memory_loc[i] = x;
717 }
2b49ee39 718 else if (function_invariant_p (x))
32131a9c 719 {
2b49ee39
R
720 if (GET_CODE (x) == PLUS)
721 {
722 /* This is PLUS of frame pointer and a constant,
723 and might be shared. Unshare it. */
724 reg_equiv_constant[i] = copy_rtx (x);
725 num_eliminable_invariants++;
726 }
727 else if (x == frame_pointer_rtx
728 || x == arg_pointer_rtx)
729 {
730 reg_equiv_constant[i] = x;
731 num_eliminable_invariants++;
732 }
733 else if (LEGITIMATE_CONSTANT_P (x))
32131a9c
RK
734 reg_equiv_constant[i] = x;
735 else
736 reg_equiv_memory_loc[i]
d445b551 737 = force_const_mem (GET_MODE (SET_DEST (set)), x);
32131a9c
RK
738 }
739 else
740 continue;
741
742 /* If this register is being made equivalent to a MEM
743 and the MEM is not SET_SRC, the equivalencing insn
744 is one with the MEM as a SET_DEST and it occurs later.
745 So don't mark this insn now. */
746 if (GET_CODE (x) != MEM
747 || rtx_equal_p (SET_SRC (set), x))
135eb61c
R
748 reg_equiv_init[i]
749 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[i]);
32131a9c
RK
750 }
751 }
752 }
753
754 /* If this insn is setting a MEM from a register equivalent to it,
755 this is the equivalencing insn. */
756 else if (set && GET_CODE (SET_DEST (set)) == MEM
757 && GET_CODE (SET_SRC (set)) == REG
758 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
759 && rtx_equal_p (SET_DEST (set),
760 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
135eb61c
R
761 reg_equiv_init[REGNO (SET_SRC (set))]
762 = gen_rtx_INSN_LIST (VOIDmode, insn,
763 reg_equiv_init[REGNO (SET_SRC (set))]);
32131a9c
RK
764
765 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
766 scan_paradoxical_subregs (PATTERN (insn));
767 }
768
09dd1133 769 init_elim_table ();
32131a9c
RK
770
771 num_labels = max_label_num () - get_first_label_num ();
772
773 /* Allocate the tables used to store offset information at labels. */
a68d4b75
BK
774 /* We used to use alloca here, but the size of what it would try to
775 allocate would occasionally cause it to exceed the stack limit and
776 cause a core dump. */
777 real_known_ptr = xmalloc (num_labels);
778 real_at_ptr
32131a9c 779 = (int (*)[NUM_ELIMINABLE_REGS])
a68d4b75 780 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
32131a9c 781
a68d4b75
BK
782 offsets_known_at = real_known_ptr - get_first_label_num ();
783 offsets_at
784 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
32131a9c
RK
785
786 /* Alter each pseudo-reg rtx to contain its hard reg number.
787 Assign stack slots to the pseudos that lack hard regs or equivalents.
788 Do not touch virtual registers. */
789
790 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
791 alter_reg (i, -1);
792
32131a9c
RK
793 /* If we have some registers we think can be eliminated, scan all insns to
794 see if there is an insn that sets one of these registers to something
795 other than itself plus a constant. If so, the register cannot be
796 eliminated. Doing this scan here eliminates an extra pass through the
797 main reload loop in the most common case where register elimination
798 cannot be done. */
799 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
800 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
801 || GET_CODE (insn) == CALL_INSN)
802 note_stores (PATTERN (insn), mark_not_eliminable);
803
804#ifndef REGISTER_CONSTRAINTS
805 /* If all the pseudo regs have hard regs,
806 except for those that are never referenced,
807 we know that no reloads are needed. */
808 /* But that is not true if there are register constraints, since
809 in that case some pseudos might be in the wrong kind of hard reg. */
810
811 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
b1f21e0a 812 if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0)
32131a9c
RK
813 break;
814
b8093d02 815 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
a68d4b75
BK
816 {
817 free (real_known_ptr);
818 free (real_at_ptr);
56a65848
DB
819 free (reg_equiv_constant);
820 free (reg_equiv_memory_loc);
821 free (reg_equiv_mem);
822 free (reg_equiv_init);
823 free (reg_equiv_address);
824 free (reg_max_ref_width);
03acd8f8
BS
825 free (reg_old_renumber);
826 free (pseudo_previous_regs);
827 free (pseudo_forbidden_regs);
56a65848 828 return 0;
a68d4b75 829 }
32131a9c
RK
830#endif
831
18a90182
BS
832 maybe_fix_stack_asms ();
833
03acd8f8
BS
834 insns_need_reload = 0;
835 something_needs_elimination = 0;
05d10675 836
4079cd63
JW
837 /* Initialize to -1, which means take the first spill register. */
838 last_spill_reg = -1;
839
7609e720
BS
840 spilled_pseudos = ALLOCA_REG_SET ();
841
32131a9c 842 /* Spill any hard regs that we know we can't eliminate. */
03acd8f8 843 CLEAR_HARD_REG_SET (used_spill_regs);
32131a9c
RK
844 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
845 if (! ep->can_eliminate)
03acd8f8 846 spill_hard_reg (ep->from, dumpfile, 1);
9ff3516a
RK
847
848#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
849 if (frame_pointer_needed)
03acd8f8 850 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, dumpfile, 1);
9ff3516a 851#endif
7609e720
BS
852 finish_spills (global, dumpfile);
853
f1db3576
JL
854 /* From now on, we may need to generate moves differently. We may also
855 allow modifications of insns which cause them to not be recognized.
856 Any such modifications will be cleaned up during reload itself. */
b2f15f94
RK
857 reload_in_progress = 1;
858
32131a9c
RK
859 /* This loop scans the entire function each go-round
860 and repeats until one repetition spills no additional hard regs. */
03acd8f8 861 for (;;)
32131a9c 862 {
03acd8f8
BS
863 int something_changed;
864 int did_spill;
865 struct insn_chain *chain;
32131a9c 866
03acd8f8 867 HOST_WIDE_INT starting_frame_size;
32131a9c 868
7657bf2f
JW
869 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
870 here because the stack size may be a part of the offset computation
871 for register elimination, and there might have been new stack slots
872 created in the last iteration of this loop. */
873 assign_stack_local (BLKmode, 0, 0);
874
875 starting_frame_size = get_frame_size ();
876
09dd1133 877 set_initial_elim_offsets ();
1f3b1e1a 878 set_initial_label_offsets ();
03acd8f8 879
32131a9c
RK
880 /* For each pseudo register that has an equivalent location defined,
881 try to eliminate any eliminable registers (such as the frame pointer)
882 assuming initial offsets for the replacement register, which
883 is the normal case.
884
885 If the resulting location is directly addressable, substitute
886 the MEM we just got directly for the old REG.
887
888 If it is not addressable but is a constant or the sum of a hard reg
889 and constant, it is probably not addressable because the constant is
890 out of range, in that case record the address; we will generate
891 hairy code to compute the address in a register each time it is
6491dbbb
RK
892 needed. Similarly if it is a hard register, but one that is not
893 valid as an address register.
32131a9c
RK
894
895 If the location is not addressable, but does not have one of the
896 above forms, assign a stack slot. We have to do this to avoid the
897 potential of producing lots of reloads if, e.g., a location involves
898 a pseudo that didn't get a hard register and has an equivalent memory
899 location that also involves a pseudo that didn't get a hard register.
900
901 Perhaps at some point we will improve reload_when_needed handling
902 so this problem goes away. But that's very hairy. */
903
904 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
905 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
906 {
1914f5da 907 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
32131a9c
RK
908
909 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
910 XEXP (x, 0)))
911 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
912 else if (CONSTANT_P (XEXP (x, 0))
6491dbbb
RK
913 || (GET_CODE (XEXP (x, 0)) == REG
914 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
32131a9c
RK
915 || (GET_CODE (XEXP (x, 0)) == PLUS
916 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
917 && (REGNO (XEXP (XEXP (x, 0), 0))
918 < FIRST_PSEUDO_REGISTER)
919 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
920 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
921 else
922 {
923 /* Make a new stack slot. Then indicate that something
a8fdc208 924 changed so we go back and recompute offsets for
32131a9c
RK
925 eliminable registers because the allocation of memory
926 below might change some offset. reg_equiv_{mem,address}
927 will be set up for this pseudo on the next pass around
928 the loop. */
929 reg_equiv_memory_loc[i] = 0;
930 reg_equiv_init[i] = 0;
931 alter_reg (i, -1);
32131a9c
RK
932 }
933 }
a8fdc208 934
437a710d
BS
935 if (caller_save_needed)
936 setup_save_areas ();
937
03acd8f8 938 /* If we allocated another stack slot, redo elimination bookkeeping. */
437a710d 939 if (starting_frame_size != get_frame_size ())
32131a9c
RK
940 continue;
941
437a710d 942 if (caller_save_needed)
a8efe40d 943 {
437a710d
BS
944 save_call_clobbered_regs ();
945 /* That might have allocated new insn_chain structures. */
946 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
a8efe40d
RK
947 }
948
03acd8f8
BS
949 calculate_needs_all_insns (global);
950
951 CLEAR_REG_SET (spilled_pseudos);
952 did_spill = 0;
953
954 something_changed = 0;
32131a9c 955
0dadecf6
RK
956 /* If we allocated any new memory locations, make another pass
957 since it might have changed elimination offsets. */
958 if (starting_frame_size != get_frame_size ())
959 something_changed = 1;
960
09dd1133
BS
961 {
962 HARD_REG_SET to_spill;
963 CLEAR_HARD_REG_SET (to_spill);
964 update_eliminables (&to_spill);
965 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
966 if (TEST_HARD_REG_BIT (to_spill, i))
32131a9c 967 {
03acd8f8
BS
968 spill_hard_reg (i, dumpfile, 1);
969 did_spill = 1;
8f5db3c1
JL
970
971 /* Regardless of the state of spills, if we previously had
 972 a register that we thought we could eliminate, but now can
973 not eliminate, we must run another pass.
974
975 Consider pseudos which have an entry in reg_equiv_* which
976 reference an eliminable register. We must make another pass
977 to update reg_equiv_* so that we do not substitute in the
978 old value from when we thought the elimination could be
979 performed. */
980 something_changed = 1;
32131a9c 981 }
09dd1133 982 }
9ff3516a 983
03acd8f8
BS
984 CLEAR_HARD_REG_SET (used_spill_regs);
985 /* Try to satisfy the needs for each insn. */
986 for (chain = insns_need_reload; chain != 0;
987 chain = chain->next_need_reload)
988 find_reload_regs (chain, dumpfile);
32131a9c 989
1e5bd841
BS
990 if (failure)
991 goto failed;
437a710d 992
03acd8f8
BS
993 if (insns_need_reload != 0 || did_spill)
994 something_changed |= finish_spills (global, dumpfile);
7609e720 995
03acd8f8
BS
996 if (! something_changed)
997 break;
998
999 if (caller_save_needed)
7609e720 1000 delete_caller_save_insns ();
32131a9c
RK
1001 }
1002
1003 /* If global-alloc was run, notify it of any register eliminations we have
1004 done. */
1005 if (global)
1006 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1007 if (ep->can_eliminate)
1008 mark_elimination (ep->from, ep->to);
1009
32131a9c
RK
1010 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1011 If that insn didn't set the register (i.e., it copied the register to
1012 memory), just delete that insn instead of the equivalencing insn plus
1013 anything now dead. If we call delete_dead_insn on that insn, we may
135eb61c 1014 delete the insn that actually sets the register if the register dies
32131a9c
RK
1015 there and that is incorrect. */
1016
1017 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
135eb61c
R
1018 {
1019 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1020 {
1021 rtx list;
1022 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1023 {
1024 rtx equiv_insn = XEXP (list, 0);
1025 if (GET_CODE (equiv_insn) == NOTE)
1026 continue;
1027 if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1028 delete_dead_insn (equiv_insn);
1029 else
1030 {
1031 PUT_CODE (equiv_insn, NOTE);
1032 NOTE_SOURCE_FILE (equiv_insn) = 0;
1033 NOTE_LINE_NUMBER (equiv_insn) = NOTE_INSN_DELETED;
1034 }
1035 }
1036 }
1037 }
32131a9c
RK
1038
1039 /* Use the reload registers where necessary
1040 by generating move instructions to move the must-be-register
1041 values into or out of the reload registers. */
1042
03acd8f8
BS
1043 if (insns_need_reload != 0 || something_needs_elimination
1044 || something_needs_operands_changed)
c47f5ea5
BS
1045 {
1046 int old_frame_size = get_frame_size ();
1047
1048 reload_as_needed (global);
1049
1050 if (old_frame_size != get_frame_size ())
1051 abort ();
1052
1053 if (num_eliminable)
1054 verify_initial_elim_offsets ();
1055 }
32131a9c 1056
2a1f8b6b 1057 /* If we were able to eliminate the frame pointer, show that it is no
546b63fb 1058 longer live at the start of any basic block. If it ls live by
2a1f8b6b
RK
1059 virtue of being in a pseudo, that pseudo will be marked live
1060 and hence the frame pointer will be known to be live via that
1061 pseudo. */
1062
1063 if (! frame_pointer_needed)
1064 for (i = 0; i < n_basic_blocks; i++)
e881bb1b 1065 CLEAR_REGNO_REG_SET (BASIC_BLOCK (i)->global_live_at_start,
8e08106d 1066 HARD_FRAME_POINTER_REGNUM);
2a1f8b6b 1067
5352b11a
RS
1068 /* Come here (with failure set nonzero) if we can't get enough spill regs
1069 and we decide not to abort about it. */
1070 failed:
1071
a3ec87a8
RS
1072 reload_in_progress = 0;
1073
32131a9c
RK
1074 /* Now eliminate all pseudo regs by modifying them into
1075 their equivalent memory references.
1076 The REG-rtx's for the pseudos are modified in place,
1077 so all insns that used to refer to them now refer to memory.
1078
1079 For a reg that has a reg_equiv_address, all those insns
1080 were changed by reloading so that no insns refer to it any longer;
1081 but the DECL_RTL of a variable decl may refer to it,
1082 and if so this causes the debugging info to mention the variable. */
1083
1084 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1085 {
1086 rtx addr = 0;
ab1fd483 1087 int in_struct = 0;
6a651371 1088 int is_scalar = 0;
9ec36da5
JL
1089 int is_readonly = 0;
1090
1091 if (reg_equiv_memory_loc[i])
ab1fd483 1092 {
9ec36da5 1093 in_struct = MEM_IN_STRUCT_P (reg_equiv_memory_loc[i]);
c6df88cb 1094 is_scalar = MEM_SCALAR_P (reg_equiv_memory_loc[i]);
9ec36da5 1095 is_readonly = RTX_UNCHANGING_P (reg_equiv_memory_loc[i]);
ab1fd483 1096 }
9ec36da5
JL
1097
1098 if (reg_equiv_mem[i])
1099 addr = XEXP (reg_equiv_mem[i], 0);
1100
32131a9c
RK
1101 if (reg_equiv_address[i])
1102 addr = reg_equiv_address[i];
9ec36da5 1103
32131a9c
RK
1104 if (addr)
1105 {
1106 if (reg_renumber[i] < 0)
1107 {
1108 rtx reg = regno_reg_rtx[i];
ef178af3 1109 PUT_CODE (reg, MEM);
32131a9c
RK
1110 XEXP (reg, 0) = addr;
1111 REG_USERVAR_P (reg) = 0;
9ec36da5 1112 RTX_UNCHANGING_P (reg) = is_readonly;
ab1fd483 1113 MEM_IN_STRUCT_P (reg) = in_struct;
c6df88cb 1114 MEM_SCALAR_P (reg) = is_scalar;
41472af8
MM
1115 /* We have no alias information about this newly created
1116 MEM. */
1117 MEM_ALIAS_SET (reg) = 0;
32131a9c
RK
1118 }
1119 else if (reg_equiv_mem[i])
1120 XEXP (reg_equiv_mem[i], 0) = addr;
1121 }
1122 }
1123
2ae74651
JL
1124 /* We must set reload_completed now since the cleanup_subreg_operands call
1125 below will re-recognize each insn and reload may have generated insns
1126 which are only valid during and after reload. */
1127 reload_completed = 1;
1128
2dfa9a87
MH
1129 /* Make a pass over all the insns and delete all USEs which we
1130 inserted only to tag a REG_EQUAL note on them. Remove all
1131 REG_DEAD and REG_UNUSED notes. Delete all CLOBBER insns and
1132 simplify (subreg (reg)) operands. Also remove all REG_RETVAL and
1133 REG_LIBCALL notes since they are no longer useful or accurate.
1134 Strip and regenerate REG_INC notes that may have been moved
1135 around. */
32131a9c
RK
1136
1137 for (insn = first; insn; insn = NEXT_INSN (insn))
1138 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1139 {
6764d250 1140 rtx *pnote;
32131a9c 1141
0304f787
JL
1142 if ((GET_CODE (PATTERN (insn)) == USE
1143 && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1144 || GET_CODE (PATTERN (insn)) == CLOBBER)
b60a8416
R
1145 {
1146 PUT_CODE (insn, NOTE);
1147 NOTE_SOURCE_FILE (insn) = 0;
1148 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1149 continue;
1150 }
6764d250
BS
1151
1152 pnote = &REG_NOTES (insn);
1153 while (*pnote != 0)
32131a9c 1154 {
6764d250 1155 if (REG_NOTE_KIND (*pnote) == REG_DEAD
80599fd9 1156 || REG_NOTE_KIND (*pnote) == REG_UNUSED
2dfa9a87 1157 || REG_NOTE_KIND (*pnote) == REG_INC
80599fd9
NC
1158 || REG_NOTE_KIND (*pnote) == REG_RETVAL
1159 || REG_NOTE_KIND (*pnote) == REG_LIBCALL)
6764d250
BS
1160 *pnote = XEXP (*pnote, 1);
1161 else
1162 pnote = &XEXP (*pnote, 1);
32131a9c 1163 }
0304f787 1164
2dfa9a87
MH
1165#ifdef AUTO_INC_DEC
1166 add_auto_inc_notes (insn, PATTERN (insn));
1167#endif
1168
0304f787
JL
1169 /* And simplify (subreg (reg)) if it appears as an operand. */
1170 cleanup_subreg_operands (insn);
b60a8416 1171 }
32131a9c 1172
ab87f8c8
JL
1173 /* If we are doing stack checking, give a warning if this function's
1174 frame size is larger than we expect. */
1175 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1176 {
1177 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
05d10675
BS
1178 static int verbose_warned = 0;
1179
ab87f8c8
JL
1180 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1181 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
1182 size += UNITS_PER_WORD;
1183
1184 if (size > STACK_CHECK_MAX_FRAME_SIZE)
05d10675 1185 {
ab87f8c8
JL
1186 warning ("frame size too large for reliable stack checking");
1187 if (! verbose_warned)
1188 {
1189 warning ("try reducing the number of local variables");
1190 verbose_warned = 1;
1191 }
1192 }
1193 }
1194
32131a9c 1195 /* Indicate that we no longer have known memory locations or constants. */
58d9f9d9
JL
1196 if (reg_equiv_constant)
1197 free (reg_equiv_constant);
32131a9c 1198 reg_equiv_constant = 0;
58d9f9d9
JL
1199 if (reg_equiv_memory_loc)
1200 free (reg_equiv_memory_loc);
32131a9c 1201 reg_equiv_memory_loc = 0;
5352b11a 1202
a68d4b75
BK
1203 if (real_known_ptr)
1204 free (real_known_ptr);
1205 if (real_at_ptr)
1206 free (real_at_ptr);
1207
56a65848
DB
1208 free (reg_equiv_mem);
1209 free (reg_equiv_init);
1210 free (reg_equiv_address);
1211 free (reg_max_ref_width);
03acd8f8
BS
1212 free (reg_old_renumber);
1213 free (pseudo_previous_regs);
1214 free (pseudo_forbidden_regs);
56a65848 1215
7609e720
BS
1216 FREE_REG_SET (spilled_pseudos);
1217
8b4f9969
JW
1218 CLEAR_HARD_REG_SET (used_spill_regs);
1219 for (i = 0; i < n_spills; i++)
1220 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1221
7609e720
BS
1222 /* Free all the insn_chain structures at once. */
1223 obstack_free (&reload_obstack, reload_startobj);
1224 unused_insn_chains = 0;
1225
5352b11a 1226 return failure;
32131a9c 1227}
1e5bd841 1228
18a90182
BS
1229/* Yet another special case. Unfortunately, reg-stack forces people to
1230 write incorrect clobbers in asm statements. These clobbers must not
1231 cause the register to appear in bad_spill_regs, otherwise we'll call
1232 fatal_insn later. We clear the corresponding regnos in the live
1233 register sets to avoid this.
1234 The whole thing is rather sick, I'm afraid. */
1235static void
1236maybe_fix_stack_asms ()
1237{
1238#ifdef STACK_REGS
392dccb7 1239 const char *constraints[MAX_RECOG_OPERANDS];
18a90182
BS
1240 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1241 struct insn_chain *chain;
1242
1243 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1244 {
1245 int i, noperands;
1246 HARD_REG_SET clobbered, allowed;
1247 rtx pat;
1248
1249 if (GET_RTX_CLASS (GET_CODE (chain->insn)) != 'i'
1250 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1251 continue;
1252 pat = PATTERN (chain->insn);
1253 if (GET_CODE (pat) != PARALLEL)
1254 continue;
1255
1256 CLEAR_HARD_REG_SET (clobbered);
1257 CLEAR_HARD_REG_SET (allowed);
1258
1259 /* First, make a mask of all stack regs that are clobbered. */
1260 for (i = 0; i < XVECLEN (pat, 0); i++)
1261 {
1262 rtx t = XVECEXP (pat, 0, i);
1263 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1264 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1265 }
1266
1267 /* Get the operand values and constraints out of the insn. */
1ccbefce 1268 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
18a90182
BS
1269 constraints, operand_mode);
1270
1271 /* For every operand, see what registers are allowed. */
1272 for (i = 0; i < noperands; i++)
1273 {
6b9c6f4f 1274 const char *p = constraints[i];
18a90182
BS
1275 /* For every alternative, we compute the class of registers allowed
1276 for reloading in CLS, and merge its contents into the reg set
1277 ALLOWED. */
1278 int cls = (int) NO_REGS;
1279
1280 for (;;)
1281 {
1282 char c = *p++;
1283
1284 if (c == '\0' || c == ',' || c == '#')
1285 {
1286 /* End of one alternative - mark the regs in the current
1287 class, and reset the class. */
1288 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1289 cls = NO_REGS;
1290 if (c == '#')
1291 do {
1292 c = *p++;
1293 } while (c != '\0' && c != ',');
1294 if (c == '\0')
1295 break;
1296 continue;
1297 }
1298
1299 switch (c)
1300 {
1301 case '=': case '+': case '*': case '%': case '?': case '!':
1302 case '0': case '1': case '2': case '3': case '4': case 'm':
1303 case '<': case '>': case 'V': case 'o': case '&': case 'E':
1304 case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1305 case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1306 case 'P':
1307#ifdef EXTRA_CONSTRAINT
1308 case 'Q': case 'R': case 'S': case 'T': case 'U':
1309#endif
1310 break;
1311
1312 case 'p':
1313 cls = (int) reg_class_subunion[cls][(int) BASE_REG_CLASS];
1314 break;
1315
1316 case 'g':
1317 case 'r':
1318 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1319 break;
1320
1321 default:
1322 cls = (int) reg_class_subunion[cls][(int) REG_CLASS_FROM_LETTER (c)];
05d10675 1323
18a90182
BS
1324 }
1325 }
1326 }
1327 /* Those of the registers which are clobbered, but allowed by the
1328 constraints, must be usable as reload registers. So clear them
1329 out of the life information. */
1330 AND_HARD_REG_SET (allowed, clobbered);
1331 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1332 if (TEST_HARD_REG_BIT (allowed, i))
1333 {
1334 CLEAR_REGNO_REG_SET (chain->live_before, i);
1335 CLEAR_REGNO_REG_SET (chain->live_after, i);
1336 }
1337 }
1338
1339#endif
1340}
1341
03acd8f8
BS
1342\f
1343/* Walk the chain of insns, and determine for each whether it needs reloads
1344 and/or eliminations. Build the corresponding insns_need_reload list, and
1345 set something_needs_elimination as appropriate. */
1346static void
7609e720 1347calculate_needs_all_insns (global)
1e5bd841
BS
1348 int global;
1349{
7609e720 1350 struct insn_chain **pprev_reload = &insns_need_reload;
03acd8f8 1351 struct insn_chain **pchain;
1e5bd841 1352
03acd8f8
BS
1353 something_needs_elimination = 0;
1354
1355 for (pchain = &reload_insn_chain; *pchain != 0; pchain = &(*pchain)->next)
1e5bd841 1356 {
03acd8f8
BS
1357 rtx insn;
1358 struct insn_chain *chain;
1359
1360 chain = *pchain;
1361 insn = chain->insn;
1e5bd841 1362
03acd8f8
BS
1363 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1364 include REG_LABEL), we need to see what effects this has on the
1365 known offsets at labels. */
1e5bd841
BS
1366
1367 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1368 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1369 && REG_NOTES (insn) != 0))
1370 set_label_offsets (insn, insn, 0);
1371
1372 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1373 {
1374 rtx old_body = PATTERN (insn);
1375 int old_code = INSN_CODE (insn);
1376 rtx old_notes = REG_NOTES (insn);
1377 int did_elimination = 0;
cb2afeb3 1378 int operands_changed = 0;
2b49ee39
R
1379 rtx set = single_set (insn);
1380
1381 /* Skip insns that only set an equivalence. */
1382 if (set && GET_CODE (SET_DEST (set)) == REG
1383 && reg_renumber[REGNO (SET_DEST (set))] < 0
1384 && reg_equiv_constant[REGNO (SET_DEST (set))])
a8edca88
JW
1385 {
1386 /* Must clear out the shortcuts, in case they were set last
1387 time through. */
1388 chain->need_elim = 0;
1389 chain->need_reload = 0;
1390 chain->need_operand_change = 0;
1391 continue;
1392 }
1e5bd841 1393
1e5bd841 1394 /* If needed, eliminate any eliminable registers. */
2b49ee39 1395 if (num_eliminable || num_eliminable_invariants)
1e5bd841
BS
1396 did_elimination = eliminate_regs_in_insn (insn, 0);
1397
1398 /* Analyze the instruction. */
cb2afeb3
R
1399 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1400 global, spill_reg_order);
1401
1402 /* If a no-op set needs more than one reload, this is likely
1403 to be something that needs input address reloads. We
1404 can't get rid of this cleanly later, and it is of no use
1405 anyway, so discard it now.
1406 We only do this when expensive_optimizations is enabled,
1407 since this complements reload inheritance / output
1408 reload deletion, and it can make debugging harder. */
1409 if (flag_expensive_optimizations && n_reloads > 1)
1410 {
1411 rtx set = single_set (insn);
1412 if (set
1413 && SET_SRC (set) == SET_DEST (set)
1414 && GET_CODE (SET_SRC (set)) == REG
1415 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1416 {
1417 PUT_CODE (insn, NOTE);
1418 NOTE_SOURCE_FILE (insn) = 0;
1419 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1420 continue;
1421 }
1422 }
1423 if (num_eliminable)
1424 update_eliminable_offsets ();
1e5bd841
BS
1425
1426 /* Remember for later shortcuts which insns had any reloads or
7609e720
BS
1427 register eliminations. */
1428 chain->need_elim = did_elimination;
03acd8f8
BS
1429 chain->need_reload = n_reloads > 0;
1430 chain->need_operand_change = operands_changed;
1e5bd841
BS
1431
1432 /* Discard any register replacements done. */
1433 if (did_elimination)
1434 {
1435 obstack_free (&reload_obstack, reload_firstobj);
1436 PATTERN (insn) = old_body;
1437 INSN_CODE (insn) = old_code;
1438 REG_NOTES (insn) = old_notes;
1439 something_needs_elimination = 1;
1440 }
1441
cb2afeb3
R
1442 something_needs_operands_changed |= operands_changed;
1443
437a710d 1444 if (n_reloads != 0)
7609e720
BS
1445 {
1446 *pprev_reload = chain;
1447 pprev_reload = &chain->next_need_reload;
03acd8f8
BS
1448
1449 calculate_needs (chain);
7609e720 1450 }
1e5bd841 1451 }
1e5bd841 1452 }
7609e720 1453 *pprev_reload = 0;
1e5bd841
BS
1454}
1455
03acd8f8
BS
1456/* Compute the maximum number of additional registers needed by one instruction,
1457 given by CHAIN. Collect information separately for each class of regs.
1458
1459 To compute the number of reload registers of each class needed for an
1460 insn, we must simulate what choose_reload_regs can do. We do this by
1461 splitting an insn into an "input" and an "output" part. RELOAD_OTHER
1462 reloads are used in both. The input part uses those reloads,
1463 RELOAD_FOR_INPUT reloads, which must be live over the entire input section
1464 of reloads, and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1465 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the inputs.
1466
1467 The registers needed for output are RELOAD_OTHER and RELOAD_FOR_OUTPUT,
1468 which are live for the entire output portion, and the maximum of all the
1469 RELOAD_FOR_OUTPUT_ADDRESS reloads for each operand.
1e5bd841
BS
1470
1471 The total number of registers needed is the maximum of the
1472 inputs and outputs. */
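/* Illustrative example (added for exposition, not from the original
   sources): suppose that for one class an insn has 2 RELOAD_FOR_INPUT
   reloads, 1 RELOAD_FOR_INPUT_ADDRESS reload for operand 0, and
   1 RELOAD_FOR_OUTPUT reload, with no RELOAD_OTHER, RELOAD_FOR_INSN or
   RELOAD_FOR_OPERAND_ADDRESS reloads.  The input portion then needs
   MAX (2 + 0 + 0, 1 + 2) = 3 registers, the output portion needs 1,
   and the insn as a whole needs MAX (3, 1) = 3 registers of that class.  */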
1473
03acd8f8
BS
1474static void
1475calculate_needs (chain)
7609e720 1476 struct insn_chain *chain;
1e5bd841 1477{
1e5bd841
BS
1478 int i;
1479
1e5bd841
BS
1480 /* Each `struct needs' corresponds to one RELOAD_... type. */
1481 struct {
1482 struct needs other;
1483 struct needs input;
1484 struct needs output;
1485 struct needs insn;
1486 struct needs other_addr;
1487 struct needs op_addr;
1488 struct needs op_addr_reload;
1489 struct needs in_addr[MAX_RECOG_OPERANDS];
1490 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1491 struct needs out_addr[MAX_RECOG_OPERANDS];
1492 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1493 } insn_needs;
1494
03acd8f8
BS
1495 bzero ((char *) chain->group_size, sizeof chain->group_size);
1496 for (i = 0; i < N_REG_CLASSES; i++)
1497 chain->group_mode[i] = VOIDmode;
1e5bd841
BS
1498 bzero ((char *) &insn_needs, sizeof insn_needs);
1499
1500 /* Count each reload once in every class
1501 containing the reload's own class. */
1502
1503 for (i = 0; i < n_reloads; i++)
1504 {
1505 register enum reg_class *p;
eceef4c9 1506 enum reg_class class = rld[i].class;
1e5bd841
BS
1507 int size;
1508 enum machine_mode mode;
1509 struct needs *this_needs;
1510
1511 /* Don't count the dummy reloads, for which one of the
1512 regs mentioned in the insn can be used for reloading.
1513 Don't count optional reloads.
1514 Don't count reloads that got combined with others. */
eceef4c9
BS
1515 if (rld[i].reg_rtx != 0
1516 || rld[i].optional != 0
1517 || (rld[i].out == 0 && rld[i].in == 0
1518 && ! rld[i].secondary_p))
1e5bd841
BS
1519 continue;
1520
eceef4c9
BS
1521 mode = rld[i].inmode;
1522 if (GET_MODE_SIZE (rld[i].outmode) > GET_MODE_SIZE (mode))
1523 mode = rld[i].outmode;
1e5bd841
BS
1524 size = CLASS_MAX_NREGS (class, mode);
1525
1526 /* Decide which time-of-use to count this reload for. */
eceef4c9 1527 switch (rld[i].when_needed)
1e5bd841
BS
1528 {
1529 case RELOAD_OTHER:
1530 this_needs = &insn_needs.other;
1531 break;
1532 case RELOAD_FOR_INPUT:
1533 this_needs = &insn_needs.input;
1534 break;
1535 case RELOAD_FOR_OUTPUT:
1536 this_needs = &insn_needs.output;
1537 break;
1538 case RELOAD_FOR_INSN:
1539 this_needs = &insn_needs.insn;
1540 break;
1541 case RELOAD_FOR_OTHER_ADDRESS:
1542 this_needs = &insn_needs.other_addr;
1543 break;
1544 case RELOAD_FOR_INPUT_ADDRESS:
eceef4c9 1545 this_needs = &insn_needs.in_addr[rld[i].opnum];
1e5bd841
BS
1546 break;
1547 case RELOAD_FOR_INPADDR_ADDRESS:
eceef4c9 1548 this_needs = &insn_needs.in_addr_addr[rld[i].opnum];
1e5bd841
BS
1549 break;
1550 case RELOAD_FOR_OUTPUT_ADDRESS:
eceef4c9 1551 this_needs = &insn_needs.out_addr[rld[i].opnum];
1e5bd841
BS
1552 break;
1553 case RELOAD_FOR_OUTADDR_ADDRESS:
eceef4c9 1554 this_needs = &insn_needs.out_addr_addr[rld[i].opnum];
1e5bd841
BS
1555 break;
1556 case RELOAD_FOR_OPERAND_ADDRESS:
1557 this_needs = &insn_needs.op_addr;
1558 break;
1559 case RELOAD_FOR_OPADDR_ADDR:
1560 this_needs = &insn_needs.op_addr_reload;
1561 break;
973838fd
KG
1562 default:
 1563	  abort ();
1e5bd841
BS
1564 }
1565
1566 if (size > 1)
1567 {
1568 enum machine_mode other_mode, allocate_mode;
1569
1570 /* Count number of groups needed separately from
1571 number of individual regs needed. */
1572 this_needs->groups[(int) class]++;
1573 p = reg_class_superclasses[(int) class];
1574 while (*p != LIM_REG_CLASSES)
1575 this_needs->groups[(int) *p++]++;
1576
1577 /* Record size and mode of a group of this class. */
1578 /* If more than one size group is needed,
1579 make all groups the largest needed size. */
03acd8f8 1580 if (chain->group_size[(int) class] < size)
1e5bd841 1581 {
03acd8f8 1582 other_mode = chain->group_mode[(int) class];
1e5bd841
BS
1583 allocate_mode = mode;
1584
03acd8f8
BS
1585 chain->group_size[(int) class] = size;
1586 chain->group_mode[(int) class] = mode;
1e5bd841
BS
1587 }
1588 else
1589 {
1590 other_mode = mode;
03acd8f8 1591 allocate_mode = chain->group_mode[(int) class];
1e5bd841
BS
1592 }
1593
1594 /* Crash if two dissimilar machine modes both need
1595 groups of consecutive regs of the same class. */
1596
1597 if (other_mode != VOIDmode && other_mode != allocate_mode
1598 && ! modes_equiv_for_class_p (allocate_mode,
1599 other_mode, class))
1600 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
03acd8f8 1601 chain->insn);
1e5bd841
BS
1602 }
1603 else if (size == 1)
1604 {
eceef4c9 1605 this_needs->regs[(unsigned char)rld[i].nongroup][(int) class] += 1;
1e5bd841
BS
1606 p = reg_class_superclasses[(int) class];
1607 while (*p != LIM_REG_CLASSES)
eceef4c9 1608 this_needs->regs[(unsigned char)rld[i].nongroup][(int) *p++] += 1;
1e5bd841
BS
1609 }
1610 else
1611 abort ();
1612 }
1613
1614 /* All reloads have been counted for this insn;
1615 now merge the various times of use.
1616 This sets insn_needs, etc., to the maximum total number
1617 of registers needed at any point in this insn. */
1618
1619 for (i = 0; i < N_REG_CLASSES; i++)
1620 {
1621 int j, in_max, out_max;
1622
1623 /* Compute normal and nongroup needs. */
1624 for (j = 0; j <= 1; j++)
1625 {
1626 int k;
1627 for (in_max = 0, out_max = 0, k = 0; k < reload_n_operands; k++)
1628 {
1629 in_max = MAX (in_max,
1630 (insn_needs.in_addr[k].regs[j][i]
1631 + insn_needs.in_addr_addr[k].regs[j][i]));
1632 out_max = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1633 out_max = MAX (out_max,
1634 insn_needs.out_addr_addr[k].regs[j][i]);
1635 }
1636
1637 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1638 and operand addresses but not things used to reload
1639 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1640 don't conflict with things needed to reload inputs or
1641 outputs. */
1642
1643 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1644 insn_needs.op_addr_reload.regs[j][i]),
1645 in_max);
1646
1647 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1648
1649 insn_needs.input.regs[j][i]
1650 = MAX (insn_needs.input.regs[j][i]
1651 + insn_needs.op_addr.regs[j][i]
1652 + insn_needs.insn.regs[j][i],
1653 in_max + insn_needs.input.regs[j][i]);
1654
1655 insn_needs.output.regs[j][i] += out_max;
1656 insn_needs.other.regs[j][i]
1657 += MAX (MAX (insn_needs.input.regs[j][i],
1658 insn_needs.output.regs[j][i]),
1659 insn_needs.other_addr.regs[j][i]);
1660
1661 }
1662
1663 /* Now compute group needs. */
1664 for (in_max = 0, out_max = 0, j = 0; j < reload_n_operands; j++)
1665 {
1666 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1667 in_max = MAX (in_max, insn_needs.in_addr_addr[j].groups[i]);
1668 out_max = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1669 out_max = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1670 }
1671
1672 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1673 insn_needs.op_addr_reload.groups[i]),
1674 in_max);
1675 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1676
1677 insn_needs.input.groups[i]
1678 = MAX (insn_needs.input.groups[i]
1679 + insn_needs.op_addr.groups[i]
1680 + insn_needs.insn.groups[i],
1681 in_max + insn_needs.input.groups[i]);
1682
1683 insn_needs.output.groups[i] += out_max;
1684 insn_needs.other.groups[i]
1685 += MAX (MAX (insn_needs.input.groups[i],
1686 insn_needs.output.groups[i]),
1687 insn_needs.other_addr.groups[i]);
1688 }
1689
7609e720
BS
1690 /* Record the needs for later. */
1691 chain->need = insn_needs.other;
1e5bd841 1692}
03acd8f8 1693\f
1e5bd841
BS
1694/* Find a group of exactly 2 registers.
1695
1696 First try to fill out the group by spilling a single register which
1697 would allow completion of the group.
1698
1699 Then try to create a new group from a pair of registers, neither of
1700 which are explicitly used.
1701
1702 Then try to create a group from any pair of registers. */
03acd8f8
BS
1703
1704static void
1705find_tworeg_group (chain, class, dumpfile)
1706 struct insn_chain *chain;
1e5bd841
BS
1707 int class;
1708 FILE *dumpfile;
1709{
1710 int i;
1711 /* First, look for a register that will complete a group. */
1712 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1713 {
1714 int j, other;
1715
1716 j = potential_reload_regs[i];
1717 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1718 && ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1719 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1720 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
03acd8f8
BS
1721 && HARD_REGNO_MODE_OK (other, chain->group_mode[class])
1722 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, other)
1e5bd841
BS
1723 /* We don't want one part of another group.
1724 We could get "two groups" that overlap! */
03acd8f8 1725 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, other))
1e5bd841
BS
1726 || (j < FIRST_PSEUDO_REGISTER - 1
1727 && (other = j + 1, spill_reg_order[other] >= 0)
1728 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1729 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
03acd8f8
BS
1730 && HARD_REGNO_MODE_OK (j, chain->group_mode[class])
1731 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, other)
1732 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, other))))
1e5bd841
BS
1733 {
1734 register enum reg_class *p;
1735
1736 /* We have found one that will complete a group,
1737 so count off one group as provided. */
03acd8f8 1738 chain->need.groups[class]--;
1e5bd841
BS
1739 p = reg_class_superclasses[class];
1740 while (*p != LIM_REG_CLASSES)
1741 {
03acd8f8
BS
1742 if (chain->group_size [(int) *p] <= chain->group_size [class])
1743 chain->need.groups[(int) *p]--;
1e5bd841
BS
1744 p++;
1745 }
1746
1747 /* Indicate both these regs are part of a group. */
03acd8f8
BS
1748 SET_HARD_REG_BIT (chain->counted_for_groups, j);
1749 SET_HARD_REG_BIT (chain->counted_for_groups, other);
1e5bd841
BS
1750 break;
1751 }
1752 }
1753 /* We can't complete a group, so start one. */
1e5bd841
BS
1754 if (i == FIRST_PSEUDO_REGISTER)
1755 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1756 {
1757 int j, k;
1758 j = potential_reload_regs[i];
1759 /* Verify that J+1 is a potential reload reg. */
1760 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1761 if (potential_reload_regs[k] == j + 1)
1762 break;
1763 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1764 && k < FIRST_PSEUDO_REGISTER
1765 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1766 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1767 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
03acd8f8
BS
1768 && HARD_REGNO_MODE_OK (j, chain->group_mode[class])
1769 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, j + 1)
1e5bd841
BS
1770 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1771 break;
1772 }
1773
1774 /* I should be the index in potential_reload_regs
1775 of the new reload reg we have found. */
1776
03acd8f8 1777 new_spill_reg (chain, i, class, 0, dumpfile);
1e5bd841
BS
1778}
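/* Illustrative example (added for exposition, not from the original
   sources): if the class needs one group of two and hard reg 3 has
   already been chosen as a spill reg, spilling reg 2 or reg 4 completes
   the pair (subject to HARD_REGNO_MODE_OK and to the neighbor not
   already being counted for another group or for nongroups); only when
   no such neighbor exists do we spill a fresh adjacent pair.  */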
1779
1780/* Find a group of more than 2 registers.
1781 Look for a sufficient sequence of unspilled registers, and spill them all
1782 at once. */
03acd8f8
BS
1783
1784static void
1785find_group (chain, class, dumpfile)
1786 struct insn_chain *chain;
1e5bd841
BS
1787 int class;
1788 FILE *dumpfile;
1789{
1e5bd841
BS
1790 int i;
1791
1792 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1793 {
03acd8f8 1794 int j = potential_reload_regs[i];
1e5bd841 1795
1e5bd841 1796 if (j >= 0
03acd8f8
BS
1797 && j + chain->group_size[class] <= FIRST_PSEUDO_REGISTER
1798 && HARD_REGNO_MODE_OK (j, chain->group_mode[class]))
1e5bd841 1799 {
03acd8f8 1800 int k;
1e5bd841 1801 /* Check each reg in the sequence. */
03acd8f8 1802 for (k = 0; k < chain->group_size[class]; k++)
1e5bd841
BS
1803 if (! (spill_reg_order[j + k] < 0
1804 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1805 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1806 break;
1807 /* We got a full sequence, so spill them all. */
03acd8f8 1808 if (k == chain->group_size[class])
1e5bd841
BS
1809 {
1810 register enum reg_class *p;
03acd8f8 1811 for (k = 0; k < chain->group_size[class]; k++)
1e5bd841
BS
1812 {
1813 int idx;
03acd8f8 1814 SET_HARD_REG_BIT (chain->counted_for_groups, j + k);
1e5bd841
BS
1815 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1816 if (potential_reload_regs[idx] == j + k)
1817 break;
03acd8f8 1818 new_spill_reg (chain, idx, class, 0, dumpfile);
1e5bd841
BS
1819 }
1820
1821 /* We have found one that will complete a group,
1822 so count off one group as provided. */
03acd8f8 1823 chain->need.groups[class]--;
1e5bd841
BS
1824 p = reg_class_superclasses[class];
1825 while (*p != LIM_REG_CLASSES)
1826 {
03acd8f8
BS
1827 if (chain->group_size [(int) *p]
1828 <= chain->group_size [class])
1829 chain->need.groups[(int) *p]--;
1e5bd841
BS
1830 p++;
1831 }
03acd8f8 1832 return;
1e5bd841
BS
1833 }
1834 }
1835 }
1836 /* There are no groups left. */
03acd8f8 1837 spill_failure (chain->insn);
1e5bd841 1838 failure = 1;
1e5bd841
BS
1839}
1840
03acd8f8
BS
1841/* If pseudo REG conflicts with one of our reload registers, mark it as
1842 spilled. */
1843static void
1844maybe_mark_pseudo_spilled (reg)
1845 int reg;
1846{
1847 int i;
1848 int r = reg_renumber[reg];
1849 int nregs;
1850
1851 if (r < 0)
1852 abort ();
1853 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
1854 for (i = 0; i < n_spills; i++)
1855 if (r <= spill_regs[i] && r + nregs > spill_regs[i])
1856 {
1857 SET_REGNO_REG_SET (spilled_pseudos, reg);
1858 return;
1859 }
1860}
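/* A minimal standalone sketch (added for exposition; the function below
   is hypothetical and not part of reload) of the overlap test used above:
   a pseudo living in hard regs START .. START+NREGS-1 conflicts with a
   spill reg R exactly when START <= R < START + NREGS.  */
#if 0
static int
example_overlaps_spill_reg (start, nregs, spill_reg)
     int start, nregs, spill_reg;
{
  return start <= spill_reg && start + nregs > spill_reg;
}
#endif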
1861
1862/* Find more reload regs to satisfy the remaining need of an insn, which
1863 is given by CHAIN.
1e5bd841
BS
1864 Do it by ascending class number, since otherwise a reg
1865 might be spilled for a big class and might fail to count
1866 for a smaller class even though it belongs to that class.
1867
1868 Count spilled regs in `spills', and add entries to
1869 `spill_regs' and `spill_reg_order'.
1870
1871 ??? Note there is a problem here.
1872 When there is a need for a group in a high-numbered class,
1873 and also need for non-group regs that come from a lower class,
1874 the non-group regs are chosen first. If there aren't many regs,
1875 they might leave no room for a group.
1876
1877 This was happening on the 386. To fix it, we added the code
1878 that calls possible_group_p, so that the lower class won't
1879 break up the last possible group.
1880
1881 Really fixing the problem would require changes above
1882 in counting the regs already spilled, and in choose_reload_regs.
1883 It might be hard to avoid introducing bugs there. */
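/* Illustrative scenario (added for exposition, not from the original
   sources): if a class needs one single reg and one group of two, and
   only regs 0 and 1 remain available, taking reg 1 for the single need
   first would leave no adjacent pair for the group.  possible_group_p
   is what lets us prefer a single reg that does not break up the last
   such pair.  */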
1884
03acd8f8
BS
1885static void
1886find_reload_regs (chain, dumpfile)
1887 struct insn_chain *chain;
1e5bd841
BS
1888 FILE *dumpfile;
1889{
03acd8f8
BS
1890 int i, class;
1891 short *group_needs = chain->need.groups;
1892 short *simple_needs = chain->need.regs[0];
1893 short *nongroup_needs = chain->need.regs[1];
1894
1895 if (dumpfile)
1896 fprintf (dumpfile, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1897
1898 /* Compute the order of preference for hard registers to spill.
1899 Store them by decreasing preference in potential_reload_regs. */
1900
1901 order_regs_for_reload (chain);
1902
1903 /* So far, no hard regs have been spilled. */
1904 n_spills = 0;
1905 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1906 spill_reg_order[i] = -1;
1e5bd841 1907
03acd8f8
BS
1908 CLEAR_HARD_REG_SET (chain->used_spill_regs);
1909 CLEAR_HARD_REG_SET (chain->counted_for_groups);
1910 CLEAR_HARD_REG_SET (chain->counted_for_nongroups);
1e5bd841
BS
1911
1912 for (class = 0; class < N_REG_CLASSES; class++)
1913 {
1914 /* First get the groups of registers.
1915 If we got single registers first, we might fragment
1916 possible groups. */
03acd8f8 1917 while (group_needs[class] > 0)
1e5bd841
BS
1918 {
1919 /* If any single spilled regs happen to form groups,
1920 count them now. Maybe we don't really need
1921 to spill another group. */
03acd8f8 1922 count_possible_groups (chain, class);
1e5bd841 1923
03acd8f8 1924 if (group_needs[class] <= 0)
1e5bd841
BS
1925 break;
1926
03acd8f8 1927 /* Groups of size 2, the only groups used on most machines,
1e5bd841 1928 are treated specially. */
03acd8f8
BS
1929 if (chain->group_size[class] == 2)
1930 find_tworeg_group (chain, class, dumpfile);
1e5bd841 1931 else
03acd8f8 1932 find_group (chain, class, dumpfile);
1e5bd841 1933 if (failure)
03acd8f8 1934 return;
1e5bd841
BS
1935 }
1936
1937 /* Now similarly satisfy all need for single registers. */
1938
03acd8f8 1939 while (simple_needs[class] > 0 || nongroup_needs[class] > 0)
1e5bd841 1940 {
1e5bd841
BS
1941 /* If we spilled enough regs, but they weren't counted
1942 against the non-group need, see if we can count them now.
1943 If so, we can avoid some actual spilling. */
03acd8f8 1944 if (simple_needs[class] <= 0 && nongroup_needs[class] > 0)
1e5bd841
BS
1945 for (i = 0; i < n_spills; i++)
1946 {
1947 int regno = spill_regs[i];
1948 if (TEST_HARD_REG_BIT (reg_class_contents[class], regno)
03acd8f8
BS
1949 && !TEST_HARD_REG_BIT (chain->counted_for_groups, regno)
1950 && !TEST_HARD_REG_BIT (chain->counted_for_nongroups, regno)
1951 && nongroup_needs[class] > 0)
1952 {
1953 register enum reg_class *p;
1e5bd841 1954
03acd8f8
BS
1955 SET_HARD_REG_BIT (chain->counted_for_nongroups, regno);
1956 nongroup_needs[class]--;
1957 p = reg_class_superclasses[class];
1958 while (*p != LIM_REG_CLASSES)
1959 nongroup_needs[(int) *p++]--;
1960 }
1e5bd841 1961 }
03acd8f8
BS
1962
1963 if (simple_needs[class] <= 0 && nongroup_needs[class] <= 0)
1e5bd841
BS
1964 break;
1965
1966 /* Consider the potential reload regs that aren't
1967 yet in use as reload regs, in order of preference.
1968 Find the most preferred one that's in this class. */
1969
1970 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1971 {
1972 int regno = potential_reload_regs[i];
1973 if (regno >= 0
1974 && TEST_HARD_REG_BIT (reg_class_contents[class], regno)
1975 /* If this reg will not be available for groups,
1976 pick one that does not foreclose possible groups.
1977 This is a kludge, and not very general,
1978 but it should be sufficient to make the 386 work,
1979 and the problem should not occur on machines with
1980 more registers. */
03acd8f8
BS
1981 && (nongroup_needs[class] == 0
1982 || possible_group_p (chain, regno)))
1e5bd841
BS
1983 break;
1984 }
1985
1986 /* If we couldn't get a register, try to get one even if we
1987 might foreclose possible groups. This may cause problems
1988 later, but that's better than aborting now, since it is
1989 possible that we will, in fact, be able to form the needed
1990 group even with this allocation. */
1991
1992 if (i >= FIRST_PSEUDO_REGISTER
03acd8f8 1993 && asm_noperands (chain->insn) < 0)
1e5bd841
BS
1994 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1995 if (potential_reload_regs[i] >= 0
1996 && TEST_HARD_REG_BIT (reg_class_contents[class],
1997 potential_reload_regs[i]))
1998 break;
1999
2000 /* I should be the index in potential_reload_regs
2001 of the new reload reg we have found. */
2002
03acd8f8
BS
2003 new_spill_reg (chain, i, class, 1, dumpfile);
2004 if (failure)
2005 return;
1e5bd841
BS
2006 }
2007 }
05d10675 2008
03acd8f8
BS
 2009	  /* We know which hard regs to use; now mark the pseudos that live in them
2010 as needing to be kicked out. */
2011 EXECUTE_IF_SET_IN_REG_SET
2012 (chain->live_before, FIRST_PSEUDO_REGISTER, i,
2013 {
2014 maybe_mark_pseudo_spilled (i);
2015 });
2016 EXECUTE_IF_SET_IN_REG_SET
2017 (chain->live_after, FIRST_PSEUDO_REGISTER, i,
2018 {
2019 maybe_mark_pseudo_spilled (i);
2020 });
2021
2022 IOR_HARD_REG_SET (used_spill_regs, chain->used_spill_regs);
1e5bd841
BS
2023}
2024
03acd8f8
BS
2025void
2026dump_needs (chain, dumpfile)
2027 struct insn_chain *chain;
09dd1133
BS
2028 FILE *dumpfile;
2029{
a4ec8d12 2030 static const char * const reg_class_names[] = REG_CLASS_NAMES;
09dd1133 2031 int i;
03acd8f8 2032 struct needs *n = &chain->need;
09dd1133
BS
2033
2034 for (i = 0; i < N_REG_CLASSES; i++)
2035 {
03acd8f8 2036 if (n->regs[i][0] > 0)
09dd1133 2037 fprintf (dumpfile,
03acd8f8
BS
2038 ";; Need %d reg%s of class %s.\n",
2039 n->regs[i][0], n->regs[i][0] == 1 ? "" : "s",
2040 reg_class_names[i]);
2041 if (n->regs[i][1] > 0)
09dd1133 2042 fprintf (dumpfile,
03acd8f8
BS
2043 ";; Need %d nongroup reg%s of class %s.\n",
2044 n->regs[i][1], n->regs[i][1] == 1 ? "" : "s",
2045 reg_class_names[i]);
2046 if (n->groups[i] > 0)
09dd1133 2047 fprintf (dumpfile,
03acd8f8
BS
2048 ";; Need %d group%s (%smode) of class %s.\n",
2049 n->groups[i], n->groups[i] == 1 ? "" : "s",
a4ec8d12 2050 GET_MODE_NAME(chain->group_mode[i]),
03acd8f8 2051 reg_class_names[i]);
09dd1133
BS
2052 }
2053}
32131a9c 2054\f
437a710d
BS
2055/* Delete all insns that were inserted by emit_caller_save_insns during
2056 this iteration. */
2057static void
7609e720 2058delete_caller_save_insns ()
437a710d 2059{
7609e720 2060 struct insn_chain *c = reload_insn_chain;
437a710d 2061
7609e720 2062 while (c != 0)
437a710d 2063 {
7609e720 2064 while (c != 0 && c->is_caller_save_insn)
437a710d 2065 {
7609e720
BS
2066 struct insn_chain *next = c->next;
2067 rtx insn = c->insn;
2068
3b413743
RH
2069 if (insn == BLOCK_HEAD (c->block))
2070 BLOCK_HEAD (c->block) = NEXT_INSN (insn);
2071 if (insn == BLOCK_END (c->block))
2072 BLOCK_END (c->block) = PREV_INSN (insn);
7609e720
BS
2073 if (c == reload_insn_chain)
2074 reload_insn_chain = next;
2075
2076 if (NEXT_INSN (insn) != 0)
2077 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2078 if (PREV_INSN (insn) != 0)
2079 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2080
2081 if (next)
2082 next->prev = c->prev;
2083 if (c->prev)
2084 c->prev->next = next;
2085 c->next = unused_insn_chains;
2086 unused_insn_chains = c;
2087 c = next;
437a710d 2088 }
7609e720
BS
2089 if (c != 0)
2090 c = c->next;
437a710d
BS
2091 }
2092}
2093\f
32131a9c
RK
2094/* Nonzero if, after spilling reg REGNO for non-groups,
2095 it will still be possible to find a group if we still need one. */
2096
2097static int
03acd8f8
BS
2098possible_group_p (chain, regno)
2099 struct insn_chain *chain;
32131a9c 2100 int regno;
32131a9c
RK
2101{
2102 int i;
2103 int class = (int) NO_REGS;
2104
2105 for (i = 0; i < (int) N_REG_CLASSES; i++)
03acd8f8 2106 if (chain->need.groups[i] > 0)
32131a9c
RK
2107 {
2108 class = i;
2109 break;
2110 }
2111
2112 if (class == (int) NO_REGS)
2113 return 1;
2114
2115 /* Consider each pair of consecutive registers. */
2116 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2117 {
2118 /* Ignore pairs that include reg REGNO. */
2119 if (i == regno || i + 1 == regno)
2120 continue;
2121
2122 /* Ignore pairs that are outside the class that needs the group.
2123 ??? Here we fail to handle the case where two different classes
2124 independently need groups. But this never happens with our
2125 current machine descriptions. */
2126 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2127 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2128 continue;
2129
2130 /* A pair of consecutive regs we can still spill does the trick. */
2131 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2132 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2133 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2134 return 1;
2135
2136 /* A pair of one already spilled and one we can spill does it
2137 provided the one already spilled is not otherwise reserved. */
2138 if (spill_reg_order[i] < 0
2139 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2140 && spill_reg_order[i + 1] >= 0
03acd8f8
BS
2141 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, i + 1)
2142 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, i + 1))
32131a9c
RK
2143 return 1;
2144 if (spill_reg_order[i + 1] < 0
2145 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2146 && spill_reg_order[i] >= 0
03acd8f8
BS
2147 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, i)
2148 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, i))
32131a9c
RK
2149 return 1;
2150 }
2151
2152 return 0;
2153}
03acd8f8 2154
066aca28
RK
2155/* Count any groups of CLASS that can be formed from the registers recently
2156 spilled. */
32131a9c
RK
2157
2158static void
03acd8f8
BS
2159count_possible_groups (chain, class)
2160 struct insn_chain *chain;
066aca28 2161 int class;
32131a9c 2162{
066aca28
RK
2163 HARD_REG_SET new;
2164 int i, j;
2165
32131a9c
RK
2166 /* Now find all consecutive groups of spilled registers
2167 and mark each group off against the need for such groups.
2168 But don't count them against ordinary need, yet. */
2169
03acd8f8 2170 if (chain->group_size[class] == 0)
066aca28
RK
2171 return;
2172
2173 CLEAR_HARD_REG_SET (new);
2174
 2175	  /* Make a mask of all the regs that are spill regs in CLASS.  */
2176 for (i = 0; i < n_spills; i++)
03acd8f8
BS
2177 {
2178 int regno = spill_regs[i];
2179
2180 if (TEST_HARD_REG_BIT (reg_class_contents[class], regno)
2181 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, regno)
2182 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, regno))
2183 SET_HARD_REG_BIT (new, regno);
2184 }
066aca28
RK
2185
2186 /* Find each consecutive group of them. */
03acd8f8 2187 for (i = 0; i < FIRST_PSEUDO_REGISTER && chain->need.groups[class] > 0; i++)
066aca28 2188 if (TEST_HARD_REG_BIT (new, i)
03acd8f8
BS
2189 && i + chain->group_size[class] <= FIRST_PSEUDO_REGISTER
2190 && HARD_REGNO_MODE_OK (i, chain->group_mode[class]))
32131a9c 2191 {
03acd8f8 2192 for (j = 1; j < chain->group_size[class]; j++)
066aca28
RK
2193 if (! TEST_HARD_REG_BIT (new, i + j))
2194 break;
32131a9c 2195
03acd8f8 2196 if (j == chain->group_size[class])
066aca28
RK
2197 {
2198 /* We found a group. Mark it off against this class's need for
2199 groups, and against each superclass too. */
2200 register enum reg_class *p;
2201
03acd8f8 2202 chain->need.groups[class]--;
066aca28
RK
2203 p = reg_class_superclasses[class];
2204 while (*p != LIM_REG_CLASSES)
d601d5da 2205 {
03acd8f8
BS
2206 if (chain->group_size [(int) *p] <= chain->group_size [class])
2207 chain->need.groups[(int) *p]--;
d601d5da
JW
2208 p++;
2209 }
066aca28
RK
2210
2211 /* Don't count these registers again. */
03acd8f8
BS
2212 for (j = 0; j < chain->group_size[class]; j++)
2213 SET_HARD_REG_BIT (chain->counted_for_groups, i + j);
066aca28
RK
2214 }
2215
2216 /* Skip to the last reg in this group. When i is incremented above,
2217 it will then point to the first reg of the next possible group. */
2218 i += j - 1;
2219 }
32131a9c
RK
2220}
2221\f
2222/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2223 another mode that needs to be reloaded for the same register class CLASS.
2224 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2225 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2226
2227 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2228 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2229 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2230 causes unnecessary failures on machines requiring alignment of register
2231 groups when the two modes are different sizes, because the larger mode has
2232 more strict alignment rules than the smaller mode. */
2233
2234static int
2235modes_equiv_for_class_p (allocate_mode, other_mode, class)
2236 enum machine_mode allocate_mode, other_mode;
2237 enum reg_class class;
2238{
2239 register int regno;
2240 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2241 {
2242 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2243 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2244 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2245 return 0;
2246 }
2247 return 1;
2248}
03acd8f8 2249\f
5352b11a
RS
2250/* Handle the failure to find a register to spill.
2251 INSN should be one of the insns which needed this particular spill reg. */
2252
2253static void
2254spill_failure (insn)
2255 rtx insn;
2256{
2257 if (asm_noperands (PATTERN (insn)) >= 0)
2258 error_for_asm (insn, "`asm' needs too many reloads");
2259 else
a89b2cc4 2260 fatal_insn ("Unable to find a register to spill.", insn);
5352b11a
RS
2261}
2262
03acd8f8
BS
2263/* Add a new register to the tables of available spill-registers.
2264 CHAIN is the insn for which the register will be used; we decrease the
2265 needs of that insn.
32131a9c
RK
2266 I is the index of this register in potential_reload_regs.
2267 CLASS is the regclass whose need is being satisfied.
03acd8f8
BS
2268 NONGROUP is 0 if this register is part of a group.
2269 DUMPFILE is the same as the one that `reload' got. */
32131a9c 2270
03acd8f8
BS
2271static void
2272new_spill_reg (chain, i, class, nongroup, dumpfile)
2273 struct insn_chain *chain;
32131a9c
RK
2274 int i;
2275 int class;
03acd8f8 2276 int nongroup;
32131a9c
RK
2277 FILE *dumpfile;
2278{
2279 register enum reg_class *p;
32131a9c
RK
2280 int regno = potential_reload_regs[i];
2281
2282 if (i >= FIRST_PSEUDO_REGISTER)
03acd8f8
BS
2283 {
2284 spill_failure (chain->insn);
2285 failure = 1;
2286 return;
2287 }
32131a9c 2288
03acd8f8 2289 if (TEST_HARD_REG_BIT (bad_spill_regs, regno))
da275344 2290 {
a4ec8d12 2291 static const char * const reg_class_names[] = REG_CLASS_NAMES;
03acd8f8
BS
2292
2293 if (asm_noperands (PATTERN (chain->insn)) < 0)
2294 {
05d10675
BS
2295 /* The error message is still correct - we know only that it wasn't
2296 an asm statement that caused the problem, but one of the global
2297 registers declared by the users might have screwed us. */
03acd8f8
BS
2298 error ("fixed or forbidden register %d (%s) was spilled for class %s.",
2299 regno, reg_names[regno], reg_class_names[class]);
2300 error ("This may be due to a compiler bug or to impossible asm");
2301 error ("statements or clauses.");
2302 fatal_insn ("This is the instruction:", chain->insn);
2303 }
2304 error_for_asm (chain->insn, "Invalid `asm' statement:");
2305 error_for_asm (chain->insn,
2306 "fixed or forbidden register %d (%s) was spilled for class %s.",
2307 regno, reg_names[regno], reg_class_names[class]);
2308 failure = 1;
2309 return;
da275344 2310 }
32131a9c
RK
2311
2312 /* Make reg REGNO an additional reload reg. */
2313
2314 potential_reload_regs[i] = -1;
2315 spill_regs[n_spills] = regno;
2316 spill_reg_order[regno] = n_spills;
2317 if (dumpfile)
03acd8f8
BS
2318 fprintf (dumpfile, "Spilling reg %d.\n", regno);
2319 SET_HARD_REG_BIT (chain->used_spill_regs, regno);
32131a9c
RK
2320
2321 /* Clear off the needs we just satisfied. */
2322
03acd8f8 2323 chain->need.regs[0][class]--;
32131a9c
RK
2324 p = reg_class_superclasses[class];
2325 while (*p != LIM_REG_CLASSES)
03acd8f8 2326 chain->need.regs[0][(int) *p++]--;
32131a9c 2327
03acd8f8 2328 if (nongroup && chain->need.regs[1][class] > 0)
32131a9c 2329 {
03acd8f8
BS
2330 SET_HARD_REG_BIT (chain->counted_for_nongroups, regno);
2331 chain->need.regs[1][class]--;
32131a9c
RK
2332 p = reg_class_superclasses[class];
2333 while (*p != LIM_REG_CLASSES)
03acd8f8 2334 chain->need.regs[1][(int) *p++]--;
32131a9c
RK
2335 }
2336
32131a9c 2337 n_spills++;
32131a9c
RK
2338}
2339\f
2340/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2341 data that is dead in INSN. */
2342
2343static void
2344delete_dead_insn (insn)
2345 rtx insn;
2346{
2347 rtx prev = prev_real_insn (insn);
2348 rtx prev_dest;
2349
2350 /* If the previous insn sets a register that dies in our insn, delete it
2351 too. */
2352 if (prev && GET_CODE (PATTERN (prev)) == SET
2353 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2354 && reg_mentioned_p (prev_dest, PATTERN (insn))
b294ca38
R
2355 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2356 && ! side_effects_p (SET_SRC (PATTERN (prev))))
32131a9c
RK
2357 delete_dead_insn (prev);
2358
2359 PUT_CODE (insn, NOTE);
2360 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2361 NOTE_SOURCE_FILE (insn) = 0;
2362}
2363
2364/* Modify the home of pseudo-reg I.
2365 The new home is present in reg_renumber[I].
2366
2367 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2368 or it may be -1, meaning there is none or it is not relevant.
2369 This is used so that all pseudos spilled from a given hard reg
2370 can share one stack slot. */
2371
2372static void
2373alter_reg (i, from_reg)
2374 register int i;
2375 int from_reg;
2376{
2377 /* When outputting an inline function, this can happen
2378 for a reg that isn't actually used. */
2379 if (regno_reg_rtx[i] == 0)
2380 return;
2381
2382 /* If the reg got changed to a MEM at rtl-generation time,
2383 ignore it. */
2384 if (GET_CODE (regno_reg_rtx[i]) != REG)
2385 return;
2386
2387 /* Modify the reg-rtx to contain the new hard reg
2388 number or else to contain its pseudo reg number. */
2389 REGNO (regno_reg_rtx[i])
2390 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2391
2392 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2393 allocate a stack slot for it. */
2394
2395 if (reg_renumber[i] < 0
b1f21e0a 2396 && REG_N_REFS (i) > 0
32131a9c
RK
2397 && reg_equiv_constant[i] == 0
2398 && reg_equiv_memory_loc[i] == 0)
2399 {
2400 register rtx x;
2401 int inherent_size = PSEUDO_REGNO_BYTES (i);
2402 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2403 int adjust = 0;
2404
2405 /* Each pseudo reg has an inherent size which comes from its own mode,
2406 and a total size which provides room for paradoxical subregs
2407 which refer to the pseudo reg in wider modes.
2408
2409 We can use a slot already allocated if it provides both
2410 enough inherent space and enough total space.
2411 Otherwise, we allocate a new slot, making sure that it has no less
 2412	 inherent space, and no less total space, than the previous slot.  */
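      /* Illustrative example (added for exposition, not from the original
	 sources): an SImode pseudo that is also referenced through a
	 paradoxical DImode subreg has inherent_size 4 and total_size 8,
	 so a previously allocated 4-byte slot cannot be reused and an
	 8-byte slot is made; on a big-endian machine the adjust offset
	 then picks out the low part that fits the inherent mode.  */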
2413 if (from_reg == -1)
2414 {
2415 /* No known place to spill from => no slot to reuse. */
cabcf079
ILT
2416 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
2417 inherent_size == total_size ? 0 : -1);
f76b9db2 2418 if (BYTES_BIG_ENDIAN)
02db8dd0
RK
2419 /* Cancel the big-endian correction done in assign_stack_local.
2420 Get the address of the beginning of the slot.
2421 This is so we can do a big-endian correction unconditionally
2422 below. */
2423 adjust = inherent_size - total_size;
2424
2425 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
32131a9c
RK
2426 }
2427 /* Reuse a stack slot if possible. */
2428 else if (spill_stack_slot[from_reg] != 0
2429 && spill_stack_slot_width[from_reg] >= total_size
2430 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2431 >= inherent_size))
2432 x = spill_stack_slot[from_reg];
2433 /* Allocate a bigger slot. */
2434 else
2435 {
2436 /* Compute maximum size needed, both for inherent size
2437 and for total size. */
2438 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
4f2d3674 2439 rtx stack_slot;
32131a9c
RK
2440 if (spill_stack_slot[from_reg])
2441 {
2442 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2443 > inherent_size)
2444 mode = GET_MODE (spill_stack_slot[from_reg]);
2445 if (spill_stack_slot_width[from_reg] > total_size)
2446 total_size = spill_stack_slot_width[from_reg];
2447 }
2448 /* Make a slot with that size. */
cabcf079
ILT
2449 x = assign_stack_local (mode, total_size,
2450 inherent_size == total_size ? 0 : -1);
4f2d3674 2451 stack_slot = x;
f76b9db2
ILT
2452 if (BYTES_BIG_ENDIAN)
2453 {
2454 /* Cancel the big-endian correction done in assign_stack_local.
2455 Get the address of the beginning of the slot.
2456 This is so we can do a big-endian correction unconditionally
2457 below. */
2458 adjust = GET_MODE_SIZE (mode) - total_size;
4f2d3674 2459 if (adjust)
38a448ca
RH
2460 stack_slot = gen_rtx_MEM (mode_for_size (total_size
2461 * BITS_PER_UNIT,
2462 MODE_INT, 1),
05d10675 2463 plus_constant (XEXP (x, 0), adjust));
f76b9db2 2464 }
4f2d3674 2465 spill_stack_slot[from_reg] = stack_slot;
32131a9c
RK
2466 spill_stack_slot_width[from_reg] = total_size;
2467 }
2468
32131a9c
RK
2469 /* On a big endian machine, the "address" of the slot
2470 is the address of the low part that fits its inherent mode. */
f76b9db2 2471 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
32131a9c 2472 adjust += (total_size - inherent_size);
32131a9c
RK
2473
2474 /* If we have any adjustment to make, or if the stack slot is the
2475 wrong mode, make a new stack slot. */
2476 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2477 {
38a448ca 2478 x = gen_rtx_MEM (GET_MODE (regno_reg_rtx[i]),
05d10675 2479 plus_constant (XEXP (x, 0), adjust));
9ec36da5
JL
2480
2481 /* If this was shared among registers, must ensure we never
2482 set it readonly since that can cause scheduling
 2483	     problems.  Note we would only be in this adjustment
2484 case in any event, since the code above doesn't set it. */
2485
2486 if (from_reg == -1)
2487 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
32131a9c
RK
2488 }
2489
2490 /* Save the stack slot for later. */
2491 reg_equiv_memory_loc[i] = x;
2492 }
2493}
2494
2495/* Mark the slots in regs_ever_live for the hard regs
2496 used by pseudo-reg number REGNO. */
2497
2498void
2499mark_home_live (regno)
2500 int regno;
2501{
2502 register int i, lim;
2503 i = reg_renumber[regno];
2504 if (i < 0)
2505 return;
2506 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2507 while (i < lim)
2508 regs_ever_live[i++] = 1;
2509}
2510\f
2511/* This function handles the tracking of elimination offsets around branches.
2512
2513 X is a piece of RTL being scanned.
2514
2515 INSN is the insn that it came from, if any.
2516
2517 INITIAL_P is non-zero if we are to set the offset to be the initial
2518 offset and zero if we are setting the offset of the label to be the
2519 current offset. */
2520
2521static void
2522set_label_offsets (x, insn, initial_p)
2523 rtx x;
2524 rtx insn;
2525 int initial_p;
2526{
2527 enum rtx_code code = GET_CODE (x);
2528 rtx tem;
e51712db 2529 unsigned int i;
32131a9c
RK
2530 struct elim_table *p;
2531
2532 switch (code)
2533 {
2534 case LABEL_REF:
8be386d9
RS
2535 if (LABEL_REF_NONLOCAL_P (x))
2536 return;
2537
32131a9c
RK
2538 x = XEXP (x, 0);
2539
0f41302f 2540 /* ... fall through ... */
32131a9c
RK
2541
2542 case CODE_LABEL:
2543 /* If we know nothing about this label, set the desired offsets. Note
2544 that this sets the offset at a label to be the offset before a label
2545 if we don't know anything about the label. This is not correct for
2546 the label after a BARRIER, but is the best guess we can make. If
2547 we guessed wrong, we will suppress an elimination that might have
2548 been possible had we been able to guess correctly. */
2549
2550 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2551 {
2552 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2553 offsets_at[CODE_LABEL_NUMBER (x)][i]
2554 = (initial_p ? reg_eliminate[i].initial_offset
2555 : reg_eliminate[i].offset);
2556 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2557 }
2558
2559 /* Otherwise, if this is the definition of a label and it is
d45cf215 2560 preceded by a BARRIER, set our offsets to the known offset of
32131a9c
RK
2561 that label. */
2562
2563 else if (x == insn
2564 && (tem = prev_nonnote_insn (insn)) != 0
2565 && GET_CODE (tem) == BARRIER)
1f3b1e1a 2566 set_offsets_for_label (insn);
32131a9c
RK
2567 else
2568 /* If neither of the above cases is true, compare each offset
2569 with those previously recorded and suppress any eliminations
2570 where the offsets disagree. */
a8fdc208 2571
32131a9c
RK
2572 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2573 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2574 != (initial_p ? reg_eliminate[i].initial_offset
2575 : reg_eliminate[i].offset))
2576 reg_eliminate[i].can_eliminate = 0;
2577
2578 return;
2579
2580 case JUMP_INSN:
2581 set_label_offsets (PATTERN (insn), insn, initial_p);
2582
0f41302f 2583 /* ... fall through ... */
32131a9c
RK
2584
2585 case INSN:
2586 case CALL_INSN:
2587 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2588 and hence must have all eliminations at their initial offsets. */
2589 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2590 if (REG_NOTE_KIND (tem) == REG_LABEL)
2591 set_label_offsets (XEXP (tem, 0), insn, 1);
2592 return;
2593
2594 case ADDR_VEC:
2595 case ADDR_DIFF_VEC:
2596 /* Each of the labels in the address vector must be at their initial
38e01259 2597 offsets. We want the first field for ADDR_VEC and the second
32131a9c
RK
2598 field for ADDR_DIFF_VEC. */
2599
e51712db 2600 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
32131a9c
RK
2601 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2602 insn, initial_p);
2603 return;
2604
2605 case SET:
2606 /* We only care about setting PC. If the source is not RETURN,
2607 IF_THEN_ELSE, or a label, disable any eliminations not at
2608 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2609 isn't one of those possibilities. For branches to a label,
2610 call ourselves recursively.
2611
2612 Note that this can disable elimination unnecessarily when we have
2613 a non-local goto since it will look like a non-constant jump to
2614 someplace in the current function. This isn't a significant
 2615	 problem since such jumps will normally occur when all elimination
2616 pairs are back to their initial offsets. */
2617
2618 if (SET_DEST (x) != pc_rtx)
2619 return;
2620
2621 switch (GET_CODE (SET_SRC (x)))
2622 {
2623 case PC:
2624 case RETURN:
2625 return;
2626
2627 case LABEL_REF:
2628 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2629 return;
2630
2631 case IF_THEN_ELSE:
2632 tem = XEXP (SET_SRC (x), 1);
2633 if (GET_CODE (tem) == LABEL_REF)
2634 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2635 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2636 break;
2637
2638 tem = XEXP (SET_SRC (x), 2);
2639 if (GET_CODE (tem) == LABEL_REF)
2640 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2641 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2642 break;
2643 return;
e9a25f70
JL
2644
2645 default:
2646 break;
32131a9c
RK
2647 }
2648
2649 /* If we reach here, all eliminations must be at their initial
2650 offset because we are doing a jump to a variable address. */
2651 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2652 if (p->offset != p->initial_offset)
2653 p->can_eliminate = 0;
e9a25f70 2654 break;
05d10675 2655
e9a25f70
JL
2656 default:
2657 break;
32131a9c
RK
2658 }
2659}
2660\f
2661/* Used for communication between the next two functions to properly share
2662 the vector for an ASM_OPERANDS. */
2663
2664static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2665
a8fdc208 2666/* Scan X and replace any eliminable registers (such as fp) with a
32131a9c
RK
2667 replacement (such as sp), plus an offset.
2668
2669 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2670 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2671 MEM, we are allowed to replace a sum of a register and the constant zero
2672 with the register, which we cannot do outside a MEM. In addition, we need
2673 to record the fact that a register is referenced outside a MEM.
2674
ff32812a 2675 If INSN is an insn, it is the insn containing X. If we replace a REG
32131a9c
RK
2676 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2677 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
38e01259 2678 the REG is being modified.
32131a9c 2679
ff32812a
RS
2680 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2681 That's used when we eliminate in expressions stored in notes.
2682 This means, do not set ref_outside_mem even if the reference
2683 is outside of MEMs.
2684
32131a9c
RK
2685 If we see a modification to a register we know about, take the
2686 appropriate action (see case SET, below).
2687
 2688   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2689 replacements done assuming all offsets are at their initial values. If
2690 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2691 encounter, return the actual location so that find_reloads will do
2692 the proper thing. */
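/* Illustrative example (added for exposition, not from the original
   sources; the offset of 16 is arbitrary): if the frame pointer is being
   eliminated in favor of the stack pointer and the current offset between
   them is 16, a bare (reg fp) becomes (plus (reg sp) (const_int 16)), and
   (plus (reg fp) (const_int 4)) becomes (plus (reg sp) (const_int 20)).  */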
2693
2694rtx
1914f5da 2695eliminate_regs (x, mem_mode, insn)
32131a9c
RK
2696 rtx x;
2697 enum machine_mode mem_mode;
2698 rtx insn;
2699{
2700 enum rtx_code code = GET_CODE (x);
2701 struct elim_table *ep;
2702 int regno;
2703 rtx new;
2704 int i, j;
6f7d635c 2705 const char *fmt;
32131a9c
RK
2706 int copied = 0;
2707
d6633f01
NS
2708 if (! current_function_decl)
2709 return x;
9969bb2c 2710
32131a9c
RK
2711 switch (code)
2712 {
2713 case CONST_INT:
2714 case CONST_DOUBLE:
2715 case CONST:
2716 case SYMBOL_REF:
2717 case CODE_LABEL:
2718 case PC:
2719 case CC0:
2720 case ASM_INPUT:
2721 case ADDR_VEC:
2722 case ADDR_DIFF_VEC:
2723 case RETURN:
2724 return x;
2725
e9a25f70
JL
2726 case ADDRESSOF:
2727 /* This is only for the benefit of the debugging backends, which call
2728 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2729 removed after CSE. */
1914f5da 2730 new = eliminate_regs (XEXP (x, 0), 0, insn);
e9a25f70
JL
2731 if (GET_CODE (new) == MEM)
2732 return XEXP (new, 0);
2733 return x;
2734
32131a9c
RK
2735 case REG:
2736 regno = REGNO (x);
2737
2738 /* First handle the case where we encounter a bare register that
2739 is eliminable. Replace it with a PLUS. */
2740 if (regno < FIRST_PSEUDO_REGISTER)
2741 {
2742 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2743 ep++)
2744 if (ep->from_rtx == x && ep->can_eliminate)
2745 {
ff32812a
RS
2746 if (! mem_mode
2747 /* Refs inside notes don't count for this purpose. */
fe089a90 2748 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
ff32812a 2749 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2750 ep->ref_outside_mem = 1;
2751 return plus_constant (ep->to_rtx, ep->previous_offset);
2752 }
2753
2754 }
2b49ee39
R
2755 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2756 && reg_equiv_constant[regno]
2757 && ! CONSTANT_P (reg_equiv_constant[regno]))
2758 return eliminate_regs (copy_rtx (reg_equiv_constant[regno]),
2759 mem_mode, insn);
32131a9c
RK
2760 return x;
2761
c5c76735
JL
2762 /* You might think handling MINUS in a manner similar to PLUS is a
2763 good idea. It is not. It has been tried multiple times and every
 2764   time the change has had to be reverted.
2765
2766 Other parts of reload know a PLUS is special (gen_reload for example)
 2767   and require special code to handle a reloaded PLUS operand.
2768
2769 Also consider backends where the flags register is clobbered by a
2770 MINUS, but we can emit a PLUS that does not clobber flags (ia32,
2771 lea instruction comes to mind). If we try to reload a MINUS, we
2772 may kill the flags register that was holding a useful value.
2773
2774 So, please before trying to handle MINUS, consider reload as a
2775 whole instead of this little section as well as the backend issues. */
32131a9c
RK
2776 case PLUS:
2777 /* If this is the sum of an eliminable register and a constant, rework
2778 the sum. */
2779 if (GET_CODE (XEXP (x, 0)) == REG
2780 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2781 && CONSTANT_P (XEXP (x, 1)))
2782 {
2783 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2784 ep++)
2785 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2786 {
e5687447
JW
2787 if (! mem_mode
2788 /* Refs inside notes don't count for this purpose. */
2789 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2790 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2791 ep->ref_outside_mem = 1;
2792
2793 /* The only time we want to replace a PLUS with a REG (this
2794 occurs when the constant operand of the PLUS is the negative
2795 of the offset) is when we are inside a MEM. We won't want
2796 to do so at other times because that would change the
2797 structure of the insn in a way that reload can't handle.
2798 We special-case the commonest situation in
2799 eliminate_regs_in_insn, so just replace a PLUS with a
2800 PLUS here, unless inside a MEM. */
a23b64d5 2801 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
32131a9c
RK
2802 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2803 return ep->to_rtx;
2804 else
38a448ca
RH
2805 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2806 plus_constant (XEXP (x, 1),
2807 ep->previous_offset));
32131a9c
RK
2808 }
2809
2810 /* If the register is not eliminable, we are done since the other
2811 operand is a constant. */
2812 return x;
2813 }
2814
2815 /* If this is part of an address, we want to bring any constant to the
2816 outermost PLUS. We will do this by doing register replacement in
2817 our operands and seeing if a constant shows up in one of them.
2818
2819 We assume here this is part of an address (or a "load address" insn)
2820 since an eliminable register is not likely to appear in any other
2821 context.
2822
2823 If we have (plus (eliminable) (reg)), we want to produce
930aeef3 2824     (plus (plus (replacement) (reg)) (const)).  If this was part of a
32131a9c
RK
2825 normal add insn, (plus (replacement) (reg)) will be pushed as a
2826 reload. This is the desired action. */
2827
2828 {
1914f5da
RH
2829 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2830 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
32131a9c
RK
2831
2832 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2833 {
2834 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2835 didn't get a hard register but has a reg_equiv_constant,
32131a9c
RK
2836 we must replace the constant here since it may no longer
2837 be in the position of any operand. */
2838 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2839 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2840 && reg_renumber[REGNO (new1)] < 0
2841 && reg_equiv_constant != 0
2842 && reg_equiv_constant[REGNO (new1)] != 0)
2843 new1 = reg_equiv_constant[REGNO (new1)];
2844 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2845 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2846 && reg_renumber[REGNO (new0)] < 0
2847 && reg_equiv_constant[REGNO (new0)] != 0)
2848 new0 = reg_equiv_constant[REGNO (new0)];
2849
2850 new = form_sum (new0, new1);
2851
2852 /* As above, if we are not inside a MEM we do not want to
2853 turn a PLUS into something else. We might try to do so here
2854 for an addition of 0 if we aren't optimizing. */
2855 if (! mem_mode && GET_CODE (new) != PLUS)
38a448ca 2856 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
32131a9c
RK
2857 else
2858 return new;
2859 }
2860 }
2861 return x;
2862
981c7390 2863 case MULT:
05d10675 2864 /* If this is the product of an eliminable register and a
981c7390
RK
2865 constant, apply the distribute law and move the constant out
2866 so that we have (plus (mult ..) ..). This is needed in order
9faa82d8 2867 to keep load-address insns valid. This case is pathological.
981c7390
RK
2868 We ignore the possibility of overflow here. */
2869 if (GET_CODE (XEXP (x, 0)) == REG
2870 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2871 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2872 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2873 ep++)
2874 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2875 {
2876 if (! mem_mode
2877 /* Refs inside notes don't count for this purpose. */
2878 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2879 || GET_CODE (insn) == INSN_LIST)))
2880 ep->ref_outside_mem = 1;
2881
2882 return
38a448ca 2883 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
981c7390
RK
2884 ep->previous_offset * INTVAL (XEXP (x, 1)));
2885 }
32131a9c 2886
0f41302f 2887 /* ... fall through ... */
32131a9c 2888
32131a9c
RK
2889 case CALL:
2890 case COMPARE:
c5c76735 2891 /* See comments before PLUS about handling MINUS. */
930aeef3 2892 case MINUS:
32131a9c
RK
2893 case DIV: case UDIV:
2894 case MOD: case UMOD:
2895 case AND: case IOR: case XOR:
45620ed4
RK
2896 case ROTATERT: case ROTATE:
2897 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
32131a9c
RK
2898 case NE: case EQ:
2899 case GE: case GT: case GEU: case GTU:
2900 case LE: case LT: case LEU: case LTU:
2901 {
1914f5da 2902 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
fb3821f7 2903 rtx new1
1914f5da 2904 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
32131a9c
RK
2905
2906 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
38a448ca 2907 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
32131a9c
RK
2908 }
2909 return x;
2910
981c7390
RK
2911 case EXPR_LIST:
2912 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2913 if (XEXP (x, 0))
2914 {
1914f5da 2915 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
981c7390 2916 if (new != XEXP (x, 0))
13bb79d4
R
2917 {
2918 /* If this is a REG_DEAD note, it is not valid anymore.
2919 Using the eliminated version could result in creating a
2920 REG_DEAD note for the stack or frame pointer. */
2921 if (GET_MODE (x) == REG_DEAD)
2922 return (XEXP (x, 1)
2923 ? eliminate_regs (XEXP (x, 1), mem_mode, insn)
2924 : NULL_RTX);
2925
2926 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2927 }
981c7390
RK
2928 }
2929
0f41302f 2930 /* ... fall through ... */
981c7390
RK
2931
2932 case INSN_LIST:
2933 /* Now do eliminations in the rest of the chain. If this was
2934 an EXPR_LIST, this might result in allocating more memory than is
2935 strictly needed, but it simplifies the code. */
2936 if (XEXP (x, 1))
2937 {
1914f5da 2938 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
981c7390 2939 if (new != XEXP (x, 1))
38a448ca 2940 return gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
981c7390
RK
2941 }
2942 return x;
2943
32131a9c
RK
2944 case PRE_INC:
2945 case POST_INC:
2946 case PRE_DEC:
2947 case POST_DEC:
2948 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2949 if (ep->to_rtx == XEXP (x, 0))
2950 {
4c05b187
RK
2951 int size = GET_MODE_SIZE (mem_mode);
2952
2953 /* If more bytes than MEM_MODE are pushed, account for them. */
2954#ifdef PUSH_ROUNDING
2955 if (ep->to_rtx == stack_pointer_rtx)
2956 size = PUSH_ROUNDING (size);
2957#endif
32131a9c 2958 if (code == PRE_DEC || code == POST_DEC)
4c05b187 2959 ep->offset += size;
32131a9c 2960 else
4c05b187 2961 ep->offset -= size;
32131a9c
RK
2962 }
2963
2964 /* Fall through to generic unary operation case. */
32131a9c
RK
2965 case STRICT_LOW_PART:
2966 case NEG: case NOT:
2967 case SIGN_EXTEND: case ZERO_EXTEND:
2968 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2969 case FLOAT: case FIX:
2970 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2971 case ABS:
2972 case SQRT:
2973 case FFS:
1914f5da 2974 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
32131a9c 2975 if (new != XEXP (x, 0))
38a448ca 2976 return gen_rtx_fmt_e (code, GET_MODE (x), new);
32131a9c
RK
2977 return x;
2978
2979 case SUBREG:
2980 /* Similar to above processing, but preserve SUBREG_WORD.
2981 Convert (subreg (mem)) to (mem) if not paradoxical.
2982 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2983 pseudo didn't get a hard reg, we must replace this with the
2984 eliminated version of the memory location because push_reloads
2985 may do the replacement in certain circumstances. */
2986 if (GET_CODE (SUBREG_REG (x)) == REG
2987 && (GET_MODE_SIZE (GET_MODE (x))
2988 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2989 && reg_equiv_memory_loc != 0
2990 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2991 {
cb2afeb3 2992#if 0
32131a9c 2993 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
1914f5da 2994 mem_mode, insn);
32131a9c
RK
2995
2996 /* If we didn't change anything, we must retain the pseudo. */
2997 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
59e2c378 2998 new = SUBREG_REG (x);
32131a9c 2999 else
59e2c378 3000 {
59e2c378
RK
3001 /* In this case, we must show that the pseudo is used in this
3002 insn so that delete_output_reload will do the right thing. */
3003 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
3004 && GET_CODE (insn) != INSN_LIST)
b60a8416
R
3005 REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode,
3006 SUBREG_REG (x)),
05d10675 3007 insn))
b60a8416
R
3008 = gen_rtx_EXPR_LIST (REG_EQUAL, new, NULL_RTX);
3009
3010 /* Ensure NEW isn't shared in case we have to reload it. */
3011 new = copy_rtx (new);
59e2c378 3012 }
cb2afeb3
R
3013#else
3014 new = SUBREG_REG (x);
3015#endif
32131a9c
RK
3016 }
3017 else
1914f5da 3018 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
32131a9c
RK
3019
3020 if (new != XEXP (x, 0))
3021 {
29ae5012
RK
3022 int x_size = GET_MODE_SIZE (GET_MODE (x));
3023 int new_size = GET_MODE_SIZE (GET_MODE (new));
3024
1914f5da 3025 if (GET_CODE (new) == MEM
6d49a073 3026 && ((x_size < new_size
1914f5da 3027#ifdef WORD_REGISTER_OPERATIONS
6d49a073
JW
3028 /* On these machines, combine can create rtl of the form
3029 (set (subreg:m1 (reg:m2 R) 0) ...)
05d10675 3030 where m1 < m2, and expects something interesting to
6d49a073
JW
3031 happen to the entire word. Moreover, it will use the
3032 (reg:m2 R) later, expecting all bits to be preserved.
05d10675 3033 So if the number of words is the same, preserve the
6d49a073
JW
3034 subreg so that push_reloads can see it. */
3035 && ! ((x_size-1)/UNITS_PER_WORD == (new_size-1)/UNITS_PER_WORD)
1914f5da 3036#endif
6d49a073
JW
3037 )
3038 || (x_size == new_size))
1914f5da 3039 )
32131a9c
RK
3040 {
3041 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
3042 enum machine_mode mode = GET_MODE (x);
3043
f76b9db2
ILT
3044 if (BYTES_BIG_ENDIAN)
3045 offset += (MIN (UNITS_PER_WORD,
3046 GET_MODE_SIZE (GET_MODE (new)))
3047 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
32131a9c
RK
3048
3049 PUT_MODE (new, mode);
3050 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
3051 return new;
3052 }
3053 else
38a448ca 3054 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_WORD (x));
32131a9c
RK
3055 }
3056
3057 return x;
3058
94714ecc
RK
3059 case USE:
 3060	      /* If using a register that is the source of an elimination we still
3061 think can be performed, note it cannot be performed since we don't
3062 know how this register is used. */
3063 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3064 if (ep->from_rtx == XEXP (x, 0))
3065 ep->can_eliminate = 0;
3066
1914f5da 3067 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
94714ecc 3068 if (new != XEXP (x, 0))
38a448ca 3069 return gen_rtx_fmt_e (code, GET_MODE (x), new);
94714ecc
RK
3070 return x;
3071
32131a9c
RK
3072 case CLOBBER:
3073 /* If clobbering a register that is the replacement register for an
d45cf215 3074 elimination we still think can be performed, note that it cannot
32131a9c
RK
3075 be performed. Otherwise, we need not be concerned about it. */
3076 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3077 if (ep->to_rtx == XEXP (x, 0))
3078 ep->can_eliminate = 0;
3079
1914f5da 3080 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2045084c 3081 if (new != XEXP (x, 0))
38a448ca 3082 return gen_rtx_fmt_e (code, GET_MODE (x), new);
32131a9c
RK
3083 return x;
3084
3085 case ASM_OPERANDS:
3086 {
3087 rtx *temp_vec;
3088 /* Properly handle sharing input and constraint vectors. */
3089 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3090 {
3091 /* When we come to a new vector not seen before,
3092 scan all its elements; keep the old vector if none
3093 of them changes; otherwise, make a copy. */
3094 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3095 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3096 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3097 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
1914f5da 3098 mem_mode, insn);
32131a9c
RK
3099
3100 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3101 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3102 break;
3103
3104 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3105 new_asm_operands_vec = old_asm_operands_vec;
3106 else
3107 new_asm_operands_vec
3108 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3109 }
3110
3111 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3112 if (new_asm_operands_vec == old_asm_operands_vec)
3113 return x;
3114
38a448ca
RH
3115 new = gen_rtx_ASM_OPERANDS (VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3116 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3117 ASM_OPERANDS_OUTPUT_IDX (x),
3118 new_asm_operands_vec,
3119 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3120 ASM_OPERANDS_SOURCE_FILE (x),
3121 ASM_OPERANDS_SOURCE_LINE (x));
32131a9c
RK
3122 new->volatil = x->volatil;
3123 return new;
3124 }
3125
3126 case SET:
3127 /* Check for setting a register that we know about. */
3128 if (GET_CODE (SET_DEST (x)) == REG)
3129 {
3130 /* See if this is setting the replacement register for an
a8fdc208 3131 elimination.
32131a9c 3132
3ec2ea3e
DE
3133 If DEST is the hard frame pointer, we do nothing because we
3134 assume that all assignments to the frame pointer are for
3135 non-local gotos and are being done at a time when they are valid
3136 and do not disturb anything else. Some machines want to
3137 eliminate a fake argument pointer (or even a fake frame pointer)
3138 with either the real frame or the stack pointer. Assignments to
3139 the hard frame pointer must not prevent this elimination. */
32131a9c
RK
3140
3141 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3142 ep++)
3143 if (ep->to_rtx == SET_DEST (x)
3ec2ea3e 3144 && SET_DEST (x) != hard_frame_pointer_rtx)
32131a9c 3145 {
6dc42e49 3146 /* If it is being incremented, adjust the offset. Otherwise,
32131a9c
RK
3147 this elimination can't be done. */
3148 rtx src = SET_SRC (x);
3149
3150 if (GET_CODE (src) == PLUS
3151 && XEXP (src, 0) == SET_DEST (x)
3152 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3153 ep->offset -= INTVAL (XEXP (src, 1));
3154 else
3155 ep->can_eliminate = 0;
3156 }
3157
 3158	  /* Now check to see whether we are assigning to a register that can be
 3159	     eliminated.  If so, it must be part of a PARALLEL, since we
3160 will not have been called if this is a single SET. So indicate
3161 that we can no longer eliminate this reg. */
3162 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3163 ep++)
3164 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3165 ep->can_eliminate = 0;
3166 }
3167
3168 /* Now avoid the loop below in this common case. */
3169 {
1914f5da
RH
3170 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3171 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
32131a9c 3172
ff32812a 3173 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
32131a9c
RK
3174 write a CLOBBER insn. */
3175 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
572ca60a
RS
3176 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3177 && GET_CODE (insn) != INSN_LIST)
38a448ca 3178 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, SET_DEST (x)), insn);
32131a9c
RK
3179
3180 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
38a448ca 3181 return gen_rtx_SET (VOIDmode, new0, new1);
32131a9c
RK
3182 }
3183
3184 return x;
3185
3186 case MEM:
e9a25f70
JL
3187 /* This is only for the benefit of the debugging backends, which call
3188 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
3189 removed after CSE. */
3190 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
1914f5da 3191 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn);
e9a25f70 3192
32131a9c
RK
3193 /* Our only special processing is to pass the mode of the MEM to our
3194 recursive call and copy the flags. While we are here, handle this
3195 case more efficiently. */
1914f5da 3196 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
32131a9c
RK
3197 if (new != XEXP (x, 0))
3198 {
38a448ca 3199 new = gen_rtx_MEM (GET_MODE (x), new);
32131a9c
RK
3200 new->volatil = x->volatil;
3201 new->unchanging = x->unchanging;
3202 new->in_struct = x->in_struct;
3203 return new;
3204 }
3205 else
3206 return x;
05d10675 3207
e9a25f70
JL
3208 default:
3209 break;
32131a9c
RK
3210 }
3211
3212 /* Process each of our operands recursively. If any have changed, make a
3213 copy of the rtx. */
3214 fmt = GET_RTX_FORMAT (code);
3215 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3216 {
3217 if (*fmt == 'e')
3218 {
1914f5da 3219 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
32131a9c
RK
3220 if (new != XEXP (x, i) && ! copied)
3221 {
3222 rtx new_x = rtx_alloc (code);
4c9a05bc
RK
3223 bcopy ((char *) x, (char *) new_x,
3224 (sizeof (*new_x) - sizeof (new_x->fld)
3225 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
32131a9c
RK
3226 x = new_x;
3227 copied = 1;
3228 }
3229 XEXP (x, i) = new;
3230 }
3231 else if (*fmt == 'E')
3232 {
3233 int copied_vec = 0;
3234 for (j = 0; j < XVECLEN (x, i); j++)
3235 {
1914f5da 3236 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
32131a9c
RK
3237 if (new != XVECEXP (x, i, j) && ! copied_vec)
3238 {
8f985ec4
ZW
3239 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3240 XVEC (x, i)->elem);
32131a9c
RK
3241 if (! copied)
3242 {
3243 rtx new_x = rtx_alloc (code);
4c9a05bc
RK
3244 bcopy ((char *) x, (char *) new_x,
3245 (sizeof (*new_x) - sizeof (new_x->fld)
3246 + (sizeof (new_x->fld[0])
3247 * GET_RTX_LENGTH (code))));
32131a9c
RK
3248 x = new_x;
3249 copied = 1;
3250 }
3251 XVEC (x, i) = new_v;
3252 copied_vec = 1;
3253 }
3254 XVECEXP (x, i, j) = new;
3255 }
3256 }
3257 }
3258
3259 return x;
3260}
3261\f
3262/* Scan INSN and eliminate all eliminable registers in it.
3263
3264 If REPLACE is nonzero, do the replacement destructively. Also
 3265   delete the insn as dead if it is setting an eliminable register.
3266
3267 If REPLACE is zero, do all our allocations in reload_obstack.
3268
3269 If no eliminations were done and this insn doesn't require any elimination
3270 processing (these are not identical conditions: it might be updating sp,
3271 but not referencing fp; this needs to be seen during reload_as_needed so
3272 that the offset between fp and sp can be taken into consideration), zero
3273 is returned. Otherwise, 1 is returned. */
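/* Schematically, with a frame-pointer-to-stack-pointer elimination whose
   current offset is 16, a body such as

	(set (reg:SI 100) (plus:SI (reg fp) (const_int 8)))

   is rewritten into

	(set (reg:SI 100) (plus:SI (reg sp) (const_int 24)))

   so that the pseudo still receives the same address.  */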
3274
3275static int
3276eliminate_regs_in_insn (insn, replace)
3277 rtx insn;
3278 int replace;
3279{
3280 rtx old_body = PATTERN (insn);
774672d2 3281 rtx old_set = single_set (insn);
32131a9c
RK
3282 rtx new_body;
3283 int val = 0;
3284 struct elim_table *ep;
3285
3286 if (! replace)
3287 push_obstacks (&reload_obstack, &reload_obstack);
3288
774672d2
RK
3289 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3290 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
32131a9c
RK
3291 {
3292 /* Check for setting an eliminable register. */
3293 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
774672d2 3294 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
32131a9c 3295 {
dd1eab0a
RK
3296#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3297 /* If this is setting the frame pointer register to the
3298 hardware frame pointer register and this is an elimination
3299 that will be done (tested above), this insn is really
3300 adjusting the frame pointer downward to compensate for
3301 the adjustment done before a nonlocal goto. */
3302 if (ep->from == FRAME_POINTER_REGNUM
3303 && ep->to == HARD_FRAME_POINTER_REGNUM)
3304 {
3305 rtx src = SET_SRC (old_set);
973838fd 3306 int offset = 0, ok = 0;
8026ebba 3307 rtx prev_insn, prev_set;
dd1eab0a
RK
3308
3309 if (src == ep->to_rtx)
3310 offset = 0, ok = 1;
3311 else if (GET_CODE (src) == PLUS
bb22893c
JW
3312 && GET_CODE (XEXP (src, 0)) == CONST_INT
3313 && XEXP (src, 1) == ep->to_rtx)
dd1eab0a 3314 offset = INTVAL (XEXP (src, 0)), ok = 1;
bb22893c
JW
3315 else if (GET_CODE (src) == PLUS
3316 && GET_CODE (XEXP (src, 1)) == CONST_INT
3317 && XEXP (src, 0) == ep->to_rtx)
3318 offset = INTVAL (XEXP (src, 1)), ok = 1;
8026ebba
ILT
3319 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
3320 && (prev_set = single_set (prev_insn)) != 0
3321 && rtx_equal_p (SET_DEST (prev_set), src))
3322 {
3323 src = SET_SRC (prev_set);
3324 if (src == ep->to_rtx)
3325 offset = 0, ok = 1;
3326 else if (GET_CODE (src) == PLUS
3327 && GET_CODE (XEXP (src, 0)) == CONST_INT
3328 && XEXP (src, 1) == ep->to_rtx)
3329 offset = INTVAL (XEXP (src, 0)), ok = 1;
3330 else if (GET_CODE (src) == PLUS
3331 && GET_CODE (XEXP (src, 1)) == CONST_INT
3332 && XEXP (src, 0) == ep->to_rtx)
3333 offset = INTVAL (XEXP (src, 1)), ok = 1;
3334 }
dd1eab0a
RK
3335
3336 if (ok)
3337 {
3338 if (replace)
3339 {
3340 rtx src
3341 = plus_constant (ep->to_rtx, offset - ep->offset);
3342
3343 /* First see if this insn remains valid when we
3344 make the change. If not, keep the INSN_CODE
 3345			     the same and let reload fix it up.  */
3346 validate_change (insn, &SET_SRC (old_set), src, 1);
3347 validate_change (insn, &SET_DEST (old_set),
3348 ep->to_rtx, 1);
3349 if (! apply_change_group ())
3350 {
3351 SET_SRC (old_set) = src;
3352 SET_DEST (old_set) = ep->to_rtx;
3353 }
3354 }
3355
3356 val = 1;
3357 goto done;
3358 }
3359 }
3360#endif
3361
32131a9c
RK
3362 /* In this case this insn isn't serving a useful purpose. We
3363 will delete it in reload_as_needed once we know that this
3364 elimination is, in fact, being done.
3365
abc95ed3 3366 If REPLACE isn't set, we can't delete this insn, but needn't
32131a9c
RK
3367 process it since it won't be used unless something changes. */
3368 if (replace)
8a34409d
RH
3369 {
3370 delete_dead_insn (insn);
3371 return 1;
3372 }
32131a9c
RK
3373 val = 1;
3374 goto done;
3375 }
3376
3377 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3378 in the insn is the negative of the offset in FROM. Substitute
3379 (set (reg) (reg to)) for the insn and change its code.
3380
cb2afeb3 3381 We have to do this here, rather than in eliminate_regs, so that we can
32131a9c
RK
3382 change the insn code. */
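	 /* Schematically, if the argument-pointer-to-frame-pointer offset is
	    currently 16, an insn such as

		(set (reg:SI 100) (plus:SI (reg ap) (const_int -16)))

	    collapses into the plain copy

		(set (reg:SI 100) (reg fp))

	    and is re-recognized as a move.  */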
3383
774672d2
RK
3384 if (GET_CODE (SET_SRC (old_set)) == PLUS
3385 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3386 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
32131a9c
RK
3387 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3388 ep++)
774672d2 3389 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
922d9d40 3390 && ep->can_eliminate)
32131a9c 3391 {
922d9d40
RK
3392 /* We must stop at the first elimination that will be used.
3393 If this one would replace the PLUS with a REG, do it
3394 now. Otherwise, quit the loop and let eliminate_regs
3395 do its normal replacement. */
774672d2 3396 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
922d9d40 3397 {
774672d2
RK
3398 /* We assume here that we don't need a PARALLEL of
3399 any CLOBBERs for this assignment. There's not
3400 much we can do if we do need it. */
38a448ca
RH
3401 PATTERN (insn) = gen_rtx_SET (VOIDmode,
3402 SET_DEST (old_set),
3403 ep->to_rtx);
922d9d40
RK
3404 INSN_CODE (insn) = -1;
3405 val = 1;
3406 goto done;
3407 }
3408
3409 break;
32131a9c
RK
3410 }
3411 }
3412
3413 old_asm_operands_vec = 0;
3414
3415 /* Replace the body of this insn with a substituted form. If we changed
05d10675 3416 something, return non-zero.
32131a9c
RK
3417
3418 If we are replacing a body that was a (set X (plus Y Z)), try to
3419 re-recognize the insn. We do this in case we had a simple addition
3420 but now can do this as a load-address. This saves an insn in this
0f41302f 3421 common case. */
32131a9c 3422
1914f5da 3423 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
32131a9c
RK
3424 if (new_body != old_body)
3425 {
7c791b13
RK
3426 /* If we aren't replacing things permanently and we changed something,
3427 make another copy to ensure that all the RTL is new. Otherwise
 3428	 things can go wrong if find_reloads swaps commutative operands
0f41302f 3429 and one is inside RTL that has been copied while the other is not. */
7c791b13 3430
4d411872
RS
3431 /* Don't copy an asm_operands because (1) there's no need and (2)
3432 copy_rtx can't do it properly when there are multiple outputs. */
b84f9d9c 3433 if (! replace && asm_noperands (old_body) < 0)
7c791b13
RK
3434 new_body = copy_rtx (new_body);
3435
774672d2
RK
3436 /* If we had a move insn but now we don't, rerecognize it. This will
3437 cause spurious re-recognition if the old move had a PARALLEL since
3438 the new one still will, but we can't call single_set without
3439 having put NEW_BODY into the insn and the re-recognition won't
3440 hurt in this rare case. */
3441 if (old_set != 0
3442 && ((GET_CODE (SET_SRC (old_set)) == REG
3443 && (GET_CODE (new_body) != SET
3444 || GET_CODE (SET_SRC (new_body)) != REG))
3445 /* If this was a load from or store to memory, compare
1ccbefce
RH
3446 the MEM in recog_data.operand to the one in the insn.
3447 If they are not equal, then rerecognize the insn. */
774672d2
RK
3448 || (old_set != 0
3449 && ((GET_CODE (SET_SRC (old_set)) == MEM
1ccbefce 3450 && SET_SRC (old_set) != recog_data.operand[1])
774672d2 3451 || (GET_CODE (SET_DEST (old_set)) == MEM
1ccbefce 3452 && SET_DEST (old_set) != recog_data.operand[0])))
774672d2
RK
3453 /* If this was an add insn before, rerecognize. */
3454 || GET_CODE (SET_SRC (old_set)) == PLUS))
4a5d0fb5
RS
3455 {
3456 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
0ba846c7
RS
3457 /* If recognition fails, store the new body anyway.
3458 It's normal to have recognition failures here
3459 due to bizarre memory addresses; reloading will fix them. */
3460 PATTERN (insn) = new_body;
4a5d0fb5 3461 }
0ba846c7 3462 else
32131a9c
RK
3463 PATTERN (insn) = new_body;
3464
32131a9c
RK
3465 val = 1;
3466 }
a8fdc208 3467
cb2afeb3 3468 /* Loop through all elimination pairs. See if any have changed.
a8efe40d 3469
32131a9c
RK
 3470     We also detect cases where register elimination cannot be done,
3471 namely, if a register would be both changed and referenced outside a MEM
3472 in the resulting insn since such an insn is often undefined and, even if
3473 not, we cannot know what meaning will be given to it. Note that it is
3474 valid to have a register used in an address in an insn that changes it
3475 (presumably with a pre- or post-increment or decrement).
3476
3477 If anything changes, return nonzero. */
3478
32131a9c
RK
3479 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3480 {
3481 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3482 ep->can_eliminate = 0;
3483
3484 ep->ref_outside_mem = 0;
3485
3486 if (ep->previous_offset != ep->offset)
3487 val = 1;
32131a9c
RK
3488 }
3489
3490 done:
9faa82d8 3491 /* If we changed something, perform elimination in REG_NOTES. This is
05b4c365
RK
3492 needed even when REPLACE is zero because a REG_DEAD note might refer
3493 to a register that we eliminate and could cause a different number
3494 of spill registers to be needed in the final reload pass than in
3495 the pre-passes. */
20748cab 3496 if (val && REG_NOTES (insn) != 0)
1914f5da 3497 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
05b4c365 3498
32131a9c
RK
3499 if (! replace)
3500 pop_obstacks ();
3501
3502 return val;
3503}
3504
cb2afeb3
R
3505/* Loop through all elimination pairs.
3506 Recalculate the number not at initial offset.
3507
 3508   Also record the current offset of each elimination pair as its
 3509   previous offset.  */
3510
3511static void
3512update_eliminable_offsets ()
3513{
3514 struct elim_table *ep;
3515
3516 num_not_at_initial_offset = 0;
3517 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3518 {
3519 ep->previous_offset = ep->offset;
3520 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3521 num_not_at_initial_offset++;
cb2afeb3
R
3522 }
3523}
3524
32131a9c
RK
3525/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3526 replacement we currently believe is valid, mark it as not eliminable if X
3527 modifies DEST in any way other than by adding a constant integer to it.
3528
 3529   If DEST is the hard frame pointer, we do nothing because we assume that
3ec2ea3e
DE
3530 all assignments to the hard frame pointer are nonlocal gotos and are being
3531 done at a time when they are valid and do not disturb anything else.
32131a9c 3532 Some machines want to eliminate a fake argument pointer with either the
3ec2ea3e
DE
3533 frame or stack pointer. Assignments to the hard frame pointer must not
3534 prevent this elimination.
32131a9c
RK
3535
3536 Called via note_stores from reload before starting its passes to scan
3537 the insns of the function. */
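/* For example, an adjustment such as
	(set (reg sp) (plus (reg sp) (const_int 16)))
   keeps eliminations whose replacement register is sp usable, whereas a
   store like
	(set (reg sp) (reg 100))
   leaves the offset unknown and so disables every elimination whose
   to_rtx is sp.  */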
3538
3539static void
3540mark_not_eliminable (dest, x)
3541 rtx dest;
3542 rtx x;
3543{
e51712db 3544 register unsigned int i;
32131a9c
RK
3545
3546 /* A SUBREG of a hard register here is just changing its mode. We should
3547 not see a SUBREG of an eliminable hard register, but check just in
3548 case. */
3549 if (GET_CODE (dest) == SUBREG)
3550 dest = SUBREG_REG (dest);
3551
3ec2ea3e 3552 if (dest == hard_frame_pointer_rtx)
32131a9c
RK
3553 return;
3554
3555 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3556 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3557 && (GET_CODE (x) != SET
3558 || GET_CODE (SET_SRC (x)) != PLUS
3559 || XEXP (SET_SRC (x), 0) != dest
3560 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3561 {
3562 reg_eliminate[i].can_eliminate_previous
3563 = reg_eliminate[i].can_eliminate = 0;
3564 num_eliminable--;
3565 }
3566}
09dd1133 3567
c47f5ea5
BS
3568/* Verify that the initial elimination offsets did not change since the
3569 last call to set_initial_elim_offsets. This is used to catch cases
3570 where something illegal happened during reload_as_needed that could
3571 cause incorrect code to be generated if we did not check for it. */
3572static void
3573verify_initial_elim_offsets ()
3574{
3575 int t;
3576
3577#ifdef ELIMINABLE_REGS
3578 struct elim_table *ep;
3579
3580 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3581 {
3582 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3583 if (t != ep->initial_offset)
3584 abort ();
3585 }
3586#else
3587 INITIAL_FRAME_POINTER_OFFSET (t);
3588 if (t != reg_eliminate[0].initial_offset)
3589 abort ();
05d10675 3590#endif
c47f5ea5
BS
3591}
3592
09dd1133
BS
3593/* Reset all offsets on eliminable registers to their initial values. */
3594static void
3595set_initial_elim_offsets ()
3596{
1f3b1e1a 3597 struct elim_table *ep = reg_eliminate;
09dd1133
BS
3598
3599#ifdef ELIMINABLE_REGS
1f3b1e1a 3600 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
09dd1133
BS
3601 {
3602 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
1f3b1e1a 3603 ep->previous_offset = ep->offset = ep->initial_offset;
09dd1133
BS
3604 }
3605#else
1f3b1e1a
JL
3606 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3607 ep->previous_offset = ep->offset = ep->initial_offset;
09dd1133
BS
3608#endif
3609
3610 num_not_at_initial_offset = 0;
1f3b1e1a 3611}
09dd1133 3612
1f3b1e1a
JL
3613/* Initialize the known label offsets.
3614 Set a known offset for each forced label to be at the initial offset
3615 of each elimination. We do this because we assume that all
3616 computed jumps occur from a location where each elimination is
3617 at its initial offset.
3618 For all other labels, show that we don't know the offsets. */
09dd1133 3619
1f3b1e1a
JL
3620static void
3621set_initial_label_offsets ()
3622{
3623 rtx x;
3624 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
09dd1133
BS
3625
3626 for (x = forced_labels; x; x = XEXP (x, 1))
3627 if (XEXP (x, 0))
3628 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3629}
3630
1f3b1e1a
JL
3631/* Set all elimination offsets to the known values for the code label given
3632 by INSN. */
3633static void
3634set_offsets_for_label (insn)
3635 rtx insn;
3636{
973838fd 3637 unsigned int i;
1f3b1e1a
JL
3638 int label_nr = CODE_LABEL_NUMBER (insn);
3639 struct elim_table *ep;
3640
3641 num_not_at_initial_offset = 0;
3642 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3643 {
3644 ep->offset = ep->previous_offset = offsets_at[label_nr][i];
3645 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3646 num_not_at_initial_offset++;
3647 }
3648}
3649
09dd1133
BS
3650/* See if anything that happened changes which eliminations are valid.
3651 For example, on the Sparc, whether or not the frame pointer can
3652 be eliminated can depend on what registers have been used. We need
3653 not check some conditions again (such as flag_omit_frame_pointer)
3654 since they can't have changed. */
3655
3656static void
3657update_eliminables (pset)
3658 HARD_REG_SET *pset;
3659{
3660#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3661 int previous_frame_pointer_needed = frame_pointer_needed;
3662#endif
3663 struct elim_table *ep;
3664
3665 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3666 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3667#ifdef ELIMINABLE_REGS
3668 || ! CAN_ELIMINATE (ep->from, ep->to)
3669#endif
3670 )
3671 ep->can_eliminate = 0;
3672
3673 /* Look for the case where we have discovered that we can't replace
3674 register A with register B and that means that we will now be
3675 trying to replace register A with register C. This means we can
3676 no longer replace register C with register B and we need to disable
3677 such an elimination, if it exists. This occurs often with A == ap,
3678 B == sp, and C == fp. */
3679
3680 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3681 {
3682 struct elim_table *op;
3683 register int new_to = -1;
3684
3685 if (! ep->can_eliminate && ep->can_eliminate_previous)
3686 {
3687 /* Find the current elimination for ep->from, if there is a
3688 new one. */
3689 for (op = reg_eliminate;
3690 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3691 if (op->from == ep->from && op->can_eliminate)
3692 {
3693 new_to = op->to;
3694 break;
3695 }
3696
3697 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3698 disable it. */
3699 for (op = reg_eliminate;
3700 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3701 if (op->from == new_to && op->to == ep->to)
3702 op->can_eliminate = 0;
3703 }
3704 }
3705
3706 /* See if any registers that we thought we could eliminate the previous
3707 time are no longer eliminable. If so, something has changed and we
3708 must spill the register. Also, recompute the number of eliminable
3709 registers and see if the frame pointer is needed; it is if there is
3710 no elimination of the frame pointer that we can perform. */
3711
3712 frame_pointer_needed = 1;
3713 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3714 {
3715 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3716 && ep->to != HARD_FRAME_POINTER_REGNUM)
3717 frame_pointer_needed = 0;
3718
3719 if (! ep->can_eliminate && ep->can_eliminate_previous)
3720 {
3721 ep->can_eliminate_previous = 0;
3722 SET_HARD_REG_BIT (*pset, ep->from);
3723 num_eliminable--;
3724 }
3725 }
3726
3727#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3728 /* If we didn't need a frame pointer last time, but we do now, spill
3729 the hard frame pointer. */
3730 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3731 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3732#endif
3733}
3734
3735/* Initialize the table of registers to eliminate. */
3736static void
3737init_elim_table ()
3738{
3739 struct elim_table *ep;
590cf94d
KG
3740#ifdef ELIMINABLE_REGS
3741 struct elim_table_1 *ep1;
3742#endif
09dd1133 3743
590cf94d 3744 if (!reg_eliminate)
ad85216e
KG
3745 reg_eliminate = (struct elim_table *)
3746 xcalloc(sizeof(struct elim_table), NUM_ELIMINABLE_REGS);
05d10675 3747
09dd1133
BS
3748 /* Does this function require a frame pointer? */
3749
3750 frame_pointer_needed = (! flag_omit_frame_pointer
3751#ifdef EXIT_IGNORE_STACK
3752 /* ?? If EXIT_IGNORE_STACK is set, we will not save
3753 and restore sp for alloca. So we can't eliminate
3754 the frame pointer in that case. At some point,
3755 we should improve this by emitting the
3756 sp-adjusting insns for this case. */
3757 || (current_function_calls_alloca
3758 && EXIT_IGNORE_STACK)
3759#endif
3760 || FRAME_POINTER_REQUIRED);
3761
3762 num_eliminable = 0;
3763
3764#ifdef ELIMINABLE_REGS
590cf94d
KG
3765 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3766 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
09dd1133 3767 {
590cf94d
KG
3768 ep->from = ep1->from;
3769 ep->to = ep1->to;
09dd1133
BS
3770 ep->can_eliminate = ep->can_eliminate_previous
3771 = (CAN_ELIMINATE (ep->from, ep->to)
3772 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3773 }
3774#else
590cf94d
KG
3775 reg_eliminate[0].from = reg_eliminate_1[0].from;
3776 reg_eliminate[0].to = reg_eliminate_1[0].to;
09dd1133
BS
3777 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3778 = ! frame_pointer_needed;
3779#endif
3780
3781 /* Count the number of eliminable registers and build the FROM and TO
3782 REG rtx's. Note that code in gen_rtx will cause, e.g.,
3783 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3784 We depend on this. */
3785 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3786 {
3787 num_eliminable += ep->can_eliminate;
3788 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3789 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3790 }
3791}
32131a9c
RK
3792\f
3793/* Kick all pseudos out of hard register REGNO.
32131a9c
RK
3794 If DUMPFILE is nonzero, log actions taken on that file.
3795
3796 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
 3797   because we found we can't eliminate some register.  In that case, no pseudos
3798 are allowed to be in the register, even if they are only in a block that
3799 doesn't require spill registers, unlike the case when we are spilling this
3800 hard reg to produce another spill register.
3801
 3802   Any pseudos that must be kicked out are recorded in spilled_pseudos.  */
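/* Note that the overlap test below handles multi-word pseudos: a DImode
   pseudo whose reg_renumber is 3 occupies hard regs 3 and 4 on a 32-bit
   target, so spilling hard reg 4 must also kick that pseudo out.  */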
3803
03acd8f8
BS
3804static void
3805spill_hard_reg (regno, dumpfile, cant_eliminate)
32131a9c 3806 register int regno;
6a651371 3807 FILE *dumpfile ATTRIBUTE_UNUSED;
32131a9c
RK
3808 int cant_eliminate;
3809{
32131a9c
RK
3810 register int i;
3811
9ff3516a 3812 if (cant_eliminate)
03acd8f8
BS
3813 {
3814 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3815 regs_ever_live[regno] = 1;
3816 }
9ff3516a 3817
32131a9c
RK
3818 /* Spill every pseudo reg that was allocated to this reg
3819 or to something that overlaps this reg. */
3820
3821 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3822 if (reg_renumber[i] >= 0
3823 && reg_renumber[i] <= regno
a8fdc208 3824 && (reg_renumber[i]
32131a9c
RK
3825 + HARD_REGNO_NREGS (reg_renumber[i],
3826 PSEUDO_REGNO_MODE (i))
3827 > regno))
03acd8f8
BS
3828 SET_REGNO_REG_SET (spilled_pseudos, i);
3829}
32131a9c 3830
03acd8f8
BS
3831/* I'm getting weird preprocessor errors if I use IOR_HARD_REG_SET
3832 from within EXECUTE_IF_SET_IN_REG_SET. Hence this awkwardness. */
3833static void
3834ior_hard_reg_set (set1, set2)
3835 HARD_REG_SET *set1, *set2;
3836{
3837 IOR_HARD_REG_SET (*set1, *set2);
3838}
05d10675 3839
03acd8f8
BS
3840/* After find_reload_regs has been run for all insn that need reloads,
3841 and/or spill_hard_regs was called, this function is used to actually
3842 spill pseudo registers and try to reallocate them. It also sets up the
3843 spill_regs array for use by choose_reload_regs. */
a8fdc208 3844
03acd8f8
BS
3845static int
3846finish_spills (global, dumpfile)
3847 int global;
3848 FILE *dumpfile;
3849{
3850 struct insn_chain *chain;
3851 int something_changed = 0;
3852 int i;
3853
3854 /* Build the spill_regs array for the function. */
3855 /* If there are some registers still to eliminate and one of the spill regs
3856 wasn't ever used before, additional stack space may have to be
3857 allocated to store this register. Thus, we may have changed the offset
3858 between the stack and frame pointers, so mark that something has changed.
32131a9c 3859
03acd8f8
BS
 3860      One might think that we need only set SOMETHING_CHANGED if this is a call-used
3861 register. However, the set of registers that must be saved by the
3862 prologue is not identical to the call-used set. For example, the
3863 register used by the call insn for the return PC is a call-used register,
3864 but must be saved by the prologue. */
3865
3866 n_spills = 0;
3867 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3868 if (TEST_HARD_REG_BIT (used_spill_regs, i))
3869 {
3870 spill_reg_order[i] = n_spills;
3871 spill_regs[n_spills++] = i;
3872 if (num_eliminable && ! regs_ever_live[i])
3873 something_changed = 1;
3874 regs_ever_live[i] = 1;
3875 }
3876 else
3877 spill_reg_order[i] = -1;
3878
3879 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3880 if (REGNO_REG_SET_P (spilled_pseudos, i))
3881 {
3882 /* Record the current hard register the pseudo is allocated to in
3883 pseudo_previous_regs so we avoid reallocating it to the same
3884 hard reg in a later pass. */
3885 if (reg_renumber[i] < 0)
3886 abort ();
3887 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
32131a9c
RK
3888 /* Mark it as no longer having a hard register home. */
3889 reg_renumber[i] = -1;
3890 /* We will need to scan everything again. */
3891 something_changed = 1;
03acd8f8 3892 }
7609e720 3893
03acd8f8
BS
3894 /* Retry global register allocation if possible. */
3895 if (global)
3896 {
3897 bzero ((char *) pseudo_forbidden_regs, max_regno * sizeof (HARD_REG_SET));
3898 /* For every insn that needs reloads, set the registers used as spill
3899 regs in pseudo_forbidden_regs for every pseudo live across the
3900 insn. */
3901 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3902 {
3903 EXECUTE_IF_SET_IN_REG_SET
3904 (chain->live_before, FIRST_PSEUDO_REGISTER, i,
3905 {
3906 ior_hard_reg_set (pseudo_forbidden_regs + i,
3907 &chain->used_spill_regs);
3908 });
3909 EXECUTE_IF_SET_IN_REG_SET
3910 (chain->live_after, FIRST_PSEUDO_REGISTER, i,
3911 {
3912 ior_hard_reg_set (pseudo_forbidden_regs + i,
3913 &chain->used_spill_regs);
3914 });
3915 }
7609e720 3916
03acd8f8
BS
3917 /* Retry allocating the spilled pseudos. For each reg, merge the
3918 various reg sets that indicate which hard regs can't be used,
3919 and call retry_global_alloc.
05d10675 3920	 We change spilled_pseudos here to only contain pseudos that did not
03acd8f8
BS
3921 get a new hard register. */
3922 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3923 if (reg_old_renumber[i] != reg_renumber[i])
32131a9c 3924 {
03acd8f8
BS
3925 HARD_REG_SET forbidden;
3926 COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
3927 IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
3928 IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
3929 retry_global_alloc (i, forbidden);
3930 if (reg_renumber[i] >= 0)
3931 CLEAR_REGNO_REG_SET (spilled_pseudos, i);
32131a9c 3932 }
03acd8f8 3933 }
7609e720 3934
03acd8f8
BS
3935 /* Fix up the register information in the insn chain.
3936 This involves deleting those of the spilled pseudos which did not get
3937 a new hard register home from the live_{before,after} sets. */
7609e720
BS
3938 for (chain = reload_insn_chain; chain; chain = chain->next)
3939 {
03acd8f8
BS
3940 HARD_REG_SET used_by_pseudos;
3941 HARD_REG_SET used_by_pseudos2;
3942
7609e720
BS
3943 AND_COMPL_REG_SET (chain->live_before, spilled_pseudos);
3944 AND_COMPL_REG_SET (chain->live_after, spilled_pseudos);
03acd8f8
BS
3945
3946 /* Mark any unallocated hard regs as available for spills. That
3947 makes inheritance work somewhat better. */
3948 if (chain->need_reload)
3949 {
3950 REG_SET_TO_HARD_REG_SET (used_by_pseudos, chain->live_before);
3951 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, chain->live_after);
3952 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
3953
3954 /* Save the old value for the sanity test below. */
3955 COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);
3956
3957 compute_use_by_pseudos (&used_by_pseudos, chain->live_before);
3958 compute_use_by_pseudos (&used_by_pseudos, chain->live_after);
3959 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
3960 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
3961
3962 /* Make sure we only enlarge the set. */
3963 GO_IF_HARD_REG_SUBSET (used_by_pseudos2, chain->used_spill_regs, ok);
3964 abort ();
3965 ok:;
3966 }
7609e720 3967 }
03acd8f8
BS
3968
3969 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
3970 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3971 {
3972 int regno = reg_renumber[i];
3973 if (reg_old_renumber[i] == regno)
3974 continue;
05d10675 3975
03acd8f8
BS
3976 alter_reg (i, reg_old_renumber[i]);
3977 reg_old_renumber[i] = regno;
3978 if (dumpfile)
3979 {
3980 if (regno == -1)
3981 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3982 else
3983 fprintf (dumpfile, " Register %d now in %d.\n\n",
3984 i, reg_renumber[i]);
3985 }
3986 }
3987
3988 return something_changed;
7609e720 3989}
32131a9c 3990\f
05d10675 3991/* Find all paradoxical subregs within X and update reg_max_ref_width.
56f58d3a
RK
3992 Also mark any hard registers used to store user variables as
3993 forbidden from being used for spill registers. */
32131a9c
RK
3994
3995static void
3996scan_paradoxical_subregs (x)
3997 register rtx x;
3998{
3999 register int i;
6f7d635c 4000 register const char *fmt;
32131a9c
RK
4001 register enum rtx_code code = GET_CODE (x);
4002
4003 switch (code)
4004 {
56f58d3a 4005 case REG:
03acd8f8 4006#if 0
e9a25f70 4007 if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
f95182a4 4008 && REG_USERVAR_P (x))
03acd8f8
BS
4009 SET_HARD_REG_BIT (bad_spill_regs_global, REGNO (x));
4010#endif
56f58d3a
RK
4011 return;
4012
32131a9c
RK
4013 case CONST_INT:
4014 case CONST:
4015 case SYMBOL_REF:
4016 case LABEL_REF:
4017 case CONST_DOUBLE:
4018 case CC0:
4019 case PC:
32131a9c
RK
4020 case USE:
4021 case CLOBBER:
4022 return;
4023
4024 case SUBREG:
4025 if (GET_CODE (SUBREG_REG (x)) == REG
4026 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4027 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4028 = GET_MODE_SIZE (GET_MODE (x));
4029 return;
05d10675 4030
e9a25f70
JL
4031 default:
4032 break;
32131a9c
RK
4033 }
4034
4035 fmt = GET_RTX_FORMAT (code);
4036 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4037 {
4038 if (fmt[i] == 'e')
4039 scan_paradoxical_subregs (XEXP (x, i));
4040 else if (fmt[i] == 'E')
4041 {
4042 register int j;
4043 for (j = XVECLEN (x, i) - 1; j >=0; j--)
4044 scan_paradoxical_subregs (XVECEXP (x, i, j));
4045 }
4046 }
4047}
4048\f
32131a9c 4049static int
788a0818 4050hard_reg_use_compare (p1p, p2p)
e1b6684c
KG
4051 const PTR p1p;
4052 const PTR p2p;
05d10675 4053{
03acd8f8
BS
4054 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p;
4055 struct hard_reg_n_uses *p2 = (struct hard_reg_n_uses *)p2p;
4056 int bad1 = TEST_HARD_REG_BIT (bad_spill_regs, p1->regno);
4057 int bad2 = TEST_HARD_REG_BIT (bad_spill_regs, p2->regno);
4058 if (bad1 && bad2)
4059 return p1->regno - p2->regno;
4060 if (bad1)
4061 return 1;
4062 if (bad2)
4063 return -1;
4064 if (p1->uses > p2->uses)
4065 return 1;
4066 if (p1->uses < p2->uses)
4067 return -1;
32131a9c
RK
4068 /* If regs are equally good, sort by regno,
4069 so that the results of qsort leave nothing to chance. */
4070 return p1->regno - p2->regno;
4071}
4072
03acd8f8
BS
4073/* Used for communication between order_regs_for_reload and count_pseudo.
4074 Used to avoid counting one pseudo twice. */
4075static regset pseudos_counted;
4076
4077/* Update the costs in N_USES, considering that pseudo REG is live. */
4078static void
4079count_pseudo (n_uses, reg)
4080 struct hard_reg_n_uses *n_uses;
4081 int reg;
4082{
4083 int r = reg_renumber[reg];
4084 int nregs;
4085
4086 if (REGNO_REG_SET_P (pseudos_counted, reg))
4087 return;
4088 SET_REGNO_REG_SET (pseudos_counted, reg);
4089
4090 if (r < 0)
4091 abort ();
4092
4093 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
4094 while (nregs-- > 0)
05d10675 4095 n_uses[r++].uses += REG_N_REFS (reg);
03acd8f8 4096}
32131a9c
RK
4097/* Choose the order to consider regs for use as reload registers
4098 based on how much trouble would be caused by spilling one.
4099 Store them in order of decreasing preference in potential_reload_regs. */
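/* The resulting order is roughly: hard regs currently holding no pseudos
   and not forbidden for this insn (call-clobbered ones first unless
   REG_ALLOC_ORDER decides otherwise), then regs already in use sorted by
   increasing count of pseudo references, with fixed and otherwise bad
   spill regs placed last.  */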
4100
4101static void
03acd8f8
BS
4102order_regs_for_reload (chain)
4103 struct insn_chain *chain;
32131a9c 4104{
03acd8f8 4105 register int i;
32131a9c 4106 register int o = 0;
32131a9c
RK
4107 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
4108
03acd8f8 4109 pseudos_counted = ALLOCA_REG_SET ();
32131a9c 4110
03acd8f8 4111 COPY_HARD_REG_SET (bad_spill_regs, bad_spill_regs_global);
32131a9c
RK
4112
4113 /* Count number of uses of each hard reg by pseudo regs allocated to it
4114 and then order them by decreasing use. */
4115
4116 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4117 {
2feb9704
BS
4118 hard_reg_n_uses[i].regno = i;
4119 hard_reg_n_uses[i].uses = 0;
03acd8f8 4120
03acd8f8
BS
4121 /* Test the various reasons why we can't use a register for
4122 spilling in this insn. */
4123 if (fixed_regs[i]
4124 || REGNO_REG_SET_P (chain->live_before, i)
4125 || REGNO_REG_SET_P (chain->live_after, i))
2feb9704
BS
4126 SET_HARD_REG_BIT (bad_spill_regs, i);
4127 }
32131a9c 4128
2feb9704
BS
4129 /* Now compute hard_reg_n_uses. */
4130 CLEAR_REG_SET (pseudos_counted);
03acd8f8 4131
2feb9704
BS
4132 EXECUTE_IF_SET_IN_REG_SET
4133 (chain->live_before, FIRST_PSEUDO_REGISTER, i,
4134 {
4135 count_pseudo (hard_reg_n_uses, i);
4136 });
4137 EXECUTE_IF_SET_IN_REG_SET
4138 (chain->live_after, FIRST_PSEUDO_REGISTER, i,
4139 {
4140 count_pseudo (hard_reg_n_uses, i);
4141 });
03acd8f8
BS
4142
4143 FREE_REG_SET (pseudos_counted);
32131a9c
RK
4144
4145 /* Prefer registers not so far used, for use in temporary loading.
4146 Among them, if REG_ALLOC_ORDER is defined, use that order.
4147 Otherwise, prefer registers not preserved by calls. */
4148
4149#ifdef REG_ALLOC_ORDER
4150 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4151 {
4152 int regno = reg_alloc_order[i];
4153
03acd8f8
BS
4154 if (hard_reg_n_uses[regno].uses == 0
4155 && ! TEST_HARD_REG_BIT (bad_spill_regs, regno))
32131a9c
RK
4156 potential_reload_regs[o++] = regno;
4157 }
4158#else
4159 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4160 {
03acd8f8
BS
4161 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i]
4162 && ! TEST_HARD_REG_BIT (bad_spill_regs, i))
32131a9c
RK
4163 potential_reload_regs[o++] = i;
4164 }
4165 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4166 {
03acd8f8
BS
4167 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i]
4168 && ! TEST_HARD_REG_BIT (bad_spill_regs, i))
32131a9c
RK
4169 potential_reload_regs[o++] = i;
4170 }
4171#endif
4172
4173 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
4174 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
4175
4176 /* Now add the regs that are already used,
4177 preferring those used less often. The fixed and otherwise forbidden
4178 registers will be at the end of this list. */
4179
4180 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
03acd8f8
BS
4181 if (hard_reg_n_uses[i].uses != 0
4182 && ! TEST_HARD_REG_BIT (bad_spill_regs, hard_reg_n_uses[i].regno))
4183 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
4184 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4185 if (TEST_HARD_REG_BIT (bad_spill_regs, hard_reg_n_uses[i].regno))
32131a9c
RK
4186 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
4187}
4188\f
4189/* Reload pseudo-registers into hard regs around each insn as needed.
4190 Additional register load insns are output before the insn that needs it
4191 and perhaps store insns after insns that modify the reloaded pseudo reg.
4192
4193 reg_last_reload_reg and reg_reloaded_contents keep track of
d08ea79f 4194 which registers are already available in reload registers.
32131a9c
RK
4195 We update these for the reloads that we perform,
4196 as the insns are scanned. */
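/* In outline, the loop below processes each insn by (1) copying label
   offsets or performing register elimination, (2) rerunning find_reloads
   when the recorded reloads are stale, (3) choosing reload registers,
   emitting the reload insns and substituting the chosen registers, and
   (4) forgetting any recorded reload-register contents that the insn,
   trailing CLOBBERs, labels or calls invalidate.  */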
4197
4198static void
7609e720 4199reload_as_needed (live_known)
32131a9c
RK
4200 int live_known;
4201{
7609e720 4202 struct insn_chain *chain;
973838fd 4203#if defined (AUTO_INC_DEC) || defined (INSN_CLOBBERS_REGNO_P)
32131a9c 4204 register int i;
973838fd 4205#endif
32131a9c 4206 rtx x;
32131a9c 4207
4c9a05bc
RK
4208 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
4209 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
32131a9c 4210 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
4c9a05bc 4211 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
32131a9c 4212 reg_has_output_reload = (char *) alloca (max_regno);
e6e52be0 4213 CLEAR_HARD_REG_SET (reg_reloaded_valid);
32131a9c 4214
1f3b1e1a 4215 set_initial_elim_offsets ();
32131a9c 4216
7609e720 4217 for (chain = reload_insn_chain; chain; chain = chain->next)
32131a9c 4218 {
03acd8f8 4219 rtx prev;
7609e720
BS
4220 rtx insn = chain->insn;
4221 rtx old_next = NEXT_INSN (insn);
32131a9c
RK
4222
4223 /* If we pass a label, copy the offsets from the label information
4224 into the current offsets of each elimination. */
4225 if (GET_CODE (insn) == CODE_LABEL)
1f3b1e1a 4226 set_offsets_for_label (insn);
32131a9c
RK
4227
4228 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4229 {
0639444f 4230 rtx oldpat = PATTERN (insn);
32131a9c 4231
2758481d
RS
4232 /* If this is a USE and CLOBBER of a MEM, ensure that any
4233 references to eliminable registers have been removed. */
4234
4235 if ((GET_CODE (PATTERN (insn)) == USE
4236 || GET_CODE (PATTERN (insn)) == CLOBBER)
4237 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
4238 XEXP (XEXP (PATTERN (insn), 0), 0)
4239 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
29ae5012 4240 GET_MODE (XEXP (PATTERN (insn), 0)),
1914f5da 4241 NULL_RTX);
2758481d 4242
32131a9c
RK
4243 /* If we need to do register elimination processing, do so.
4244 This might delete the insn, in which case we are done. */
2b49ee39 4245 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
32131a9c
RK
4246 {
4247 eliminate_regs_in_insn (insn, 1);
4248 if (GET_CODE (insn) == NOTE)
cb2afeb3
R
4249 {
4250 update_eliminable_offsets ();
4251 continue;
4252 }
32131a9c
RK
4253 }
4254
7609e720
BS
4255 /* If need_elim is nonzero but need_reload is zero, one might think
4256 that we could simply set n_reloads to 0. However, find_reloads
4257 could have done some manipulation of the insn (such as swapping
4258 commutative operands), and these manipulations are lost during
4259 the first pass for every insn that needs register elimination.
4260 So the actions of find_reloads must be redone here. */
4261
03acd8f8
BS
4262 if (! chain->need_elim && ! chain->need_reload
4263 && ! chain->need_operand_change)
32131a9c
RK
4264 n_reloads = 0;
4265 /* First find the pseudo regs that must be reloaded for this insn.
4266 This info is returned in the tables reload_... (see reload.h).
4267 Also modify the body of INSN by substituting RELOAD
4268 rtx's for those pseudo regs. */
4269 else
4270 {
4271 bzero (reg_has_output_reload, max_regno);
4272 CLEAR_HARD_REG_SET (reg_is_output_reload);
4273
4274 find_reloads (insn, 1, spill_indirect_levels, live_known,
4275 spill_reg_order);
4276 }
4277
dd6acd1b 4278 if (num_eliminable && chain->need_elim)
cb2afeb3
R
4279 update_eliminable_offsets ();
4280
32131a9c
RK
4281 if (n_reloads > 0)
4282 {
cb2afeb3 4283 rtx next = NEXT_INSN (insn);
3c3eeea6 4284 rtx p;
32131a9c 4285
cb2afeb3
R
4286 prev = PREV_INSN (insn);
4287
32131a9c
RK
4288 /* Now compute which reload regs to reload them into. Perhaps
4289 reusing reload regs from previous insns, or else output
4290 load insns to reload them. Maybe output store insns too.
4291 Record the choices of reload reg in reload_reg_rtx. */
03acd8f8 4292 choose_reload_regs (chain);
32131a9c 4293
05d10675 4294 /* Merge any reloads that we didn't combine for fear of
546b63fb
RK
4295 increasing the number of spill registers needed but now
4296 discover can be safely merged. */
f95182a4
ILT
4297 if (SMALL_REGISTER_CLASSES)
4298 merge_assigned_reloads (insn);
546b63fb 4299
32131a9c
RK
4300 /* Generate the insns to reload operands into or out of
4301 their reload regs. */
7609e720 4302 emit_reload_insns (chain);
32131a9c
RK
4303
4304 /* Substitute the chosen reload regs from reload_reg_rtx
4305 into the insn's body (or perhaps into the bodies of other
4306 load and store insn that we just made for reloading
4307 and that we moved the structure into). */
4308 subst_reloads ();
3c3eeea6
RK
4309
4310 /* If this was an ASM, make sure that all the reload insns
4311 we have generated are valid. If not, give an error
4312 and delete them. */
4313
4314 if (asm_noperands (PATTERN (insn)) >= 0)
4315 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4316 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
4317 && (recog_memoized (p) < 0
0eadeb15 4318 || (extract_insn (p), ! constrain_operands (1))))
3c3eeea6
RK
4319 {
4320 error_for_asm (insn,
4321 "`asm' operand requires impossible reload");
4322 PUT_CODE (p, NOTE);
4323 NOTE_SOURCE_FILE (p) = 0;
4324 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
4325 }
32131a9c
RK
4326 }
4327 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4328 is no longer validly lying around to save a future reload.
4329 Note that this does not detect pseudos that were reloaded
4330 for this insn in order to be stored in
4331 (obeying register constraints). That is correct; such reload
4332 registers ARE still valid. */
0639444f 4333 note_stores (oldpat, forget_old_reloads_1);
32131a9c
RK
4334
4335 /* There may have been CLOBBER insns placed after INSN. So scan
4336 between INSN and NEXT and use them to forget old reloads. */
7609e720 4337 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
32131a9c
RK
4338 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4339 note_stores (PATTERN (x), forget_old_reloads_1);
4340
4341#ifdef AUTO_INC_DEC
cb2afeb3
R
4342 /* Likewise for regs altered by auto-increment in this insn.
4343 REG_INC notes have been changed by reloading:
4344 find_reloads_address_1 records substitutions for them,
4345 which have been performed by subst_reloads above. */
4346 for (i = n_reloads - 1; i >= 0; i--)
4347 {
eceef4c9 4348 rtx in_reg = rld[i].in_reg;
cb2afeb3
R
4349 if (in_reg)
4350 {
4351 enum rtx_code code = GET_CODE (in_reg);
4352 /* PRE_INC / PRE_DEC will have the reload register ending up
4353 with the same value as the stack slot, but that doesn't
4354 hold true for POST_INC / POST_DEC. Either we have to
4355 convert the memory access to a true POST_INC / POST_DEC,
4356 or we can't use the reload register for inheritance. */
4357 if ((code == POST_INC || code == POST_DEC)
4358 && TEST_HARD_REG_BIT (reg_reloaded_valid,
eceef4c9 4359 REGNO (rld[i].reg_rtx))
04bbb0c5
JW
4360 /* Make sure it is the inc/dec pseudo, and not
4361 some other (e.g. output operand) pseudo. */
eceef4c9 4362 && (reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
04bbb0c5 4363 == REGNO (XEXP (in_reg, 0))))
05d10675 4364
cb2afeb3 4365 {
eceef4c9 4366 rtx reload_reg = rld[i].reg_rtx;
cb2afeb3
R
4367 enum machine_mode mode = GET_MODE (reload_reg);
4368 int n = 0;
4369 rtx p;
4370
4371 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4372 {
4373 /* We really want to ignore REG_INC notes here, so
4374 use PATTERN (p) as argument to reg_set_p . */
4375 if (reg_set_p (reload_reg, PATTERN (p)))
4376 break;
4377 n = count_occurrences (PATTERN (p), reload_reg);
4378 if (! n)
4379 continue;
4380 if (n == 1)
f67c2384
JL
4381 {
4382 n = validate_replace_rtx (reload_reg,
4383 gen_rtx (code, mode,
4384 reload_reg),
4385 p);
4386
4387 /* We must also verify that the constraints
4388 are met after the replacement. */
4389 extract_insn (p);
4390 if (n)
4391 n = constrain_operands (1);
4392 else
4393 break;
4394
4395 /* If the constraints were not met, then
4396 undo the replacement. */
4397 if (!n)
4398 {
4399 validate_replace_rtx (gen_rtx (code, mode,
4400 reload_reg),
4401 reload_reg, p);
4402 break;
4403 }
05d10675 4404
f67c2384 4405 }
cb2afeb3
R
4406 break;
4407 }
4408 if (n == 1)
02eb1393
R
4409 {
4410 REG_NOTES (p)
4411 = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
4412 REG_NOTES (p));
4413 /* Mark this as having an output reload so that the
4414 REG_INC processing code below won't invalidate
4415 the reload for inheritance. */
4416 SET_HARD_REG_BIT (reg_is_output_reload,
4417 REGNO (reload_reg));
4418 reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
4419 }
cb2afeb3
R
4420 else
4421 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX);
4422 }
02eb1393
R
4423 else if ((code == PRE_INC || code == PRE_DEC)
4424 && TEST_HARD_REG_BIT (reg_reloaded_valid,
eceef4c9 4425 REGNO (rld[i].reg_rtx))
02eb1393
R
4426 /* Make sure it is the inc/dec pseudo, and not
4427 some other (e.g. output operand) pseudo. */
eceef4c9 4428 && (reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
02eb1393
R
4429 == REGNO (XEXP (in_reg, 0))))
4430 {
4431 SET_HARD_REG_BIT (reg_is_output_reload,
eceef4c9 4432 REGNO (rld[i].reg_rtx));
02eb1393
R
4433 reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
4434 }
cb2afeb3
R
4435 }
4436 }
02eb1393
R
4437 /* If a pseudo that got a hard register is auto-incremented,
4438 we must purge records of copying it into pseudos without
4439 hard registers. */
32131a9c
RK
4440 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4441 if (REG_NOTE_KIND (x) == REG_INC)
4442 {
4443 /* See if this pseudo reg was reloaded in this insn.
4444 If so, its last-reload info is still valid
4445 because it is based on this insn's reload. */
4446 for (i = 0; i < n_reloads; i++)
eceef4c9 4447 if (rld[i].out == XEXP (x, 0))
32131a9c
RK
4448 break;
4449
08fb99fa 4450 if (i == n_reloads)
9a881562 4451 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
32131a9c
RK
4452 }
4453#endif
4454 }
4455 /* A reload reg's contents are unknown after a label. */
4456 if (GET_CODE (insn) == CODE_LABEL)
e6e52be0 4457 CLEAR_HARD_REG_SET (reg_reloaded_valid);
32131a9c
RK
4458
4459 /* Don't assume a reload reg is still good after a call insn
4460 if it is a call-used reg. */
546b63fb 4461 else if (GET_CODE (insn) == CALL_INSN)
e6e52be0 4462 AND_COMPL_HARD_REG_SET(reg_reloaded_valid, call_used_reg_set);
32131a9c
RK
4463
4464 /* In case registers overlap, allow certain insns to invalidate
4465 particular hard registers. */
4466
4467#ifdef INSN_CLOBBERS_REGNO_P
e6e52be0
R
4468 for (i = 0 ; i < FIRST_PSEUDO_REGISTER; i++)
4469 if (TEST_HARD_REG_BIT (reg_reloaded_valid, i)
4470 && INSN_CLOBBERS_REGNO_P (insn, i))
4471 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i);
32131a9c
RK
4472#endif
4473
32131a9c
RK
4474#ifdef USE_C_ALLOCA
4475 alloca (0);
4476#endif
4477 }
4478}
4479
4480/* Discard all record of any value reloaded from X,
4481 or reloaded in X from someplace else;
4482 unless X is an output reload reg of the current insn.
4483
4484 X may be a hard reg (the reload reg)
4485 or it may be a pseudo reg that was reloaded from. */
4486
4487static void
9a881562 4488forget_old_reloads_1 (x, ignored)
32131a9c 4489 rtx x;
487a6e06 4490 rtx ignored ATTRIBUTE_UNUSED;
32131a9c
RK
4491{
4492 register int regno;
4493 int nr;
0a2e51a9
RS
4494 int offset = 0;
4495
4496 /* note_stores does give us subregs of hard regs. */
4497 while (GET_CODE (x) == SUBREG)
4498 {
4499 offset += SUBREG_WORD (x);
4500 x = SUBREG_REG (x);
4501 }
32131a9c
RK
4502
4503 if (GET_CODE (x) != REG)
4504 return;
4505
0a2e51a9 4506 regno = REGNO (x) + offset;
32131a9c
RK
4507
4508 if (regno >= FIRST_PSEUDO_REGISTER)
4509 nr = 1;
4510 else
4511 {
4512 int i;
4513 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4514 /* Storing into a spilled-reg invalidates its contents.
4515 This can happen if a block-local pseudo is allocated to that reg
4516 and it wasn't spilled because this block's total need is 0.
4517 Then some insn might have an optional reload and use this reg. */
4518 for (i = 0; i < nr; i++)
e6e52be0
R
4519 /* But don't do this if the reg actually serves as an output
4520 reload reg in the current instruction. */
4521 if (n_reloads == 0
4522 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4523 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
32131a9c
RK
4524 }
4525
4526 /* Since value of X has changed,
4527 forget any value previously copied from it. */
4528
4529 while (nr-- > 0)
4530 /* But don't forget a copy if this is the output reload
4531 that establishes the copy's validity. */
4532 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4533 reg_last_reload_reg[regno + nr] = 0;
4534}
4535\f
4536/* For each reload, the mode of the reload register. */
4537static enum machine_mode reload_mode[MAX_RELOADS];
4538
4539/* For each reload, the largest number of registers it will require. */
4540static int reload_nregs[MAX_RELOADS];
4541
4542/* Comparison function for qsort to decide which of two reloads
4543 should be handled first. *P1 and *P2 are the reload numbers. */
4544
4545static int
788a0818 4546reload_reg_class_lower (r1p, r2p)
e1b6684c
KG
4547 const PTR r1p;
4548 const PTR r2p;
32131a9c 4549{
788a0818 4550 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
32131a9c 4551 register int t;
a8fdc208 4552
32131a9c 4553 /* Consider required reloads before optional ones. */
eceef4c9 4554 t = rld[r1].optional - rld[r2].optional;
32131a9c
RK
4555 if (t != 0)
4556 return t;
4557
4558 /* Count all solitary classes before non-solitary ones. */
eceef4c9
BS
4559 t = ((reg_class_size[(int) rld[r2].class] == 1)
4560 - (reg_class_size[(int) rld[r1].class] == 1));
32131a9c
RK
4561 if (t != 0)
4562 return t;
4563
4564 /* Aside from solitaires, consider all multi-reg groups first. */
4565 t = reload_nregs[r2] - reload_nregs[r1];
4566 if (t != 0)
4567 return t;
4568
4569 /* Consider reloads in order of increasing reg-class number. */
eceef4c9 4570 t = (int) rld[r1].class - (int) rld[r2].class;
32131a9c
RK
4571 if (t != 0)
4572 return t;
4573
4574 /* If reloads are equally urgent, sort by reload number,
4575 so that the results of qsort leave nothing to chance. */
4576 return r1 - r2;
4577}
4578\f
4579/* The following HARD_REG_SETs indicate when each hard register is
4580 used for a reload of various parts of the current insn. */
4581
4582/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4583static HARD_REG_SET reload_reg_used;
546b63fb
RK
4584/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4585static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
47c8cf91
ILT
4586/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4587static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
546b63fb
RK
4588/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4589static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
47c8cf91
ILT
4590/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4591static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
546b63fb
RK
4592/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4593static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4594/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4595static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c
RK
4596/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4597static HARD_REG_SET reload_reg_used_in_op_addr;
893bc853
RK
4598/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4599static HARD_REG_SET reload_reg_used_in_op_addr_reload;
546b63fb
RK
4600/* If reg is in use for a RELOAD_FOR_INSN reload. */
4601static HARD_REG_SET reload_reg_used_in_insn;
4602/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4603static HARD_REG_SET reload_reg_used_in_other_addr;
32131a9c
RK
4604
4605/* If reg is in use as a reload reg for any sort of reload. */
4606static HARD_REG_SET reload_reg_used_at_all;
4607
be7ae2a4
RK
4608 /* If reg is used as an inherited reload.  We just mark the first register
4609 in the group. */
4610static HARD_REG_SET reload_reg_used_for_inherit;
4611
f1db3576
JL
4612/* Records which hard regs are used in any way, either as explicit use or
4613 by being allocated to a pseudo during any point of the current insn. */
4614static HARD_REG_SET reg_used_in_insn;
297927a8 4615
546b63fb
RK
4616/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4617 TYPE. MODE is used to indicate how many consecutive regs are
4618 actually used. */
32131a9c
RK
4619
4620static void
546b63fb 4621mark_reload_reg_in_use (regno, opnum, type, mode)
32131a9c 4622 int regno;
546b63fb
RK
4623 int opnum;
4624 enum reload_type type;
32131a9c
RK
4625 enum machine_mode mode;
4626{
4627 int nregs = HARD_REGNO_NREGS (regno, mode);
4628 int i;
4629
4630 for (i = regno; i < nregs + regno; i++)
4631 {
546b63fb 4632 switch (type)
32131a9c
RK
4633 {
4634 case RELOAD_OTHER:
4635 SET_HARD_REG_BIT (reload_reg_used, i);
4636 break;
4637
546b63fb
RK
4638 case RELOAD_FOR_INPUT_ADDRESS:
4639 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
32131a9c
RK
4640 break;
4641
47c8cf91
ILT
4642 case RELOAD_FOR_INPADDR_ADDRESS:
4643 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4644 break;
4645
546b63fb
RK
4646 case RELOAD_FOR_OUTPUT_ADDRESS:
4647 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
32131a9c
RK
4648 break;
4649
47c8cf91
ILT
4650 case RELOAD_FOR_OUTADDR_ADDRESS:
4651 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4652 break;
4653
32131a9c
RK
4654 case RELOAD_FOR_OPERAND_ADDRESS:
4655 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4656 break;
4657
893bc853
RK
4658 case RELOAD_FOR_OPADDR_ADDR:
4659 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4660 break;
4661
546b63fb
RK
4662 case RELOAD_FOR_OTHER_ADDRESS:
4663 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4664 break;
4665
32131a9c 4666 case RELOAD_FOR_INPUT:
546b63fb 4667 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
32131a9c
RK
4668 break;
4669
4670 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4671 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4672 break;
4673
4674 case RELOAD_FOR_INSN:
4675 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
32131a9c
RK
4676 break;
4677 }
4678
4679 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4680 }
4681}
4682
be7ae2a4
RK
4683/* Similarly, but show REGNO is no longer in use for a reload. */
4684
4685static void
4686clear_reload_reg_in_use (regno, opnum, type, mode)
4687 int regno;
4688 int opnum;
4689 enum reload_type type;
4690 enum machine_mode mode;
4691{
4692 int nregs = HARD_REGNO_NREGS (regno, mode);
cb2afeb3 4693 int start_regno, end_regno;
be7ae2a4 4694 int i;
cb2afeb3
R
4695 /* A complication is that for some reload types, inheritance might
4696 allow multiple reloads of the same type to share a reload register.
4697 We set check_opnum if we have to check only reloads with the same
4698 operand number, and check_any if we have to check all reloads. */
4699 int check_opnum = 0;
4700 int check_any = 0;
4701 HARD_REG_SET *used_in_set;
be7ae2a4 4702
cb2afeb3 4703 switch (type)
be7ae2a4 4704 {
cb2afeb3
R
4705 case RELOAD_OTHER:
4706 used_in_set = &reload_reg_used;
4707 break;
be7ae2a4 4708
cb2afeb3
R
4709 case RELOAD_FOR_INPUT_ADDRESS:
4710 used_in_set = &reload_reg_used_in_input_addr[opnum];
4711 break;
be7ae2a4 4712
cb2afeb3
R
4713 case RELOAD_FOR_INPADDR_ADDRESS:
4714 check_opnum = 1;
4715 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4716 break;
47c8cf91 4717
cb2afeb3
R
4718 case RELOAD_FOR_OUTPUT_ADDRESS:
4719 used_in_set = &reload_reg_used_in_output_addr[opnum];
4720 break;
be7ae2a4 4721
cb2afeb3
R
4722 case RELOAD_FOR_OUTADDR_ADDRESS:
4723 check_opnum = 1;
4724 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4725 break;
47c8cf91 4726
cb2afeb3
R
4727 case RELOAD_FOR_OPERAND_ADDRESS:
4728 used_in_set = &reload_reg_used_in_op_addr;
4729 break;
be7ae2a4 4730
cb2afeb3
R
4731 case RELOAD_FOR_OPADDR_ADDR:
4732 check_any = 1;
4733 used_in_set = &reload_reg_used_in_op_addr_reload;
4734 break;
893bc853 4735
cb2afeb3
R
4736 case RELOAD_FOR_OTHER_ADDRESS:
4737 used_in_set = &reload_reg_used_in_other_addr;
4738 check_any = 1;
4739 break;
be7ae2a4 4740
cb2afeb3
R
4741 case RELOAD_FOR_INPUT:
4742 used_in_set = &reload_reg_used_in_input[opnum];
4743 break;
be7ae2a4 4744
cb2afeb3
R
4745 case RELOAD_FOR_OUTPUT:
4746 used_in_set = &reload_reg_used_in_output[opnum];
4747 break;
be7ae2a4 4748
cb2afeb3
R
4749 case RELOAD_FOR_INSN:
4750 used_in_set = &reload_reg_used_in_insn;
4751 break;
4752 default:
4753 abort ();
4754 }
4755 /* We resolve conflicts with remaining reloads of the same type by
4756 excluding the intervals of reload registers used by them from the
4757 interval of freed reload registers. Since we only keep track of
4758 one set of interval bounds, we might have to exclude somewhat
4759 more than what would be necessary if we used a HARD_REG_SET here.
4760 But this should only happen very infrequently, so there should
4761 be no reason to worry about it. */
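/* For example, if we are freeing regs 4..7 while another pending reload
   of the same type still occupies regs 5..6, the interval below shrinks
   to just reg 4; regs 5..7 stay marked, which is conservative (reg 7 was
   ours) but safe.  */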
05d10675 4762
cb2afeb3
R
4763 start_regno = regno;
4764 end_regno = regno + nregs;
4765 if (check_opnum || check_any)
4766 {
4767 for (i = n_reloads - 1; i >= 0; i--)
4768 {
eceef4c9
BS
4769 if (rld[i].when_needed == type
4770 && (check_any || rld[i].opnum == opnum)
4771 && rld[i].reg_rtx)
cb2afeb3 4772 {
eceef4c9 4773 int conflict_start = true_regnum (rld[i].reg_rtx);
cb2afeb3
R
4774 int conflict_end
4775 = (conflict_start
4776 + HARD_REGNO_NREGS (conflict_start, reload_mode[i]));
4777
4778 /* If there is an overlap with the first to-be-freed register,
4779 adjust the interval start. */
4780 if (conflict_start <= start_regno && conflict_end > start_regno)
4781 start_regno = conflict_end;
4782 /* Otherwise, if there is a conflict with one of the other
4783 to-be-freed registers, adjust the interval end. */
4784 if (conflict_start > start_regno && conflict_start < end_regno)
4785 end_regno = conflict_start;
4786 }
be7ae2a4
RK
4787 }
4788 }
cb2afeb3
R
4789 for (i = start_regno; i < end_regno; i++)
4790 CLEAR_HARD_REG_BIT (*used_in_set, i);
be7ae2a4
RK
4791}
4792
32131a9c 4793/* 1 if reg REGNO is free as a reload reg for a reload of the sort
546b63fb 4794 specified by OPNUM and TYPE. */
32131a9c
RK
4795
4796static int
546b63fb 4797reload_reg_free_p (regno, opnum, type)
32131a9c 4798 int regno;
546b63fb
RK
4799 int opnum;
4800 enum reload_type type;
32131a9c 4801{
546b63fb
RK
4802 int i;
4803
2edc8d65
RK
4804 /* In use for a RELOAD_OTHER means it's not available for anything. */
4805 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
32131a9c 4806 return 0;
546b63fb
RK
4807
4808 switch (type)
32131a9c
RK
4809 {
4810 case RELOAD_OTHER:
2edc8d65
RK
4811 /* In use for anything means we can't use it for RELOAD_OTHER. */
4812 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
224f1d71
RK
4813 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4814 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4815 return 0;
4816
4817 for (i = 0; i < reload_n_operands; i++)
4818 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
47c8cf91 4819 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
224f1d71 4820 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 4821 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
224f1d71
RK
4822 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4823 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4824 return 0;
4825
4826 return 1;
32131a9c 4827
32131a9c 4828 case RELOAD_FOR_INPUT:
546b63fb
RK
4829 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4830 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4831 return 0;
4832
893bc853
RK
4833 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4834 return 0;
4835
546b63fb
RK
4836 /* If it is used for some other input, can't use it. */
4837 for (i = 0; i < reload_n_operands; i++)
4838 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4839 return 0;
4840
4841 /* If it is used in a later operand's address, can't use it. */
4842 for (i = opnum + 1; i < reload_n_operands; i++)
47c8cf91
ILT
4843 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4844 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
546b63fb
RK
4845 return 0;
4846
4847 return 1;
4848
4849 case RELOAD_FOR_INPUT_ADDRESS:
4850 /* Can't use a register if it is used for an input address for this
4851 operand or used as an input in an earlier one. */
47c8cf91
ILT
4852 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4853 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4854 return 0;
4855
4856 for (i = 0; i < opnum; i++)
4857 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4858 return 0;
4859
4860 return 1;
4861
4862 case RELOAD_FOR_INPADDR_ADDRESS:
4863 /* Can't use a register if it is used for an input address
05d10675
BS
4864 for this operand or used as an input in an earlier
4865 one. */
47c8cf91 4866 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
546b63fb
RK
4867 return 0;
4868
4869 for (i = 0; i < opnum; i++)
4870 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4871 return 0;
4872
4873 return 1;
4874
4875 case RELOAD_FOR_OUTPUT_ADDRESS:
4876 /* Can't use a register if it is used for an output address for this
4877 operand or used as an output in this or a later operand. */
4878 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4879 return 0;
4880
4881 for (i = opnum; i < reload_n_operands; i++)
4882 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4883 return 0;
4884
4885 return 1;
4886
47c8cf91
ILT
4887 case RELOAD_FOR_OUTADDR_ADDRESS:
4888 /* Can't use a register if it is used for an output address
05d10675
BS
4889 for this operand or used as an output in this or a
4890 later operand. */
47c8cf91
ILT
4891 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4892 return 0;
4893
4894 for (i = opnum; i < reload_n_operands; i++)
4895 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4896 return 0;
4897
4898 return 1;
4899
32131a9c 4900 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4901 for (i = 0; i < reload_n_operands; i++)
4902 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4903 return 0;
4904
4905 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4906 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4907
893bc853
RK
4908 case RELOAD_FOR_OPADDR_ADDR:
4909 for (i = 0; i < reload_n_operands; i++)
05d10675
BS
4910 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4911 return 0;
893bc853 4912
a94ce333 4913 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
893bc853 4914
32131a9c 4915 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4916 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4917 outputs, or an operand address for this or an earlier output. */
4918 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4919 return 0;
4920
4921 for (i = 0; i < reload_n_operands; i++)
4922 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4923 return 0;
4924
4925 for (i = 0; i <= opnum; i++)
47c8cf91
ILT
4926 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4927 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
546b63fb
RK
4928 return 0;
4929
4930 return 1;
4931
4932 case RELOAD_FOR_INSN:
4933 for (i = 0; i < reload_n_operands; i++)
4934 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4935 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4936 return 0;
4937
4938 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4939 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4940
4941 case RELOAD_FOR_OTHER_ADDRESS:
4942 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4943 }
4944 abort ();
4945}
4946
32131a9c 4947/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4948 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4949 is still available in REGNO at the end of the insn.
4950
4951 We can assume that the reload reg was already tested for availability
4952 at the time it is needed, and we should not check this again,
4953 in case the reg has already been marked in use. */
4954
4955static int
546b63fb 4956reload_reg_reaches_end_p (regno, opnum, type)
32131a9c 4957 int regno;
546b63fb
RK
4958 int opnum;
4959 enum reload_type type;
32131a9c 4960{
546b63fb
RK
4961 int i;
4962
4963 switch (type)
32131a9c
RK
4964 {
4965 case RELOAD_OTHER:
4966 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4967 its value must reach the end. */
4968 return 1;
4969
4970 /* If this use is for part of the insn,
05d10675 4971 its value reaches the end if no subsequent part uses the same register.
546b63fb
RK
4972 Just like the above function, don't try to do this with lots
4973 of fallthroughs. */
4974
4975 case RELOAD_FOR_OTHER_ADDRESS:
4976 /* Here we check for everything else, since these don't conflict
4977 with anything else and everything comes later. */
4978
4979 for (i = 0; i < reload_n_operands; i++)
4980 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 4981 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
546b63fb
RK
4982 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4983 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
47c8cf91 4984 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
546b63fb
RK
4985 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4986 return 0;
4987
4988 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4989 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4990 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4991
4992 case RELOAD_FOR_INPUT_ADDRESS:
47c8cf91 4993 case RELOAD_FOR_INPADDR_ADDRESS:
546b63fb
RK
4994 /* Similar, except that we check only for this and subsequent inputs
4995 and the address of only subsequent inputs and we do not need
4996 to check for RELOAD_OTHER objects since they are known not to
4997 conflict. */
4998
4999 for (i = opnum; i < reload_n_operands; i++)
5000 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5001 return 0;
5002
5003 for (i = opnum + 1; i < reload_n_operands; i++)
47c8cf91
ILT
5004 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5005 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
546b63fb
RK
5006 return 0;
5007
5008 for (i = 0; i < reload_n_operands; i++)
5009 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 5010 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
546b63fb
RK
5011 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5012 return 0;
5013
893bc853
RK
5014 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5015 return 0;
5016
546b63fb
RK
5017 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5018 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
5019
32131a9c 5020 case RELOAD_FOR_INPUT:
546b63fb 5021 /* Similar to input address, except we start at the next operand for
05d10675 5022 both input and input address and we do not check for
546b63fb
RK
5023 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5024 would conflict. */
5025
5026 for (i = opnum + 1; i < reload_n_operands; i++)
5027 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
47c8cf91 5028 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
546b63fb
RK
5029 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5030 return 0;
5031
0f41302f 5032 /* ... fall through ... */
546b63fb 5033
32131a9c 5034 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
5035 /* Check outputs and their addresses. */
5036
5037 for (i = 0; i < reload_n_operands; i++)
5038 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 5039 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
546b63fb
RK
5040 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5041 return 0;
5042
5043 return 1;
5044
893bc853
RK
5045 case RELOAD_FOR_OPADDR_ADDR:
5046 for (i = 0; i < reload_n_operands; i++)
5047 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 5048 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
893bc853
RK
5049 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5050 return 0;
5051
a94ce333
JW
5052 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5053 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
893bc853 5054
546b63fb 5055 case RELOAD_FOR_INSN:
893bc853 5056 /* These conflict with other outputs with RELOAD_OTHER. So
546b63fb
RK
5057 we need only check for output addresses. */
5058
5059 opnum = -1;
5060
0f41302f 5061 /* ... fall through ... */
546b63fb 5062
32131a9c 5063 case RELOAD_FOR_OUTPUT:
546b63fb 5064 case RELOAD_FOR_OUTPUT_ADDRESS:
47c8cf91 5065 case RELOAD_FOR_OUTADDR_ADDRESS:
546b63fb
RK
5066 /* We already know these can't conflict with a later output. So the
5067 only thing to check are later output addresses. */
5068 for (i = opnum + 1; i < reload_n_operands; i++)
47c8cf91
ILT
5069 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5070 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
546b63fb
RK
5071 return 0;
5072
32131a9c
RK
5073 return 1;
5074 }
546b63fb 5075
32131a9c
RK
5076 abort ();
5077}
5078\f
351aa1c1
RK
5079/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5080 Return 0 otherwise.
5081
5082 This function uses the same algorithm as reload_reg_free_p above. */
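/* For example, a RELOAD_FOR_INPUT reload for operand 1 conflicts with a
   RELOAD_FOR_INPUT_ADDRESS reload for operand 2 (the address of a later
   input), but not with one for operand 0, since only address reloads of
   later operands overlap the lifetime of this input's reload register.  */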
5083
f5963e61 5084int
351aa1c1
RK
5085reloads_conflict (r1, r2)
5086 int r1, r2;
5087{
eceef4c9
BS
5088 enum reload_type r1_type = rld[r1].when_needed;
5089 enum reload_type r2_type = rld[r2].when_needed;
5090 int r1_opnum = rld[r1].opnum;
5091 int r2_opnum = rld[r2].opnum;
351aa1c1 5092
2edc8d65
RK
5093 /* RELOAD_OTHER conflicts with everything. */
5094 if (r2_type == RELOAD_OTHER)
351aa1c1
RK
5095 return 1;
5096
5097 /* Otherwise, check conflicts differently for each type. */
5098
5099 switch (r1_type)
5100 {
5101 case RELOAD_FOR_INPUT:
05d10675 5102 return (r2_type == RELOAD_FOR_INSN
351aa1c1 5103 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
893bc853 5104 || r2_type == RELOAD_FOR_OPADDR_ADDR
351aa1c1 5105 || r2_type == RELOAD_FOR_INPUT
47c8cf91
ILT
5106 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5107 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5108 && r2_opnum > r1_opnum));
351aa1c1
RK
5109
5110 case RELOAD_FOR_INPUT_ADDRESS:
5111 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5112 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5113
47c8cf91
ILT
5114 case RELOAD_FOR_INPADDR_ADDRESS:
5115 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5116 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5117
351aa1c1
RK
5118 case RELOAD_FOR_OUTPUT_ADDRESS:
5119 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5120 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
5121
47c8cf91
ILT
5122 case RELOAD_FOR_OUTADDR_ADDRESS:
5123 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5124 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
5125
351aa1c1
RK
5126 case RELOAD_FOR_OPERAND_ADDRESS:
5127 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
a94ce333 5128 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
351aa1c1 5129
893bc853 5130 case RELOAD_FOR_OPADDR_ADDR:
05d10675 5131 return (r2_type == RELOAD_FOR_INPUT
a94ce333 5132 || r2_type == RELOAD_FOR_OPADDR_ADDR);
893bc853 5133
351aa1c1
RK
5134 case RELOAD_FOR_OUTPUT:
5135 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
47c8cf91
ILT
5136 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5137 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
351aa1c1
RK
5138 && r2_opnum >= r1_opnum));
5139
5140 case RELOAD_FOR_INSN:
5141 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5142 || r2_type == RELOAD_FOR_INSN
5143 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5144
5145 case RELOAD_FOR_OTHER_ADDRESS:
5146 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5147
adab4fc5 5148 case RELOAD_OTHER:
2edc8d65 5149 return 1;
adab4fc5 5150
351aa1c1
RK
5151 default:
5152 abort ();
5153 }
5154}
5155\f
32131a9c
RK
5156/* Vector of reload-numbers showing the order in which the reloads should
5157 be processed. */
5158short reload_order[MAX_RELOADS];
5159
5160/* Indexed by reload number, 1 if incoming value
5161 inherited from previous insns. */
5162char reload_inherited[MAX_RELOADS];
5163
5164/* For an inherited reload, this is the insn the reload was inherited from,
5165 if we know it. Otherwise, this is 0. */
5166rtx reload_inheritance_insn[MAX_RELOADS];
5167
5168/* If non-zero, this is a place to get the value of the reload,
5169 rather than using reload_in. */
5170rtx reload_override_in[MAX_RELOADS];
5171
e6e52be0
R
5172/* For each reload, the hard register number of the register used,
5173 or -1 if we did not need a register for this reload. */
32131a9c
RK
5174int reload_spill_index[MAX_RELOADS];
5175
6e684430
R
5176/* Return 1 if the value in reload reg REGNO, as used by a reload
5177 needed for the part of the insn specified by OPNUM and TYPE,
5178 may be used to load VALUE into it.
f5470689
R
5179
5180 Other read-only reloads with the same value do not conflict
5181 unless OUT is non-zero and these other reloads have to live while
5182 output reloads live.
dfe96118
R
5183 If OUT is CONST0_RTX, this is a special case: it means that the
5184 test should not be for using register REGNO as reload register, but
5185 for copying from register REGNO into the reload register.
f5470689
R
5186
5187 RELOADNUM is the number of the reload we want to load this value for;
5188 a reload does not conflict with itself.
5189
dfe96118
R
5190 When IGNORE_ADDRESS_RELOADS is set, we do not count conflicts with
5191 reloads that load an address for the very reload we are considering.
5192
6e684430
R
5193 The caller has to make sure that there is no conflict with the return
5194 register. */
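/* For instance, choose_reload_regs below passes OUT == const0_rtx when it
   only wants to know whether an inheritance candidate's value could be
   copied out of register REGNO, rather than whether REGNO itself can
   serve as the reload register.  */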
5195static int
dfe96118 5196reload_reg_free_for_value_p (regno, opnum, type, value, out, reloadnum,
05d10675 5197 ignore_address_reloads)
6e684430
R
5198 int regno;
5199 int opnum;
5200 enum reload_type type;
f5470689
R
5201 rtx value, out;
5202 int reloadnum;
5828374f 5203 int ignore_address_reloads;
6e684430
R
5204{
5205 int time1;
5206 int i;
dfe96118
R
5207 int copy = 0;
5208
5209 if (out == const0_rtx)
5210 {
5211 copy = 1;
5212 out = NULL_RTX;
5213 }
6e684430
R
5214
5215 /* We use some pseudo 'time' value to check if the lifetimes of the
5216 new register use would overlap with that of a previous reload
5217 that is not read-only or uses a different value.
5218 The 'time' used doesn't have to be linear in any shape or form, just
5219 monotonic.
5220 Some reload types use different 'buckets' for each operand.
5221 So there are MAX_RECOG_OPERANDS different time values for each
cecbf6e2
R
5222 such reload type.
5223 We compute TIME1 as the time when the register for the prospective
5224 new reload ceases to be live, and TIME2 for each existing
5225 reload as the time when the reload register of that reload
5226 becomes live.
5227 Where there is little to be gained by exact lifetime calculations,
5228 we just make conservative assumptions, i.e. a longer lifetime;
5229 this is done in the 'default:' cases. */
6e684430
R
5230 switch (type)
5231 {
5232 case RELOAD_FOR_OTHER_ADDRESS:
5233 time1 = 0;
5234 break;
dfe96118
R
5235 case RELOAD_OTHER:
5236 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5237 break;
05d10675
BS
5238 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5239 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2,
5240 respectively, to the time values for these, we get distinct time
5241 values. To get distinct time values for each operand, we have to
5242 multiply opnum by at least three. We round that up to four because
5243 multiplying by four is often cheaper. */
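/* For example, with opnum == 1 in the COPY case, the cases below give
   RELOAD_FOR_INPADDR_ADDRESS the time 1*4 + 2 == 6,
   RELOAD_FOR_INPUT_ADDRESS 7 and RELOAD_FOR_INPUT 8, so the reloads of
   operand 1 get distinct, ordered time values.  */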
6e684430 5244 case RELOAD_FOR_INPADDR_ADDRESS:
dfe96118 5245 time1 = opnum * 4 + 2;
6e684430
R
5246 break;
5247 case RELOAD_FOR_INPUT_ADDRESS:
dfe96118
R
5248 time1 = opnum * 4 + 3;
5249 break;
5250 case RELOAD_FOR_INPUT:
5251 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5252 executes (inclusive). */
5253 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
6e684430 5254 break;
cb2afeb3 5255 case RELOAD_FOR_OPADDR_ADDR:
05d10675
BS
5256 /* opnum * 4 + 4
5257 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
cb2afeb3
R
5258 time1 = MAX_RECOG_OPERANDS * 4 + 1;
5259 break;
5260 case RELOAD_FOR_OPERAND_ADDRESS:
5261 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5262 is executed. */
dfe96118
R
5263 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5264 break;
5265 case RELOAD_FOR_OUTADDR_ADDRESS:
5266 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
6e684430 5267 break;
6e684430 5268 case RELOAD_FOR_OUTPUT_ADDRESS:
dfe96118 5269 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
6e684430
R
5270 break;
5271 default:
dfe96118 5272 time1 = MAX_RECOG_OPERANDS * 5 + 5;
6e684430
R
5273 }
5274
5275 for (i = 0; i < n_reloads; i++)
5276 {
eceef4c9 5277 rtx reg = rld[i].reg_rtx;
6e684430
R
5278 if (reg && GET_CODE (reg) == REG
5279 && ((unsigned) regno - true_regnum (reg)
83e0821b 5280 <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned)1)
f5470689 5281 && i != reloadnum)
6e684430 5282 {
eceef4c9
BS
5283 if (! rld[i].in || ! rtx_equal_p (rld[i].in, value)
5284 || rld[i].out || out)
6e684430 5285 {
f5470689 5286 int time2;
eceef4c9 5287 switch (rld[i].when_needed)
f5470689
R
5288 {
5289 case RELOAD_FOR_OTHER_ADDRESS:
5290 time2 = 0;
5291 break;
5292 case RELOAD_FOR_INPADDR_ADDRESS:
cb2afeb3
R
5293 /* find_reloads makes sure that a
5294 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is used
5295 by at most one - the first -
5296 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS reload. If the
5297 address reload is inherited, the address address reload
5298 goes away, so we can ignore this conflict. */
dfe96118
R
5299 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5300 && ignore_address_reloads
5301 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5302 Then the address address is still needed to store
5303 back the new address. */
eceef4c9 5304 && ! rld[reloadnum].out)
cb2afeb3 5305 continue;
dfe96118
R
5306 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5307 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5308 reloads go away. */
eceef4c9 5309 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
dfe96118
R
5310 && ignore_address_reloads
5311 /* Unless we are reloading an auto_inc expression. */
eceef4c9 5312 && ! rld[reloadnum].out)
dfe96118 5313 continue;
eceef4c9 5314 time2 = rld[i].opnum * 4 + 2;
f5470689
R
5315 break;
5316 case RELOAD_FOR_INPUT_ADDRESS:
eceef4c9 5317 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
dfe96118 5318 && ignore_address_reloads
eceef4c9 5319 && ! rld[reloadnum].out)
dfe96118 5320 continue;
eceef4c9 5321 time2 = rld[i].opnum * 4 + 3;
f5470689
R
5322 break;
5323 case RELOAD_FOR_INPUT:
eceef4c9 5324 time2 = rld[i].opnum * 4 + 4;
f5470689 5325 break;
eceef4c9 5326 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
05d10675 5327 == MAX_RECOG_OPERANDS * 4 */
cb2afeb3 5328 case RELOAD_FOR_OPADDR_ADDR:
dfe96118
R
5329 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5330 && ignore_address_reloads
eceef4c9 5331 && ! rld[reloadnum].out)
cb2afeb3 5332 continue;
dfe96118 5333 time2 = MAX_RECOG_OPERANDS * 4 + 1;
cb2afeb3
R
5334 break;
5335 case RELOAD_FOR_OPERAND_ADDRESS:
dfe96118
R
5336 time2 = MAX_RECOG_OPERANDS * 4 + 2;
5337 break;
5338 case RELOAD_FOR_INSN:
5339 time2 = MAX_RECOG_OPERANDS * 4 + 3;
cb2afeb3 5340 break;
f5470689 5341 case RELOAD_FOR_OUTPUT:
05d10675
BS
5342 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5343 instruction is executed. */
dfe96118 5344 time2 = MAX_RECOG_OPERANDS * 4 + 4;
f5470689 5345 break;
05d10675
BS
5346 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5347 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5348 value. */
cb2afeb3 5349 case RELOAD_FOR_OUTADDR_ADDRESS:
dfe96118
R
5350 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5351 && ignore_address_reloads
eceef4c9 5352 && ! rld[reloadnum].out)
cb2afeb3 5353 continue;
eceef4c9 5354 time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
dfe96118 5355 break;
f5470689 5356 case RELOAD_FOR_OUTPUT_ADDRESS:
eceef4c9 5357 time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
f5470689
R
5358 break;
5359 case RELOAD_OTHER:
dfe96118
R
5360 /* If there is no conflict in the input part, handle this
5361 like an output reload. */
eceef4c9 5362 if (! rld[i].in || rtx_equal_p (rld[i].in, value))
f5470689 5363 {
dfe96118 5364 time2 = MAX_RECOG_OPERANDS * 4 + 4;
f5470689
R
5365 break;
5366 }
dfe96118
R
5367 time2 = 1;
5368 /* RELOAD_OTHER might be live beyond instruction execution,
5369 but this is not obvious when we set time2 = 1. So check
5370 here if there might be a problem with the new reload
5371 clobbering the register used by the RELOAD_OTHER. */
5372 if (out)
5373 return 0;
5374 break;
f5470689 5375 default:
dfe96118 5376 return 0;
f5470689 5377 }
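/* The two reloads conflict if the new register is still live at TIME1
   when reload I's register becomes live at TIME2, unless reload I is a
   read-only use of the same VALUE; if the new reload also has an output
   part, any reload whose register becomes live at or after the insn
   itself conflicts as well.  */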
25963977 5378 if ((time1 >= time2
eceef4c9
BS
5379 && (! rld[i].in || rld[i].out
5380 || ! rtx_equal_p (rld[i].in, value)))
5381 || (out && rld[reloadnum].out_reg
701d55e8 5382 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
f5470689 5383 return 0;
6e684430 5384 }
6e684430
R
5385 }
5386 }
5387 return 1;
5388}
5389
32131a9c
RK
5390/* Find a spill register to use as a reload register for reload R.
5391 LAST_RELOAD is non-zero if this is the last reload for the insn being
5392 processed.
5393
eceef4c9 5394 Set rld[R].reg_rtx to the register allocated.
32131a9c
RK
5395
5396 If NOERROR is nonzero, we return 1 if successful,
5397 or 0 if we couldn't find a spill reg and we didn't change anything. */
5398
5399static int
7609e720
BS
5400allocate_reload_reg (chain, r, last_reload, noerror)
5401 struct insn_chain *chain;
32131a9c 5402 int r;
32131a9c
RK
5403 int last_reload;
5404 int noerror;
5405{
7609e720 5406 rtx insn = chain->insn;
03acd8f8 5407 int i, pass, count, regno;
32131a9c 5408 rtx new;
32131a9c
RK
5409
5410 /* If we put this reload ahead, thinking it is a group,
5411 then insist on finding a group. Otherwise we can grab a
a8fdc208 5412 reg that some other reload needs.
32131a9c
RK
5413 (That can happen when we have a 68000 DATA_OR_FP_REG
5414 which is a group of data regs or one fp reg.)
5415 We need not be so restrictive if there are no more reloads
5416 for this insn.
5417
5418 ??? Really it would be nicer to have smarter handling
5419 for that kind of reg class, where a problem like this is normal.
5420 Perhaps those classes should be avoided for reloading
5421 by use of more alternatives. */
5422
5423 int force_group = reload_nregs[r] > 1 && ! last_reload;
5424
5425 /* If we want a single register and haven't yet found one,
5426 take any reg in the right class and not in use.
5427 If we want a consecutive group, here is where we look for it.
5428
5429 We use two passes so we can first look for reload regs to
5430 reuse, which are already in use for other reloads in this insn,
5431 and only then use additional registers.
5432 I think that maximizing reuse is needed to make sure we don't
5433 run out of reload regs. Suppose we have three reloads, and
5434 reloads A and B can share regs. These need two regs.
5435 Suppose A and B are given different regs.
5436 That leaves none for C. */
5437 for (pass = 0; pass < 2; pass++)
5438 {
5439 /* I is the index in spill_regs.
5440 We advance it round-robin between insns to use all spill regs
5441 equally, so that inherited reloads have a chance
a5339699
RK
5442 of leapfrogging each other. Don't do this, however, when we have
5443 group needs and failure would be fatal; if we only have a relatively
5444 small number of spill registers, and more than one of them has
05d10675 5445 group needs, then by starting in the middle, we may end up
a5339699
RK
5446 allocating the first one in such a way that we are not left with
5447 sufficient groups to handle the rest. */
5448
5449 if (noerror || ! force_group)
5450 i = last_spill_reg;
5451 else
5452 i = -1;
05d10675 5453
a5339699 5454 for (count = 0; count < n_spills; count++)
32131a9c 5455 {
eceef4c9 5456 int class = (int) rld[r].class;
03acd8f8 5457 int regnum;
32131a9c 5458
03acd8f8
BS
5459 i++;
5460 if (i >= n_spills)
5461 i -= n_spills;
5462 regnum = spill_regs[i];
32131a9c 5463
eceef4c9
BS
5464 if ((reload_reg_free_p (regnum, rld[r].opnum,
5465 rld[r].when_needed)
5466 || (rld[r].in
05d10675
BS
5467 /* We check reload_reg_used to make sure we
5468 don't clobber the return register. */
03acd8f8
BS
5469 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5470 && reload_reg_free_for_value_p (regnum,
eceef4c9
BS
5471 rld[r].opnum,
5472 rld[r].when_needed,
5473 rld[r].in,
5474 rld[r].out, r, 1)))
03acd8f8
BS
5475 && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
5476 && HARD_REGNO_MODE_OK (regnum, reload_mode[r])
be7ae2a4
RK
5477 /* Look first for regs to share, then for unshared. But
5478 don't share regs used for inherited reloads; they are
5479 the ones we want to preserve. */
5480 && (pass
5481 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
03acd8f8 5482 regnum)
be7ae2a4 5483 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
03acd8f8 5484 regnum))))
32131a9c 5485 {
03acd8f8 5486 int nr = HARD_REGNO_NREGS (regnum, reload_mode[r]);
32131a9c
RK
5487 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5488 (on 68000) got us two FP regs. If NR is 1,
5489 we would reject both of them. */
5490 if (force_group)
eceef4c9 5491 nr = CLASS_MAX_NREGS (rld[r].class, reload_mode[r]);
32131a9c
RK
5492 /* If we need only one reg, we have already won. */
5493 if (nr == 1)
5494 {
5495 /* But reject a single reg if we demand a group. */
5496 if (force_group)
5497 continue;
5498 break;
5499 }
5500 /* Otherwise check that as many consecutive regs as we need
5501 are available here.
5502 Also, don't use for a group registers that are
5503 needed for nongroups. */
03acd8f8 5504 if (! TEST_HARD_REG_BIT (chain->counted_for_nongroups, regnum))
32131a9c
RK
5505 while (nr > 1)
5506 {
03acd8f8 5507 regno = regnum + nr - 1;
32131a9c
RK
5508 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5509 && spill_reg_order[regno] >= 0
eceef4c9
BS
5510 && reload_reg_free_p (regno, rld[r].opnum,
5511 rld[r].when_needed)
03acd8f8 5512 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups,
32131a9c
RK
5513 regno)))
5514 break;
5515 nr--;
5516 }
5517 if (nr == 1)
5518 break;
5519 }
5520 }
5521
5522 /* If we found something on pass 1, omit pass 2. */
5523 if (count < n_spills)
5524 break;
5525 }
5526
5527 /* We should have found a spill register by now. */
5528 if (count == n_spills)
5529 {
5530 if (noerror)
5531 return 0;
139fc12e 5532 goto failure;
32131a9c
RK
5533 }
5534
be7ae2a4
RK
5535 /* I is the index in SPILL_REG_RTX of the reload register we are to
5536 allocate. Get an rtx for it and find its register number. */
32131a9c
RK
5537
5538 new = spill_reg_rtx[i];
5539
5540 if (new == 0 || GET_MODE (new) != reload_mode[r])
be7ae2a4 5541 spill_reg_rtx[i] = new
38a448ca 5542 = gen_rtx_REG (reload_mode[r], spill_regs[i]);
05d10675 5543
32131a9c
RK
5544 regno = true_regnum (new);
5545
5546 /* Detect when the reload reg can't hold the reload mode.
5547 This used to be one `if', but Sequent compiler can't handle that. */
5548 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5549 {
5550 enum machine_mode test_mode = VOIDmode;
eceef4c9
BS
5551 if (rld[r].in)
5552 test_mode = GET_MODE (rld[r].in);
5553 /* If rld[r].in has VOIDmode, it means we will load it
32131a9c
RK
5554 in whatever mode the reload reg has: to wit, reload_mode[r].
5555 We have already tested that for validity. */
5556 /* Aside from that, we need to test that the expressions
5557 to reload from or into have modes which are valid for this
5558 reload register. Otherwise the reload insns would be invalid. */
eceef4c9 5559 if (! (rld[r].in != 0 && test_mode != VOIDmode
32131a9c 5560 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
eceef4c9
BS
5561 if (! (rld[r].out != 0
5562 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
be7ae2a4
RK
5563 {
5564 /* The reg is OK. */
5565 last_spill_reg = i;
5566
5567 /* Mark as in use for this insn the reload regs we use
5568 for this. */
eceef4c9
BS
5569 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5570 rld[r].when_needed, reload_mode[r]);
be7ae2a4 5571
eceef4c9 5572 rld[r].reg_rtx = new;
e6e52be0 5573 reload_spill_index[r] = spill_regs[i];
be7ae2a4
RK
5574 return 1;
5575 }
32131a9c
RK
5576 }
5577
5578 /* The reg is not OK. */
5579 if (noerror)
5580 return 0;
5581
139fc12e 5582 failure:
32131a9c
RK
5583 if (asm_noperands (PATTERN (insn)) < 0)
5584 /* It's the compiler's fault. */
a89b2cc4 5585 fatal_insn ("Could not find a spill register", insn);
32131a9c
RK
5586
5587 /* It's the user's fault; the operand's mode and constraint
5588 don't match. Disable this reload so we don't crash in final. */
5589 error_for_asm (insn,
5590 "`asm' operand constraint incompatible with operand size");
eceef4c9
BS
5591 rld[r].in = 0;
5592 rld[r].out = 0;
5593 rld[r].reg_rtx = 0;
5594 rld[r].optional = 1;
5595 rld[r].secondary_p = 1;
32131a9c
RK
5596
5597 return 1;
5598}
5599\f
5600/* Assign hard reg targets for the pseudo-registers we must reload
5601 into hard regs for this insn.
5602 Also output the instructions to copy them in and out of the hard regs.
5603
5604 For machines with register classes, we are responsible for
5605 finding a reload reg in the proper class. */
5606
5607static void
03acd8f8 5608choose_reload_regs (chain)
7609e720 5609 struct insn_chain *chain;
32131a9c 5610{
7609e720 5611 rtx insn = chain->insn;
32131a9c
RK
5612 register int i, j;
5613 int max_group_size = 1;
5614 enum reg_class group_class = NO_REGS;
5615 int inheritance;
cb2afeb3 5616 int pass;
32131a9c
RK
5617
5618 rtx save_reload_reg_rtx[MAX_RELOADS];
5619 char save_reload_inherited[MAX_RELOADS];
5620 rtx save_reload_inheritance_insn[MAX_RELOADS];
5621 rtx save_reload_override_in[MAX_RELOADS];
5622 int save_reload_spill_index[MAX_RELOADS];
5623 HARD_REG_SET save_reload_reg_used;
546b63fb 5624 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
47c8cf91 5625 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
546b63fb 5626 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
47c8cf91 5627 HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
546b63fb
RK
5628 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5629 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c 5630 HARD_REG_SET save_reload_reg_used_in_op_addr;
893bc853 5631 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
546b63fb
RK
5632 HARD_REG_SET save_reload_reg_used_in_insn;
5633 HARD_REG_SET save_reload_reg_used_in_other_addr;
32131a9c
RK
5634 HARD_REG_SET save_reload_reg_used_at_all;
5635
5636 bzero (reload_inherited, MAX_RELOADS);
4c9a05bc
RK
5637 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5638 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
32131a9c
RK
5639
5640 CLEAR_HARD_REG_SET (reload_reg_used);
5641 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
32131a9c 5642 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
893bc853 5643 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
546b63fb
RK
5644 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5645 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
32131a9c 5646
f1db3576
JL
5647 CLEAR_HARD_REG_SET (reg_used_in_insn);
5648 {
5649 HARD_REG_SET tmp;
5650 REG_SET_TO_HARD_REG_SET (tmp, chain->live_before);
5651 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5652 REG_SET_TO_HARD_REG_SET (tmp, chain->live_after);
5653 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5654 compute_use_by_pseudos (&reg_used_in_insn, chain->live_before);
5655 compute_use_by_pseudos (&reg_used_in_insn, chain->live_after);
5656 }
546b63fb
RK
5657 for (i = 0; i < reload_n_operands; i++)
5658 {
5659 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5660 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5661 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
47c8cf91 5662 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
546b63fb 5663 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
47c8cf91 5664 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
546b63fb 5665 }
32131a9c 5666
03acd8f8 5667 IOR_COMPL_HARD_REG_SET (reload_reg_used, chain->used_spill_regs);
05d10675 5668
32131a9c
RK
5669#if 0 /* Not needed, now that we can always retry without inheritance. */
5670 /* See if we have more mandatory reloads than spill regs.
5671 If so, then we cannot risk optimizations that could prevent
a8fdc208 5672 reloads from sharing one spill register.
32131a9c
RK
5673
5674 Since we will try finding a better register than reload_reg_rtx
5675 unless it is equal to reload_in or reload_out, count such reloads. */
5676
5677 {
03acd8f8 5678 int tem = 0;
32131a9c 5679 for (j = 0; j < n_reloads; j++)
eceef4c9
BS
5680 if (! rld[j].optional
5681 && (rld[j].in != 0 || rld[j].out != 0 || rld[j].secondary_p)
5682 && (rld[j].reg_rtx == 0
5683 || (! rtx_equal_p (rld[j].reg_rtx, rld[j].in)
5684 && ! rtx_equal_p (rld[j].reg_rtx, rld[j].out))))
32131a9c
RK
5685 tem++;
5686 if (tem > n_spills)
5687 must_reuse = 1;
5688 }
5689#endif
5690
32131a9c
RK
5691 /* In order to be certain of getting the registers we need,
5692 we must sort the reloads into order of increasing register class.
5693 Then our grabbing of reload registers will parallel the process
a8fdc208 5694 that provided the reload registers.
32131a9c
RK
5695
5696 Also note whether any of the reloads wants a consecutive group of regs.
5697 If so, record the maximum size of the group desired and what
5698 register class contains all the groups needed by this insn. */
5699
5700 for (j = 0; j < n_reloads; j++)
5701 {
5702 reload_order[j] = j;
5703 reload_spill_index[j] = -1;
5704
eceef4c9
BS
5705 reload_mode[j] = ((rld[j].inmode == VOIDmode
5706 || (GET_MODE_SIZE (rld[j].outmode)
5707 > GET_MODE_SIZE (rld[j].inmode)))
5708 ? rld[j].outmode : rld[j].inmode);
32131a9c 5709
eceef4c9 5710 reload_nregs[j] = CLASS_MAX_NREGS (rld[j].class, reload_mode[j]);
32131a9c
RK
5711
5712 if (reload_nregs[j] > 1)
5713 {
5714 max_group_size = MAX (reload_nregs[j], max_group_size);
eceef4c9 5715 group_class = reg_class_superunion[(int)rld[j].class][(int)group_class];
32131a9c
RK
5716 }
5717
eceef4c9 5718 save_reload_reg_rtx[j] = rld[j].reg_rtx;
32131a9c
RK
5719 /* If we have already decided to use a certain register,
5720 don't use it in another way. */
eceef4c9
BS
5721 if (rld[j].reg_rtx)
5722 mark_reload_reg_in_use (REGNO (rld[j].reg_rtx), rld[j].opnum,
5723 rld[j].when_needed, reload_mode[j]);
32131a9c
RK
5724 }
5725
5726 if (n_reloads > 1)
5727 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5728
32131a9c 5729 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4c9a05bc
RK
5730 bcopy ((char *) reload_inheritance_insn,
5731 (char *) save_reload_inheritance_insn,
32131a9c 5732 sizeof reload_inheritance_insn);
4c9a05bc 5733 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
32131a9c 5734 sizeof reload_override_in);
4c9a05bc 5735 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
32131a9c
RK
5736 sizeof reload_spill_index);
5737 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5738 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
32131a9c
RK
5739 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5740 reload_reg_used_in_op_addr);
893bc853
RK
5741
5742 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5743 reload_reg_used_in_op_addr_reload);
5744
546b63fb
RK
5745 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5746 reload_reg_used_in_insn);
5747 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5748 reload_reg_used_in_other_addr);
5749
5750 for (i = 0; i < reload_n_operands; i++)
5751 {
5752 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5753 reload_reg_used_in_output[i]);
5754 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5755 reload_reg_used_in_input[i]);
5756 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5757 reload_reg_used_in_input_addr[i]);
47c8cf91
ILT
5758 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
5759 reload_reg_used_in_inpaddr_addr[i]);
546b63fb
RK
5760 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5761 reload_reg_used_in_output_addr[i]);
47c8cf91
ILT
5762 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
5763 reload_reg_used_in_outaddr_addr[i]);
546b63fb 5764 }
32131a9c 5765
58b1581b
RS
5766 /* If -O, try first with inheritance, then turning it off.
5767 If not -O, don't do inheritance.
5768 Using inheritance when not optimizing leads to paradoxes
5769 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5770 because one side of the comparison might be inherited. */
32131a9c 5771
58b1581b 5772 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
32131a9c
RK
5773 {
5774 /* Process the reloads in order of preference just found.
5775 Beyond this point, subregs can be found in reload_reg_rtx.
5776
5777 This used to look for an existing reloaded home for all
5778 of the reloads, and only then perform any new reloads.
5779 But that could lose if the reloads were done out of reg-class order
5780 because a later reload with a looser constraint might have an old
5781 home in a register needed by an earlier reload with a tighter constraint.
5782
5783 To solve this, we make two passes over the reloads, in the order
5784 described above. In the first pass we try to inherit a reload
5785 from a previous insn. If there is a later reload that needs a
5786 class that is a proper subset of the class being processed, we must
5787 also allocate a spill register during the first pass.
5788
5789 Then make a second pass over the reloads to allocate any reloads
5790 that haven't been given registers yet. */
5791
be7ae2a4
RK
5792 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5793
32131a9c
RK
5794 for (j = 0; j < n_reloads; j++)
5795 {
5796 register int r = reload_order[j];
8593b745 5797 rtx search_equiv = NULL_RTX;
32131a9c
RK
5798
5799 /* Ignore reloads that got marked inoperative. */
eceef4c9
BS
5800 if (rld[r].out == 0 && rld[r].in == 0
5801 && ! rld[r].secondary_p)
32131a9c
RK
5802 continue;
5803
b29514ee 5804 /* If find_reloads chose to use reload_in or reload_out as a reload
b080c137
RK
5805 register, we don't need to choose one. Otherwise, try even if it
5806 found one since we might save an insn if we find the value lying
b29514ee
R
5807 around.
5808 Try also when reload_in is a pseudo without a hard reg. */
eceef4c9
BS
5809 if (rld[r].in != 0 && rld[r].reg_rtx != 0
5810 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
5811 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
5812 && GET_CODE (rld[r].in) != MEM
5813 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
32131a9c
RK
5814 continue;
5815
5816#if 0 /* No longer needed for correct operation.
5817 It might give better code, or might not; worth an experiment? */
5818 /* If this is an optional reload, we can't inherit from earlier insns
5819 until we are sure that any non-optional reloads have been allocated.
5820 The following code takes advantage of the fact that optional reloads
5821 are at the end of reload_order. */
eceef4c9 5822 if (rld[r].optional != 0)
32131a9c 5823 for (i = 0; i < j; i++)
eceef4c9
BS
5824 if ((rld[reload_order[i]].out != 0
5825 || rld[reload_order[i]].in != 0
5826 || rld[reload_order[i]].secondary_p)
5827 && ! rld[reload_order[i]].optional
5828 && rld[reload_order[i]].reg_rtx == 0)
7609e720 5829 allocate_reload_reg (chain, reload_order[i], 0, inheritance);
32131a9c
RK
5830#endif
5831
5832 /* First see if this pseudo is already available as reloaded
5833 for a previous insn. We cannot try to inherit for reloads
5834 that are smaller than the maximum number of registers needed
5835 for groups unless the register we would allocate cannot be used
5836 for the groups.
5837
5838 We could check here to see if this is a secondary reload for
5839 an object that is already in a register of the desired class.
5840 This would avoid the need for the secondary reload register.
5841 But this is complex because we can't easily determine what
b080c137
RK
5842 objects might want to be loaded via this reload. So let a
5843 register be allocated here. In `emit_reload_insns' we suppress
5844 one of the loads in the case described above. */
32131a9c
RK
5845
5846 if (inheritance)
5847 {
cb2afeb3 5848 int word = 0;
32131a9c 5849 register int regno = -1;
6a651371 5850 enum machine_mode mode = VOIDmode;
32131a9c 5851
eceef4c9 5852 if (rld[r].in == 0)
32131a9c 5853 ;
eceef4c9 5854 else if (GET_CODE (rld[r].in) == REG)
db660765 5855 {
eceef4c9
BS
5856 regno = REGNO (rld[r].in);
5857 mode = GET_MODE (rld[r].in);
db660765 5858 }
eceef4c9 5859 else if (GET_CODE (rld[r].in_reg) == REG)
db660765 5860 {
eceef4c9
BS
5861 regno = REGNO (rld[r].in_reg);
5862 mode = GET_MODE (rld[r].in_reg);
db660765 5863 }
eceef4c9
BS
5864 else if (GET_CODE (rld[r].in_reg) == SUBREG
5865 && GET_CODE (SUBREG_REG (rld[r].in_reg)) == REG)
b60a8416 5866 {
eceef4c9
BS
5867 word = SUBREG_WORD (rld[r].in_reg);
5868 regno = REGNO (SUBREG_REG (rld[r].in_reg));
cb2afeb3
R
5869 if (regno < FIRST_PSEUDO_REGISTER)
5870 regno += word;
eceef4c9 5871 mode = GET_MODE (rld[r].in_reg);
cb2afeb3
R
5872 }
5873#ifdef AUTO_INC_DEC
eceef4c9
BS
5874 else if ((GET_CODE (rld[r].in_reg) == PRE_INC
5875 || GET_CODE (rld[r].in_reg) == PRE_DEC
5876 || GET_CODE (rld[r].in_reg) == POST_INC
5877 || GET_CODE (rld[r].in_reg) == POST_DEC)
5878 && GET_CODE (XEXP (rld[r].in_reg, 0)) == REG)
cb2afeb3 5879 {
eceef4c9
BS
5880 regno = REGNO (XEXP (rld[r].in_reg, 0));
5881 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
5882 rld[r].out = rld[r].in;
b60a8416 5883 }
cb2afeb3 5884#endif
32131a9c
RK
5885#if 0
5886 /* This won't work, since REGNO can be a pseudo reg number.
5887 Also, it takes much more hair to keep track of all the things
5888 that can invalidate an inherited reload of part of a pseudoreg. */
eceef4c9
BS
5889 else if (GET_CODE (rld[r].in) == SUBREG
5890 && GET_CODE (SUBREG_REG (rld[r].in)) == REG)
5891 regno = REGNO (SUBREG_REG (rld[r].in)) + SUBREG_WORD (rld[r].in);
32131a9c
RK
5892#endif
5893
5894 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5895 {
eceef4c9 5896 enum reg_class class = rld[r].class, last_class;
cb2afeb3 5897 rtx last_reg = reg_last_reload_reg[regno];
05d10675 5898
cb2afeb3
R
5899 i = REGNO (last_reg) + word;
5900 last_class = REGNO_REG_CLASS (i);
5901 if ((GET_MODE_SIZE (GET_MODE (last_reg))
5902 >= GET_MODE_SIZE (mode) + word * UNITS_PER_WORD)
5903 && reg_reloaded_contents[i] == regno
e6e52be0 5904 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
e6e52be0 5905 && HARD_REGNO_MODE_OK (i, reload_mode[r])
cb2afeb3
R
5906 && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
5907 /* Even if we can't use this register as a reload
5908 register, we might use it for reload_override_in,
5909 if copying it to the desired class is cheap
5910 enough. */
5911 || ((REGISTER_MOVE_COST (last_class, class)
5912 < MEMORY_MOVE_COST (mode, class, 1))
5913#ifdef SECONDARY_INPUT_RELOAD_CLASS
5914 && (SECONDARY_INPUT_RELOAD_CLASS (class, mode,
5915 last_reg)
5916 == NO_REGS)
5917#endif
5918#ifdef SECONDARY_MEMORY_NEEDED
5919 && ! SECONDARY_MEMORY_NEEDED (last_class, class,
5920 mode)
5921#endif
5922 ))
5923
32131a9c
RK
5924 && (reload_nregs[r] == max_group_size
5925 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
e6e52be0 5926 i))
eceef4c9
BS
5927 && reload_reg_free_for_value_p (i, rld[r].opnum,
5928 rld[r].when_needed,
5929 rld[r].in,
dfe96118 5930 const0_rtx, r, 1))
32131a9c
RK
5931 {
5932 /* If a group is needed, verify that all the subsequent
0f41302f 5933 registers still have their values intact. */
32131a9c 5934 int nr
e6e52be0 5935 = HARD_REGNO_NREGS (i, reload_mode[r]);
32131a9c
RK
5936 int k;
5937
5938 for (k = 1; k < nr; k++)
e6e52be0
R
5939 if (reg_reloaded_contents[i + k] != regno
5940 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
32131a9c
RK
5941 break;
5942
5943 if (k == nr)
5944 {
c74fa651
RS
5945 int i1;
5946
cb2afeb3
R
5947 last_reg = (GET_MODE (last_reg) == mode
5948 ? last_reg : gen_rtx_REG (mode, i));
5949
c74fa651
RS
5950 /* We found a register that contains the
5951 value we need. If this register is the
5952 same as an `earlyclobber' operand of the
5953 current insn, just mark it as a place to
5954 reload from since we can't use it as the
5955 reload register itself. */
5956
5957 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5958 if (reg_overlap_mentioned_for_reload_p
5959 (reg_last_reload_reg[regno],
5960 reload_earlyclobbers[i1]))
5961 break;
5962
8908158d 5963 if (i1 != n_earlyclobbers
dfe96118 5964 || ! (reload_reg_free_for_value_p
eceef4c9
BS
5965 (i, rld[r].opnum, rld[r].when_needed,
5966 rld[r].in, rld[r].out, r, 1))
e6e52be0 5967 /* Don't use it if we'd clobber a pseudo reg. */
f1db3576 5968 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
eceef4c9 5969 && rld[r].out
e6e52be0 5970 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
0c7f2259
R
5971 /* Don't clobber the frame pointer. */
5972 || (i == HARD_FRAME_POINTER_REGNUM
eceef4c9 5973 && rld[r].out)
8908158d
RS
5974 /* Don't really use the inherited spill reg
5975 if we need it wider than we've got it. */
5976 || (GET_MODE_SIZE (reload_mode[r])
b29514ee 5977 > GET_MODE_SIZE (mode))
eceef4c9 5978 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
cb2afeb3
R
5979 i)
5980
b29514ee
R
5981 /* If find_reloads chose reload_out as reload
5982 register, stay with it - that leaves the
5983 inherited register for subsequent reloads. */
eceef4c9
BS
5984 || (rld[r].out && rld[r].reg_rtx
5985 && rtx_equal_p (rld[r].out,
5986 rld[r].reg_rtx)))
cb2afeb3
R
5987 {
5988 reload_override_in[r] = last_reg;
5989 reload_inheritance_insn[r]
5990 = reg_reloaded_insn[i];
5991 }
c74fa651
RS
5992 else
5993 {
54c40e68 5994 int k;
c74fa651
RS
5995 /* We can use this as a reload reg. */
5996 /* Mark the register as in use for this part of
5997 the insn. */
e6e52be0 5998 mark_reload_reg_in_use (i,
eceef4c9
BS
5999 rld[r].opnum,
6000 rld[r].when_needed,
c74fa651 6001 reload_mode[r]);
eceef4c9 6002 rld[r].reg_rtx = last_reg;
c74fa651
RS
6003 reload_inherited[r] = 1;
6004 reload_inheritance_insn[r]
6005 = reg_reloaded_insn[i];
6006 reload_spill_index[r] = i;
54c40e68
RS
6007 for (k = 0; k < nr; k++)
6008 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
e6e52be0 6009 i + k);
c74fa651 6010 }
32131a9c
RK
6011 }
6012 }
6013 }
6014 }
6015
6016 /* Here's another way to see if the value is already lying around. */
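 /* If this reload has no output part and only reads a constant, PLUS,
    REG or MEM, remember the value in SEARCH_EQUIV; find_equiv_reg below
    will then look for a hard register that already holds it.  */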
6017 if (inheritance
eceef4c9 6018 && rld[r].in != 0
32131a9c 6019 && ! reload_inherited[r]
eceef4c9
BS
6020 && rld[r].out == 0
6021 && (CONSTANT_P (rld[r].in)
6022 || GET_CODE (rld[r].in) == PLUS
6023 || GET_CODE (rld[r].in) == REG
6024 || GET_CODE (rld[r].in) == MEM)
32131a9c 6025 && (reload_nregs[r] == max_group_size
eceef4c9
BS
6026 || ! reg_classes_intersect_p (rld[r].class, group_class)))
6027 search_equiv = rld[r].in;
8593b745
R
6028 /* If this is an output reload from a simple move insn, look
6029 if an equivalence for the input is available. */
eceef4c9 6030 else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
8593b745
R
6031 {
6032 rtx set = single_set (insn);
6033
6034 if (set
eceef4c9 6035 && rtx_equal_p (rld[r].out, SET_DEST (set))
8593b745
R
6036 && CONSTANT_P (SET_SRC (set)))
6037 search_equiv = SET_SRC (set);
6038 }
6039
6040 if (search_equiv)
32131a9c
RK
6041 {
6042 register rtx equiv
eceef4c9 6043 = find_equiv_reg (search_equiv, insn, rld[r].class,
fb3821f7 6044 -1, NULL_PTR, 0, reload_mode[r]);
32131a9c
RK
6045 int regno;
6046
6047 if (equiv != 0)
6048 {
6049 if (GET_CODE (equiv) == REG)
6050 regno = REGNO (equiv);
6051 else if (GET_CODE (equiv) == SUBREG)
6052 {
f8a9e02b
RK
6053 /* This must be a SUBREG of a hard register.
6054 Make a new REG since this might be used in an
6055 address and not all machines support SUBREGs
6056 there. */
6057 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
38a448ca 6058 equiv = gen_rtx_REG (reload_mode[r], regno);
32131a9c
RK
6059 }
6060 else
6061 abort ();
6062 }
6063
6064 /* If we found a spill reg, reject it unless it is free
6065 and of the desired class. */
6066 if (equiv != 0
cb2afeb3 6067 && ((TEST_HARD_REG_BIT (reload_reg_used_at_all, regno)
eceef4c9
BS
6068 && ! reload_reg_free_for_value_p (regno, rld[r].opnum,
6069 rld[r].when_needed,
6070 rld[r].in,
6071 rld[r].out, r, 1))
6072 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
32131a9c
RK
6073 regno)))
6074 equiv = 0;
6075
32131a9c
RK
6076 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
6077 equiv = 0;
6078
6079 /* We found a register that contains the value we need.
6080 If this register is the same as an `earlyclobber' operand
6081 of the current insn, just mark it as a place to reload from
6082 since we can't use it as the reload register itself. */
6083
6084 if (equiv != 0)
6085 for (i = 0; i < n_earlyclobbers; i++)
bfa30b22
RK
6086 if (reg_overlap_mentioned_for_reload_p (equiv,
6087 reload_earlyclobbers[i]))
32131a9c
RK
6088 {
6089 reload_override_in[r] = equiv;
6090 equiv = 0;
6091 break;
6092 }
6093
3c785e47
R
6094 /* If the equiv register we have found is explicitly clobbered
 6095	 in the current insn, the reload type determines whether we can
 6096	 use it, use it only for reload_override_in, or not use it at all.
6097 In particular, we then can't use EQUIV for a
6098 RELOAD_FOR_OUTPUT_ADDRESS reload. */
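 /* The address-type reloads in the first group of cases below are used
    only before the insn itself, so the clobber does not disturb them and
    EQUIV stays usable.  The second group may still copy from EQUIV before
    the insn via reload_override_in, but may not keep the value in it;
    all remaining reload types cannot use EQUIV at all.  */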
32131a9c
RK
6099
6100 if (equiv != 0 && regno_clobbered_p (regno, insn))
6101 {
eceef4c9 6102 switch (rld[r].when_needed)
3c785e47
R
6103 {
6104 case RELOAD_FOR_OTHER_ADDRESS:
6105 case RELOAD_FOR_INPADDR_ADDRESS:
6106 case RELOAD_FOR_INPUT_ADDRESS:
6107 case RELOAD_FOR_OPADDR_ADDR:
6108 break;
6109 case RELOAD_OTHER:
6110 case RELOAD_FOR_INPUT:
6111 case RELOAD_FOR_OPERAND_ADDRESS:
6112 reload_override_in[r] = equiv;
6113 /* Fall through. */
6114 default:
6115 equiv = 0;
6116 break;
6117 }
32131a9c
RK
6118 }
6119
6120 /* If we found an equivalent reg, say no code need be generated
6121 to load it, and use it as our reload reg. */
3ec2ea3e 6122 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
32131a9c 6123 {
100338df
JL
6124 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
6125 int k;
eceef4c9 6126 rld[r].reg_rtx = equiv;
32131a9c 6127 reload_inherited[r] = 1;
100338df 6128
91d7e7ac
R
6129 /* If reg_reloaded_valid is not set for this register,
6130 there might be a stale spill_reg_store lying around.
6131 We must clear it, since otherwise emit_reload_insns
6132 might delete the store. */
6133 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6134 spill_reg_store[regno] = NULL_RTX;
100338df
JL
6135 /* If any of the hard registers in EQUIV are spill
6136 registers, mark them as in use for this insn. */
6137 for (k = 0; k < nr; k++)
be7ae2a4 6138 {
100338df
JL
6139 i = spill_reg_order[regno + k];
6140 if (i >= 0)
6141 {
eceef4c9
BS
6142 mark_reload_reg_in_use (regno, rld[r].opnum,
6143 rld[r].when_needed,
100338df
JL
6144 reload_mode[r]);
6145 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6146 regno + k);
6147 }
be7ae2a4 6148 }
32131a9c
RK
6149 }
6150 }
6151
6152 /* If we found a register to use already, or if this is an optional
6153 reload, we are done. */
eceef4c9 6154 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
32131a9c
RK
6155 continue;
6156
6157#if 0 /* No longer needed for correct operation. Might or might not
6158 give better code on the average. Want to experiment? */
6159
6160 /* See if there is a later reload that has a class different from our
 6161	 class that intersects our class or that requires fewer registers
6162 than our reload. If so, we must allocate a register to this
6163 reload now, since that reload might inherit a previous reload
6164 and take the only available register in our class. Don't do this
6165 for optional reloads since they will force all previous reloads
6166 to be allocated. Also don't do this for reloads that have been
6167 turned off. */
6168
6169 for (i = j + 1; i < n_reloads; i++)
6170 {
6171 int s = reload_order[i];
6172
eceef4c9
BS
6173 if ((rld[s].in == 0 && rld[s].out == 0
6174 && ! rld[s].secondary_p)
6175 || rld[s].optional)
32131a9c
RK
6176 continue;
6177
eceef4c9
BS
6178 if ((rld[s].class != rld[r].class
6179 && reg_classes_intersect_p (rld[r].class,
6180 rld[s].class))
32131a9c 6181 || reload_nregs[s] < reload_nregs[r])
05d10675 6182 break;
32131a9c
RK
6183 }
6184
6185 if (i == n_reloads)
6186 continue;
6187
7609e720 6188 allocate_reload_reg (chain, r, j == n_reloads - 1, inheritance);
32131a9c
RK
6189#endif
6190 }
6191
6192 /* Now allocate reload registers for anything non-optional that
6193 didn't get one yet. */
6194 for (j = 0; j < n_reloads; j++)
6195 {
6196 register int r = reload_order[j];
6197
6198 /* Ignore reloads that got marked inoperative. */
eceef4c9 6199 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
32131a9c
RK
6200 continue;
6201
6202 /* Skip reloads that already have a register allocated or are
0f41302f 6203 optional. */
eceef4c9 6204 if (rld[r].reg_rtx != 0 || rld[r].optional)
32131a9c
RK
6205 continue;
6206
7609e720 6207 if (! allocate_reload_reg (chain, r, j == n_reloads - 1, inheritance))
32131a9c
RK
6208 break;
6209 }
6210
6211 /* If that loop got all the way, we have won. */
6212 if (j == n_reloads)
6213 break;
6214
32131a9c
RK
6215 /* Loop around and try without any inheritance. */
6216 /* First undo everything done by the failed attempt
6217 to allocate with inheritance. */
eceef4c9
BS
6218 for (i = 0; i < n_reloads; i++)
6219 rld[i].reg_rtx = save_reload_reg_rtx[i];
4c9a05bc
RK
6220 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
6221 sizeof reload_inherited);
6222 bcopy ((char *) save_reload_inheritance_insn,
6223 (char *) reload_inheritance_insn,
32131a9c 6224 sizeof reload_inheritance_insn);
4c9a05bc 6225 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
32131a9c 6226 sizeof reload_override_in);
4c9a05bc 6227 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
32131a9c
RK
6228 sizeof reload_spill_index);
6229 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
6230 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
32131a9c
RK
6231 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
6232 save_reload_reg_used_in_op_addr);
893bc853
RK
6233 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
6234 save_reload_reg_used_in_op_addr_reload);
546b63fb
RK
6235 COPY_HARD_REG_SET (reload_reg_used_in_insn,
6236 save_reload_reg_used_in_insn);
6237 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
6238 save_reload_reg_used_in_other_addr);
6239
6240 for (i = 0; i < reload_n_operands; i++)
6241 {
6242 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
6243 save_reload_reg_used_in_input[i]);
6244 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
6245 save_reload_reg_used_in_output[i]);
6246 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
6247 save_reload_reg_used_in_input_addr[i]);
47c8cf91
ILT
6248 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
6249 save_reload_reg_used_in_inpaddr_addr[i]);
546b63fb
RK
6250 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
6251 save_reload_reg_used_in_output_addr[i]);
47c8cf91
ILT
6252 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
6253 save_reload_reg_used_in_outaddr_addr[i]);
546b63fb 6254 }
32131a9c
RK
6255 }
6256
6257 /* If we thought we could inherit a reload, because it seemed that
6258 nothing else wanted the same reload register earlier in the insn,
cb2afeb3
R
6259 verify that assumption, now that all reloads have been assigned.
6260 Likewise for reloads where reload_override_in has been set. */
32131a9c 6261
cb2afeb3
R
6262 /* If doing expensive optimizations, do one preliminary pass that doesn't
6263 cancel any inheritance, but removes reloads that have been needed only
6264 for reloads that we know can be inherited. */
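 /* PASS is 1 for that preliminary, removal-only pass and 0 for the final
    pass, which may also cancel an inheritance or override that proves
    invalid.  A successful removal during the preliminary pass sets PASS
    to 2 so that the preliminary pass is repeated.  */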
6265 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
32131a9c 6266 {
cb2afeb3 6267 for (j = 0; j < n_reloads; j++)
029b38ff 6268 {
cb2afeb3
R
6269 register int r = reload_order[j];
6270 rtx check_reg;
eceef4c9
BS
6271 if (reload_inherited[r] && rld[r].reg_rtx)
6272 check_reg = rld[r].reg_rtx;
cb2afeb3
R
6273 else if (reload_override_in[r]
6274 && (GET_CODE (reload_override_in[r]) == REG
05d10675 6275 || GET_CODE (reload_override_in[r]) == SUBREG))
cb2afeb3
R
6276 check_reg = reload_override_in[r];
6277 else
6278 continue;
dfe96118 6279 if (! reload_reg_free_for_value_p (true_regnum (check_reg),
eceef4c9
BS
6280 rld[r].opnum,
6281 rld[r].when_needed,
6282 rld[r].in,
05d10675 6283 (reload_inherited[r]
eceef4c9 6284 ? rld[r].out : const0_rtx),
dfe96118 6285 r, 1))
029b38ff 6286 {
cb2afeb3
R
6287 if (pass)
6288 continue;
6289 reload_inherited[r] = 0;
6290 reload_override_in[r] = 0;
029b38ff 6291 }
cb2afeb3
R
6292 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6293 reload_override_in, then we do not need its related
6294 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6295 likewise for other reload types.
6296 We handle this by removing a reload when its only replacement
6297 is mentioned in reload_in of the reload we are going to inherit.
 6298	 Auto_inc expressions are a special case; even if the input is
6299 inherited, we still need the address for the output. We can
fe92fe26 6300 recognize them because they have RELOAD_OUT set to RELOAD_IN.
cb2afeb3
R
 6301	 If we succeeded in removing some reload and we are doing a preliminary
6302 pass just to remove such reloads, make another pass, since the
6303 removal of one reload might allow us to inherit another one. */
eceef4c9
BS
6304 else if (rld[r].in
6305 && rld[r].out != rld[r].in
6306 && remove_address_replacements (rld[r].in) && pass)
cb2afeb3 6307 pass = 2;
32131a9c
RK
6308 }
6309 }
6310
6311 /* Now that reload_override_in is known valid,
6312 actually override reload_in. */
6313 for (j = 0; j < n_reloads; j++)
6314 if (reload_override_in[j])
eceef4c9 6315 rld[j].in = reload_override_in[j];
32131a9c
RK
6316
6317 /* If this reload won't be done because it has been cancelled or is
6318 optional and not inherited, clear reload_reg_rtx so other
6319 routines (such as subst_reloads) don't get confused. */
6320 for (j = 0; j < n_reloads; j++)
eceef4c9
BS
6321 if (rld[j].reg_rtx != 0
6322 && ((rld[j].optional && ! reload_inherited[j])
6323 || (rld[j].in == 0 && rld[j].out == 0
6324 && ! rld[j].secondary_p)))
be7ae2a4 6325 {
eceef4c9 6326 int regno = true_regnum (rld[j].reg_rtx);
be7ae2a4
RK
6327
6328 if (spill_reg_order[regno] >= 0)
eceef4c9
BS
6329 clear_reload_reg_in_use (regno, rld[j].opnum,
6330 rld[j].when_needed, reload_mode[j]);
6331 rld[j].reg_rtx = 0;
be7ae2a4 6332 }
32131a9c
RK
6333
6334 /* Record which pseudos and which spill regs have output reloads. */
6335 for (j = 0; j < n_reloads; j++)
6336 {
6337 register int r = reload_order[j];
6338
6339 i = reload_spill_index[r];
6340
e6e52be0 6341 /* I is nonneg if this reload uses a register.
eceef4c9 6342 If rld[r].reg_rtx is 0, this is an optional reload
32131a9c 6343 that we opted to ignore. */
eceef4c9
BS
6344 if (rld[r].out_reg != 0 && GET_CODE (rld[r].out_reg) == REG
6345 && rld[r].reg_rtx != 0)
32131a9c 6346 {
eceef4c9 6347 register int nregno = REGNO (rld[r].out_reg);
372e033b
RS
6348 int nr = 1;
6349
6350 if (nregno < FIRST_PSEUDO_REGISTER)
6351 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
32131a9c
RK
6352
6353 while (--nr >= 0)
372e033b
RS
6354 reg_has_output_reload[nregno + nr] = 1;
6355
6356 if (i >= 0)
32131a9c 6357 {
e6e52be0 6358 nr = HARD_REGNO_NREGS (i, reload_mode[r]);
372e033b 6359 while (--nr >= 0)
e6e52be0 6360 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
32131a9c
RK
6361 }
6362
eceef4c9
BS
6363 if (rld[r].when_needed != RELOAD_OTHER
6364 && rld[r].when_needed != RELOAD_FOR_OUTPUT
6365 && rld[r].when_needed != RELOAD_FOR_INSN)
32131a9c
RK
6366 abort ();
6367 }
6368 }
6369}
cb2afeb3
R
6370
6371/* Deallocate the reload register for reload R. This is called from
6372 remove_address_replacements. */
6373void
6374deallocate_reload_reg (r)
6375 int r;
6376{
6377 int regno;
6378
eceef4c9 6379 if (! rld[r].reg_rtx)
cb2afeb3 6380 return;
eceef4c9
BS
6381 regno = true_regnum (rld[r].reg_rtx);
6382 rld[r].reg_rtx = 0;
cb2afeb3 6383 if (spill_reg_order[regno] >= 0)
eceef4c9 6384 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
cb2afeb3
R
6385 reload_mode[r]);
6386 reload_spill_index[r] = -1;
6387}
32131a9c 6388\f
e9a25f70 6389/* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
546b63fb
RK
6390 reloads of the same item for fear that we might not have enough reload
6391 registers. However, normally they will get the same reload register
05d10675 6392 and hence actually need not be loaded twice.
546b63fb
RK
6393
6394 Here we check for the most common case of this phenomenon: when we have
 6395   a number of reloads for the same object, each of which was allocated
6396 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6397 reload, and is not modified in the insn itself. If we find such,
6398 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6399 This will not increase the number of spill registers needed and will
6400 prevent redundant code. */
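/* For instance, an insn whose operands both read the same spilled pseudo
   may get two RELOAD_FOR_INPUT reloads that were assigned the same reload
   register; merging them into a single RELOAD_OTHER reload loads the
   value only once, before the insn.  */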
6401
546b63fb
RK
6402static void
6403merge_assigned_reloads (insn)
6404 rtx insn;
6405{
6406 int i, j;
6407
6408 /* Scan all the reloads looking for ones that only load values and
6409 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6410 assigned and not modified by INSN. */
6411
6412 for (i = 0; i < n_reloads; i++)
6413 {
d668e863
R
6414 int conflicting_input = 0;
6415 int max_input_address_opnum = -1;
6416 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6417
eceef4c9
BS
6418 if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
6419 || rld[i].out != 0 || rld[i].reg_rtx == 0
6420 || reg_set_p (rld[i].reg_rtx, insn))
546b63fb
RK
6421 continue;
6422
6423 /* Look at all other reloads. Ensure that the only use of this
6424 reload_reg_rtx is in a reload that just loads the same value
6425 as we do. Note that any secondary reloads must be of the identical
6426 class since the values, modes, and result registers are the
6427 same, so we need not do anything with any secondary reloads. */
6428
6429 for (j = 0; j < n_reloads; j++)
6430 {
eceef4c9
BS
6431 if (i == j || rld[j].reg_rtx == 0
6432 || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
6433 rld[i].reg_rtx))
546b63fb
RK
6434 continue;
6435
eceef4c9
BS
6436 if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6437 && rld[j].opnum > max_input_address_opnum)
6438 max_input_address_opnum = rld[j].opnum;
d668e863 6439
546b63fb 6440	  /* If the reload regs aren't exactly the same (e.g., different modes)
d668e863
R
6441 or if the values are different, we can't merge this reload.
6442 But if it is an input reload, we might still merge
6443 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
546b63fb 6444
eceef4c9
BS
6445 if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6446 || rld[j].out != 0 || rld[j].in == 0
6447 || ! rtx_equal_p (rld[i].in, rld[j].in))
d668e863 6448 {
eceef4c9
BS
6449 if (rld[j].when_needed != RELOAD_FOR_INPUT
6450 || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6451 || rld[i].opnum > rld[j].opnum)
6452 && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
d668e863
R
6453 break;
6454 conflicting_input = 1;
eceef4c9
BS
6455 if (min_conflicting_input_opnum > rld[j].opnum)
6456 min_conflicting_input_opnum = rld[j].opnum;
d668e863 6457 }
546b63fb
RK
6458 }
6459
6460 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6461 we, in fact, found any matching reloads. */
6462
d668e863
R
6463 if (j == n_reloads
6464 && max_input_address_opnum <= min_conflicting_input_opnum)
546b63fb
RK
6465 {
6466 for (j = 0; j < n_reloads; j++)
eceef4c9
BS
6467 if (i != j && rld[j].reg_rtx != 0
6468 && rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
d668e863 6469 && (! conflicting_input
eceef4c9
BS
6470 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6471 || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
546b63fb 6472 {
eceef4c9
BS
6473 rld[i].when_needed = RELOAD_OTHER;
6474 rld[j].in = 0;
efdb3590 6475 reload_spill_index[j] = -1;
546b63fb
RK
6476 transfer_replacements (i, j);
6477 }
6478
6479 /* If this is now RELOAD_OTHER, look for any reloads that load
6480 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6481 if they were for inputs, RELOAD_OTHER for outputs. Note that
6482 this test is equivalent to looking for reloads for this operand
6483 number. */
6484
eceef4c9 6485 if (rld[i].when_needed == RELOAD_OTHER)
546b63fb 6486 for (j = 0; j < n_reloads; j++)
eceef4c9
BS
6487 if (rld[j].in != 0
6488 && rld[i].when_needed != RELOAD_OTHER
6489 && reg_overlap_mentioned_for_reload_p (rld[j].in,
6490 rld[i].in))
6491 rld[j].when_needed
6492 = ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
6493 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
47c8cf91 6494 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
546b63fb
RK
6495 }
6496 }
05d10675 6497}
e9a25f70 6498
546b63fb 6499\f
32131a9c
RK
6500/* Output insns to reload values in and out of the chosen reload regs. */
6501
6502static void
7609e720
BS
6503emit_reload_insns (chain)
6504 struct insn_chain *chain;
32131a9c 6505{
7609e720
BS
6506 rtx insn = chain->insn;
6507
32131a9c 6508 register int j;
546b63fb
RK
6509 rtx input_reload_insns[MAX_RECOG_OPERANDS];
6510 rtx other_input_address_reload_insns = 0;
6511 rtx other_input_reload_insns = 0;
6512 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
47c8cf91 6513 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
546b63fb
RK
6514 rtx output_reload_insns[MAX_RECOG_OPERANDS];
6515 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
47c8cf91 6516 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
546b63fb 6517 rtx operand_reload_insns = 0;
893bc853 6518 rtx other_operand_reload_insns = 0;
befa01b9 6519 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
32131a9c 6520 rtx following_insn = NEXT_INSN (insn);
c93b03c2 6521 rtx before_insn = PREV_INSN (insn);
32131a9c
RK
6522 int special;
6523 /* Values to be put in spill_reg_store are put here first. */
6524 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
e6e52be0
R
6525 HARD_REG_SET reg_reloaded_died;
6526
6527 CLEAR_HARD_REG_SET (reg_reloaded_died);
32131a9c 6528
546b63fb
RK
6529 for (j = 0; j < reload_n_operands; j++)
6530 input_reload_insns[j] = input_address_reload_insns[j]
47c8cf91 6531 = inpaddr_address_reload_insns[j]
befa01b9 6532 = output_reload_insns[j] = output_address_reload_insns[j]
47c8cf91 6533 = outaddr_address_reload_insns[j]
befa01b9 6534 = other_output_reload_insns[j] = 0;
546b63fb 6535
32131a9c
RK
6536 /* Now output the instructions to copy the data into and out of the
6537 reload registers. Do these in the order that the reloads were reported,
6538 since reloads of base and index registers precede reloads of operands
6539 and the operands may need the base and index registers reloaded. */
6540
6541 for (j = 0; j < n_reloads; j++)
6542 {
6543 register rtx old;
6544 rtx oldequiv_reg = 0;
80d92002 6545 rtx this_reload_insn = 0;
b60a8416 6546 int expect_occurrences = 1;
73b2ad9e 6547
eceef4c9
BS
6548 if (rld[j].reg_rtx
6549 && REGNO (rld[j].reg_rtx) < FIRST_PSEUDO_REGISTER)
6550 new_spill_reg_store[REGNO (rld[j].reg_rtx)] = 0;
32131a9c 6551
eceef4c9
BS
6552 old = (rld[j].in && GET_CODE (rld[j].in) == MEM
6553 ? rld[j].in_reg : rld[j].in);
cb2afeb3
R
6554
6555 if (old != 0
6556 /* AUTO_INC reloads need to be handled even if inherited. We got an
6557 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
eceef4c9
BS
6558 && (! reload_inherited[j] || (rld[j].out && ! rld[j].out_reg))
6559 && ! rtx_equal_p (rld[j].reg_rtx, old)
6560 && rld[j].reg_rtx != 0)
32131a9c 6561 {
eceef4c9 6562 register rtx reloadreg = rld[j].reg_rtx;
32131a9c
RK
6563 rtx oldequiv = 0;
6564 enum machine_mode mode;
546b63fb 6565 rtx *where;
32131a9c
RK
6566
6567 /* Determine the mode to reload in.
6568 This is very tricky because we have three to choose from.
eceef4c9 6569 There is the mode the insn operand wants (rld[J].inmode).
32131a9c
RK
6570 There is the mode of the reload register RELOADREG.
6571 There is the intrinsic mode of the operand, which we could find
6572 by stripping some SUBREGs.
6573 It turns out that RELOADREG's mode is irrelevant:
6574 we can change that arbitrarily.
6575
6576 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6577 then the reload reg may not support QImode moves, so use SImode.
6578 If foo is in memory due to spilling a pseudo reg, this is safe,
6579 because the QImode value is in the least significant part of a
6580 slot big enough for a SImode. If foo is some other sort of
6581 memory reference, then it is impossible to reload this case,
6582 so previous passes had better make sure this never happens.
6583
6584 Then consider a one-word union which has SImode and one of its
6585 members is a float, being fetched as (SUBREG:SF union:SI).
6586 We must fetch that as SFmode because we could be loading into
6587 a float-only register. In this case OLD's mode is correct.
6588
6589 Consider an immediate integer: it has VOIDmode. Here we need
6590 to get a mode from something else.
6591
6592 In some cases, there is a fourth mode, the operand's
6593 containing mode. If the insn specifies a containing mode for
6594 this operand, it overrides all others.
6595
6596 I am not sure whether the algorithm here is always right,
6597 but it does the right things in those cases. */
6598
6599 mode = GET_MODE (old);
6600 if (mode == VOIDmode)
eceef4c9 6601 mode = rld[j].inmode;
32131a9c
RK
6602
6603#ifdef SECONDARY_INPUT_RELOAD_CLASS
6604 /* If we need a secondary register for this operation, see if
6605 the value is already in a register in that class. Don't
6606 do this if the secondary register will be used as a scratch
6607 register. */
6608
eceef4c9
BS
6609 if (rld[j].secondary_in_reload >= 0
6610 && rld[j].secondary_in_icode == CODE_FOR_nothing
58b1581b 6611 && optimize)
32131a9c
RK
6612 oldequiv
6613 = find_equiv_reg (old, insn,
eceef4c9 6614 rld[rld[j].secondary_in_reload].class,
fb3821f7 6615 -1, NULL_PTR, 0, mode);
32131a9c
RK
6616#endif
6617
6618 /* If reloading from memory, see if there is a register
6619 that already holds the same value. If so, reload from there.
6620 We can pass 0 as the reload_reg_p argument because
6621 any other reload has either already been emitted,
6622 in which case find_equiv_reg will see the reload-insn,
6623 or has yet to be emitted, in which case it doesn't matter
6624 because we will use this equiv reg right away. */
6625
58b1581b 6626 if (oldequiv == 0 && optimize
32131a9c
RK
6627 && (GET_CODE (old) == MEM
6628 || (GET_CODE (old) == REG
6629 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6630 && reg_renumber[REGNO (old)] < 0)))
546b63fb 6631 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
fb3821f7 6632 -1, NULL_PTR, 0, mode);
32131a9c
RK
6633
6634 if (oldequiv)
6635 {
6636 int regno = true_regnum (oldequiv);
6637
dfe96118
R
6638 /* Don't use OLDEQUIV if any other reload changes it at an
6639 earlier stage of this insn or at this stage. */
eceef4c9
BS
6640 if (! reload_reg_free_for_value_p (regno, rld[j].opnum,
6641 rld[j].when_needed,
6642 rld[j].in, const0_rtx, j,
dfe96118 6643 0))
32131a9c
RK
6644 oldequiv = 0;
6645
546b63fb
RK
6646 /* If it is no cheaper to copy from OLDEQUIV into the
6647 reload register than it would be to move from memory,
6648 don't use it. Likewise, if we need a secondary register
6649 or memory. */
6650
6651 if (oldequiv != 0
eceef4c9 6652 && ((REGNO_REG_CLASS (regno) != rld[j].class
546b63fb 6653 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
eceef4c9
BS
6654 rld[j].class)
6655 >= MEMORY_MOVE_COST (mode, rld[j].class, 1)))
546b63fb 6656#ifdef SECONDARY_INPUT_RELOAD_CLASS
eceef4c9 6657 || (SECONDARY_INPUT_RELOAD_CLASS (rld[j].class,
546b63fb
RK
6658 mode, oldequiv)
6659 != NO_REGS)
6660#endif
6661#ifdef SECONDARY_MEMORY_NEEDED
370b1b83 6662 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno),
eceef4c9 6663 rld[j].class,
546b63fb
RK
6664 mode)
6665#endif
6666 ))
6667 oldequiv = 0;
32131a9c
RK
6668 }
6669
cb2afeb3
R
6670 /* delete_output_reload is only invoked properly if old contains
6671 the original pseudo register. Since this is replaced with a
6672 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6673 find the pseudo in RELOAD_IN_REG. */
6674 if (oldequiv == 0
6675 && reload_override_in[j]
eceef4c9 6676 && GET_CODE (rld[j].in_reg) == REG)
cb2afeb3
R
6677 {
6678 oldequiv = old;
eceef4c9 6679 old = rld[j].in_reg;
cb2afeb3 6680 }
32131a9c
RK
6681 if (oldequiv == 0)
6682 oldequiv = old;
6683 else if (GET_CODE (oldequiv) == REG)
6684 oldequiv_reg = oldequiv;
6685 else if (GET_CODE (oldequiv) == SUBREG)
6686 oldequiv_reg = SUBREG_REG (oldequiv);
6687
76182796
RK
6688 /* If we are reloading from a register that was recently stored in
6689 with an output-reload, see if we can prove there was
6690 actually no need to store the old value in it. */
6691
6692 if (optimize && GET_CODE (oldequiv) == REG
6693 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
e6e52be0 6694 && spill_reg_store[REGNO (oldequiv)]
cb2afeb3
R
6695 && GET_CODE (old) == REG
6696 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6697 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
eceef4c9 6698 rld[j].out_reg)))
cb2afeb3 6699 delete_output_reload (insn, j, REGNO (oldequiv));
76182796 6700
32131a9c 6701 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
3abe6f90
RK
6702 then load RELOADREG from OLDEQUIV. Note that we cannot use
6703 gen_lowpart_common since it can do the wrong thing when
6704 RELOADREG has a multi-word mode. Note that RELOADREG
6705 must always be a REG here. */
32131a9c
RK
6706
6707 if (GET_MODE (reloadreg) != mode)
38a448ca 6708 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
32131a9c
RK
6709 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6710 oldequiv = SUBREG_REG (oldequiv);
6711 if (GET_MODE (oldequiv) != VOIDmode
6712 && mode != GET_MODE (oldequiv))
38a448ca 6713 oldequiv = gen_rtx_SUBREG (mode, oldequiv, 0);
32131a9c 6714
546b63fb 6715 /* Switch to the right place to emit the reload insns. */
eceef4c9 6716 switch (rld[j].when_needed)
32131a9c 6717 {
32131a9c 6718 case RELOAD_OTHER:
546b63fb
RK
6719 where = &other_input_reload_insns;
6720 break;
6721 case RELOAD_FOR_INPUT:
eceef4c9 6722 where = &input_reload_insns[rld[j].opnum];
32131a9c 6723 break;
546b63fb 6724 case RELOAD_FOR_INPUT_ADDRESS:
eceef4c9 6725 where = &input_address_reload_insns[rld[j].opnum];
32131a9c 6726 break;
47c8cf91 6727 case RELOAD_FOR_INPADDR_ADDRESS:
eceef4c9 6728 where = &inpaddr_address_reload_insns[rld[j].opnum];
47c8cf91 6729 break;
546b63fb 6730 case RELOAD_FOR_OUTPUT_ADDRESS:
eceef4c9 6731 where = &output_address_reload_insns[rld[j].opnum];
32131a9c 6732 break;
47c8cf91 6733 case RELOAD_FOR_OUTADDR_ADDRESS:
eceef4c9 6734 where = &outaddr_address_reload_insns[rld[j].opnum];
47c8cf91 6735 break;
32131a9c 6736 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
6737 where = &operand_reload_insns;
6738 break;
893bc853
RK
6739 case RELOAD_FOR_OPADDR_ADDR:
6740 where = &other_operand_reload_insns;
6741 break;
546b63fb
RK
6742 case RELOAD_FOR_OTHER_ADDRESS:
6743 where = &other_input_address_reload_insns;
6744 break;
6745 default:
6746 abort ();
32131a9c
RK
6747 }
6748
546b63fb 6749 push_to_sequence (*where);
32131a9c
RK
6750 special = 0;
6751
6752 /* Auto-increment addresses must be reloaded in a special way. */
eceef4c9 6753 if (rld[j].out && ! rld[j].out_reg)
32131a9c
RK
6754 {
 6755	      /* We are not going to bother supporting the case where an
6756 incremented register can't be copied directly from
6757 OLDEQUIV since this seems highly unlikely. */
eceef4c9 6758 if (rld[j].secondary_in_reload >= 0)
32131a9c 6759 abort ();
cb2afeb3
R
6760
6761 if (reload_inherited[j])
6762 oldequiv = reloadreg;
6763
eceef4c9 6764 old = XEXP (rld[j].in_reg, 0);
cb2afeb3
R
6765
6766 if (optimize && GET_CODE (oldequiv) == REG
6767 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6768 && spill_reg_store[REGNO (oldequiv)]
6769 && GET_CODE (old) == REG
6770 && (dead_or_set_p (insn,
6771 spill_reg_stored_to[REGNO (oldequiv)])
6772 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6773 old)))
6774 delete_output_reload (insn, j, REGNO (oldequiv));
6775
32131a9c
RK
6776 /* Prevent normal processing of this reload. */
6777 special = 1;
6778 /* Output a special code sequence for this case. */
cb2afeb3 6779 new_spill_reg_store[REGNO (reloadreg)]
eceef4c9
BS
6780 = inc_for_reload (reloadreg, oldequiv, rld[j].out,
6781 rld[j].inc);
32131a9c
RK
6782 }
6783
6784 /* If we are reloading a pseudo-register that was set by the previous
6785 insn, see if we can get rid of that pseudo-register entirely
6786 by redirecting the previous insn into our reload register. */
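 /* E.g. if the previous insn was (set (reg:SI 100) ...) and reg 100 dies
    in the insn being reloaded, the previous insn can be made to write the
    reload register directly and the pseudo disappears.  */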
6787
6788 else if (optimize && GET_CODE (old) == REG
6789 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6790 && dead_or_set_p (insn, old)
6791 /* This is unsafe if some other reload
6792 uses the same reg first. */
dfe96118 6793 && reload_reg_free_for_value_p (REGNO (reloadreg),
eceef4c9
BS
6794 rld[j].opnum,
6795 rld[j].when_needed,
6796 old, rld[j].out,
dfe96118 6797 j, 0))
32131a9c
RK
6798 {
6799 rtx temp = PREV_INSN (insn);
6800 while (temp && GET_CODE (temp) == NOTE)
6801 temp = PREV_INSN (temp);
6802 if (temp
6803 && GET_CODE (temp) == INSN
6804 && GET_CODE (PATTERN (temp)) == SET
6805 && SET_DEST (PATTERN (temp)) == old
6806 /* Make sure we can access insn_operand_constraint. */
6807 && asm_noperands (PATTERN (temp)) < 0
6808 /* This is unsafe if prev insn rejects our reload reg. */
a995e389 6809 && constraint_accepts_reg_p (insn_data[recog_memoized (temp)].operand[0].constraint,
32131a9c
RK
6810 reloadreg)
6811 /* This is unsafe if operand occurs more than once in current
6812 insn. Perhaps some occurrences aren't reloaded. */
6813 && count_occurrences (PATTERN (insn), old) == 1
6814 /* Don't risk splitting a matching pair of operands. */
6815 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6816 {
6817 /* Store into the reload register instead of the pseudo. */
6818 SET_DEST (PATTERN (temp)) = reloadreg;
d30e8ef0
BS
6819
6820 /* If the previous insn is an output reload, the source is
6821 a reload register, and its spill_reg_store entry will
6822 contain the previous destination. This is now
6823 invalid. */
6824 if (GET_CODE (SET_SRC (PATTERN (temp))) == REG
6825 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
6826 {
6827 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6828 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6829 }
6830
32131a9c
RK
6831 /* If these are the only uses of the pseudo reg,
6832 pretend for GDB it lives in the reload reg we used. */
b1f21e0a
MM
6833 if (REG_N_DEATHS (REGNO (old)) == 1
6834 && REG_N_SETS (REGNO (old)) == 1)
32131a9c 6835 {
eceef4c9 6836 reg_renumber[REGNO (old)] = REGNO (rld[j].reg_rtx);
32131a9c
RK
6837 alter_reg (REGNO (old), -1);
6838 }
6839 special = 1;
6840 }
6841 }
6842
546b63fb
RK
6843 /* We can't do that, so output an insn to load RELOADREG. */
6844
32131a9c
RK
6845 if (! special)
6846 {
6847#ifdef SECONDARY_INPUT_RELOAD_CLASS
6848 rtx second_reload_reg = 0;
6849 enum insn_code icode;
6850
6851 /* If we have a secondary reload, pick up the secondary register
d445b551
RK
6852 and icode, if any. If OLDEQUIV and OLD are different or
6853 if this is an in-out reload, recompute whether or not we
6854 still need a secondary register and what the icode should
6855 be. If we still need a secondary register and the class or
6856 icode is different, go back to reloading from OLD if using
6857 OLDEQUIV means that we got the wrong type of register. We
6858 cannot have different class or icode due to an in-out reload
6859 because we don't make such reloads when both the input and
6860 output need secondary reload registers. */
32131a9c 6861
eceef4c9 6862 if (rld[j].secondary_in_reload >= 0)
32131a9c 6863 {
eceef4c9 6864 int secondary_reload = rld[j].secondary_in_reload;
1554c2c6
RK
6865 rtx real_oldequiv = oldequiv;
6866 rtx real_old = old;
4eea1672 6867 rtx tmp;
1554c2c6
RK
6868
6869 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6870 and similarly for OLD.
b80bba27 6871 See comments in get_secondary_reload in reload.c. */
cb2afeb3
R
6872 /* If it is a pseudo that cannot be replaced with its
6873 equivalent MEM, we must fall back to reload_in, which
d62dab41
R
6874 will have all the necessary substitutions registered.
6875 Likewise for a pseudo that can't be replaced with its
05d10675 6876 equivalent constant.
4eea1672
RH
6877
6878 Take extra care for subregs of such pseudos. Note that
6879 we cannot use reg_equiv_mem in this case because it is
6880 not in the right mode. */
05d10675 6881
4eea1672
RH
6882 tmp = oldequiv;
6883 if (GET_CODE (tmp) == SUBREG)
6884 tmp = SUBREG_REG (tmp);
6885 if (GET_CODE (tmp) == REG
6886 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6887 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6888 || reg_equiv_constant[REGNO (tmp)] != 0))
cb2afeb3 6889 {
4eea1672
RH
6890 if (! reg_equiv_mem[REGNO (tmp)]
6891 || num_not_at_initial_offset
6892 || GET_CODE (oldequiv) == SUBREG)
eceef4c9 6893 real_oldequiv = rld[j].in;
cb2afeb3 6894 else
4eea1672 6895 real_oldequiv = reg_equiv_mem[REGNO (tmp)];
cb2afeb3 6896 }
1554c2c6 6897
4eea1672
RH
6898 tmp = old;
6899 if (GET_CODE (tmp) == SUBREG)
6900 tmp = SUBREG_REG (tmp);
6901 if (GET_CODE (tmp) == REG
6902 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6903 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6904 || reg_equiv_constant[REGNO (tmp)] != 0))
cb2afeb3 6905 {
4eea1672
RH
6906 if (! reg_equiv_mem[REGNO (tmp)]
6907 || num_not_at_initial_offset
6908 || GET_CODE (old) == SUBREG)
eceef4c9 6909 real_old = rld[j].in;
cb2afeb3 6910 else
4eea1672 6911 real_old = reg_equiv_mem[REGNO (tmp)];
cb2afeb3 6912 }
1554c2c6 6913
eceef4c9
BS
6914 second_reload_reg = rld[secondary_reload].reg_rtx;
6915 icode = rld[j].secondary_in_icode;
32131a9c 6916
d445b551 6917 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
eceef4c9 6918 || (rld[j].in != 0 && rld[j].out != 0))
32131a9c
RK
6919 {
6920 enum reg_class new_class
eceef4c9 6921 = SECONDARY_INPUT_RELOAD_CLASS (rld[j].class,
1554c2c6 6922 mode, real_oldequiv);
32131a9c
RK
6923
6924 if (new_class == NO_REGS)
6925 second_reload_reg = 0;
6926 else
6927 {
6928 enum insn_code new_icode;
6929 enum machine_mode new_mode;
6930
6931 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6932 REGNO (second_reload_reg)))
1554c2c6 6933 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
6934 else
6935 {
6936 new_icode = reload_in_optab[(int) mode];
6937 if (new_icode != CODE_FOR_nothing
a995e389
RH
6938 && ((insn_data[(int) new_icode].operand[0].predicate
6939 && ! ((*insn_data[(int) new_icode].operand[0].predicate)
32131a9c 6940 (reloadreg, mode)))
a995e389
RH
6941 || (insn_data[(int) new_icode].operand[1].predicate
6942 && ! ((*insn_data[(int) new_icode].operand[1].predicate)
1554c2c6 6943 (real_oldequiv, mode)))))
32131a9c
RK
6944 new_icode = CODE_FOR_nothing;
6945
6946 if (new_icode == CODE_FOR_nothing)
6947 new_mode = mode;
6948 else
a995e389 6949 new_mode = insn_data[(int) new_icode].operand[2].mode;
32131a9c
RK
6950
6951 if (GET_MODE (second_reload_reg) != new_mode)
6952 {
6953 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6954 new_mode))
1554c2c6 6955 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
6956 else
6957 second_reload_reg
38a448ca
RH
6958 = gen_rtx_REG (new_mode,
6959 REGNO (second_reload_reg));
32131a9c
RK
6960 }
6961 }
6962 }
6963 }
6964
6965 /* If we still need a secondary reload register, check
6966 to see if it is being used as a scratch or intermediate
1554c2c6
RK
6967 register and generate code appropriately. If we need
6968 a scratch register, use REAL_OLDEQUIV since the form of
05d10675 6969 the insn may depend on the actual address if it is
1554c2c6 6970 a MEM. */
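 /* If a secondary reload insn pattern (ICODE) exists, the secondary
    register is a scratch handed to that pattern; otherwise it is an
    intermediate register that we first load from REAL_OLDEQUIV, possibly
    with the help of a tertiary scratch register, and then copy into
    RELOADREG.  */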
32131a9c
RK
6971
6972 if (second_reload_reg)
6973 {
6974 if (icode != CODE_FOR_nothing)
6975 {
5e03c156
RK
6976 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6977 second_reload_reg));
32131a9c
RK
6978 special = 1;
6979 }
6980 else
6981 {
6982 /* See if we need a scratch register to load the
6983 intermediate register (a tertiary reload). */
6984 enum insn_code tertiary_icode
eceef4c9 6985 = rld[secondary_reload].secondary_in_icode;
32131a9c
RK
6986
6987 if (tertiary_icode != CODE_FOR_nothing)
6988 {
6989 rtx third_reload_reg
eceef4c9 6990 = rld[rld[secondary_reload].secondary_in_reload].reg_rtx;
32131a9c 6991
546b63fb
RK
6992 emit_insn ((GEN_FCN (tertiary_icode)
6993 (second_reload_reg, real_oldequiv,
6994 third_reload_reg)));
32131a9c
RK
6995 }
6996 else
cb2afeb3 6997 gen_reload (second_reload_reg, real_oldequiv,
eceef4c9
BS
6998 rld[j].opnum,
6999 rld[j].when_needed);
546b63fb
RK
7000
7001 oldequiv = second_reload_reg;
32131a9c
RK
7002 }
7003 }
7004 }
7005#endif
7006
2d182c6f 7007 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
cb2afeb3
R
7008 {
7009 rtx real_oldequiv = oldequiv;
7010
7011 if ((GET_CODE (oldequiv) == REG
7012 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
d62dab41
R
7013 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
7014 || reg_equiv_constant[REGNO (oldequiv)] != 0))
cb2afeb3
R
7015 || (GET_CODE (oldequiv) == SUBREG
7016 && GET_CODE (SUBREG_REG (oldequiv)) == REG
7017 && (REGNO (SUBREG_REG (oldequiv))
7018 >= FIRST_PSEUDO_REGISTER)
d62dab41
R
7019 && ((reg_equiv_memory_loc
7020 [REGNO (SUBREG_REG (oldequiv))] != 0)
7021 || (reg_equiv_constant
7022 [REGNO (SUBREG_REG (oldequiv))] != 0))))
eceef4c9
BS
7023 real_oldequiv = rld[j].in;
7024 gen_reload (reloadreg, real_oldequiv, rld[j].opnum,
7025 rld[j].when_needed);
cb2afeb3 7026 }
32131a9c 7027
32131a9c
RK
7028 }
7029
80d92002 7030 this_reload_insn = get_last_insn ();
546b63fb
RK
7031 /* End this sequence. */
7032 *where = get_insns ();
7033 end_sequence ();
cb2afeb3
R
7034
7035 /* Update reload_override_in so that delete_address_reloads_1
7036 can see the actual register usage. */
7037 if (oldequiv_reg)
7038 reload_override_in[j] = oldequiv;
32131a9c
RK
7039 }
7040
eceef4c9 7041 /* When inheriting a wider reload, we have a MEM in rld[j].in,
b60a8416
R
7042 e.g. inheriting a SImode output reload for
7043 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
eceef4c9
BS
7044 if (optimize && reload_inherited[j] && rld[j].in
7045 && GET_CODE (rld[j].in) == MEM
7046 && GET_CODE (rld[j].in_reg) == MEM
b60a8416
R
7047 && reload_spill_index[j] >= 0
7048 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7049 {
7050 expect_occurrences
eceef4c9
BS
7051 = count_occurrences (PATTERN (insn), rld[j].in) == 1 ? 0 : -1;
7052 rld[j].in
b60a8416
R
7053 = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7054 }
32131a9c
RK
7055
7056 /* If we are reloading a register that was recently stored in with an
7057 output-reload, see if we can prove there was
7058 actually no need to store the old value in it. */
7059
cb2afeb3
R
7060 if (optimize
7061 && (reload_inherited[j] || reload_override_in[j])
eceef4c9
BS
7062 && rld[j].reg_rtx
7063 && GET_CODE (rld[j].reg_rtx) == REG
7064 && spill_reg_store[REGNO (rld[j].reg_rtx)] != 0
32131a9c
RK
7065#if 0
7066 /* There doesn't seem to be any reason to restrict this to pseudos
7067 and doing so loses in the case where we are copying from a
7068 register of the wrong class. */
eceef4c9 7069 && (REGNO (spill_reg_stored_to[REGNO (rld[j].reg_rtx)])
05d10675 7070 >= FIRST_PSEUDO_REGISTER)
32131a9c 7071#endif
05d10675
BS
 7072	  /* The insn might already have some references to stack slots
7073 replaced by MEMs, while reload_out_reg still names the
7074 original pseudo. */
cb2afeb3 7075 && (dead_or_set_p (insn,
eceef4c9
BS
7076 spill_reg_stored_to[REGNO (rld[j].reg_rtx)])
7077 || rtx_equal_p (spill_reg_stored_to[REGNO (rld[j].reg_rtx)],
7078 rld[j].out_reg)))
7079 delete_output_reload (insn, j, REGNO (rld[j].reg_rtx));
32131a9c
RK
7080
7081 /* Input-reloading is done. Now do output-reloading,
7082 storing the value from the reload-register after the main insn
eceef4c9 7083 if rld[j].out is nonzero.
32131a9c
RK
7084
7085 ??? At some point we need to support handling output reloads of
7086 JUMP_INSNs or insns that set cc0. */
cb2afeb3
R
7087
7088 /* If this is an output reload that stores something that is
7089 not loaded in this same reload, see if we can eliminate a previous
7090 store. */
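 /* If the pseudo being stored here still lives in the spill register that
    last stored it, the store made by that earlier output reload may now be
    dead; delete_output_reload checks this and removes it if so.  */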
7091 {
eceef4c9 7092 rtx pseudo = rld[j].out_reg;
05d10675 7093
cb2afeb3
R
7094 if (pseudo
7095 && GET_CODE (pseudo) == REG
eceef4c9 7096 && ! rtx_equal_p (rld[j].in_reg, pseudo)
cb2afeb3
R
7097 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7098 && reg_last_reload_reg[REGNO (pseudo)])
7099 {
7100 int pseudo_no = REGNO (pseudo);
7101 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7102
7103 /* We don't need to test full validity of last_regno for
7104 inherit here; we only want to know if the store actually
7105 matches the pseudo. */
7106 if (reg_reloaded_contents[last_regno] == pseudo_no
7107 && spill_reg_store[last_regno]
7108 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7109 delete_output_reload (insn, j, last_regno);
7110 }
7111 }
7112
eceef4c9 7113 old = rld[j].out_reg;
32131a9c 7114 if (old != 0
eceef4c9
BS
7115 && rld[j].reg_rtx != old
7116 && rld[j].reg_rtx != 0)
32131a9c 7117 {
eceef4c9 7118 register rtx reloadreg = rld[j].reg_rtx;
29a82058 7119#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
32131a9c 7120 register rtx second_reloadreg = 0;
29a82058 7121#endif
32131a9c
RK
7122 rtx note, p;
7123 enum machine_mode mode;
7124 int special = 0;
7125
 7126	  /* An output operand that dies right away does need a reload register,
 7127	     but nothing need be copied back from it.  Show the new location in the
7128 REG_UNUSED note. */
7129 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
7130 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7131 {
eceef4c9 7132 XEXP (note, 0) = rld[j].reg_rtx;
32131a9c
RK
7133 continue;
7134 }
a7911cd2
RK
7135 /* Likewise for a SUBREG of an operand that dies. */
7136 else if (GET_CODE (old) == SUBREG
7137 && GET_CODE (SUBREG_REG (old)) == REG
7138 && 0 != (note = find_reg_note (insn, REG_UNUSED,
7139 SUBREG_REG (old))))
7140 {
7141 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
eceef4c9 7142 rld[j].reg_rtx);
a7911cd2
RK
7143 continue;
7144 }
32131a9c
RK
7145 else if (GET_CODE (old) == SCRATCH)
7146 /* If we aren't optimizing, there won't be a REG_UNUSED note,
7147 but we don't want to make an output reload. */
7148 continue;
7149
7150#if 0
7151 /* Strip off of OLD any size-increasing SUBREGs such as
7152 (SUBREG:SI foo:QI 0). */
7153
7154 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
7155 && (GET_MODE_SIZE (GET_MODE (old))
7156 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
7157 old = SUBREG_REG (old);
7158#endif
7159
 7160	  /* If this is a JUMP_INSN, we can't support output reloads yet.  */
7161 if (GET_CODE (insn) == JUMP_INSN)
7162 abort ();
7163
eceef4c9 7164 if (rld[j].when_needed == RELOAD_OTHER)
5ca582cf 7165 start_sequence ();
d7e0324f 7166 else
eceef4c9 7167 push_to_sequence (output_reload_insns[rld[j].opnum]);
546b63fb 7168
eceef4c9 7169 old = rld[j].out;
cb2afeb3 7170
32131a9c
RK
7171 /* Determine the mode to reload in.
7172 See comments above (for input reloading). */
7173
7174 mode = GET_MODE (old);
7175 if (mode == VOIDmode)
79a365a7
RS
7176 {
7177 /* VOIDmode should never happen for an output. */
7178 if (asm_noperands (PATTERN (insn)) < 0)
7179 /* It's the compiler's fault. */
a89b2cc4 7180 fatal_insn ("VOIDmode on an output", insn);
79a365a7
RS
7181 error_for_asm (insn, "output operand is constant in `asm'");
7182 /* Prevent crash--use something we know is valid. */
7183 mode = word_mode;
38a448ca 7184 old = gen_rtx_REG (mode, REGNO (reloadreg));
79a365a7 7185 }
32131a9c 7186
32131a9c 7187 if (GET_MODE (reloadreg) != mode)
38a448ca 7188 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
32131a9c
RK
7189
7190#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
7191
7192 /* If we need two reload regs, set RELOADREG to the intermediate
5e03c156 7193 one, since it will be stored into OLD. We might need a secondary
32131a9c
RK
7194 register only for an input reload, so check again here. */
7195
eceef4c9 7196 if (rld[j].secondary_out_reload >= 0)
32131a9c 7197 {
1554c2c6 7198 rtx real_old = old;
32131a9c 7199
1554c2c6
RK
7200 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
7201 && reg_equiv_mem[REGNO (old)] != 0)
7202 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 7203
eceef4c9 7204	      if ((SECONDARY_OUTPUT_RELOAD_CLASS (rld[j].class,
1554c2c6
RK
7205 mode, real_old)
7206 != NO_REGS))
7207 {
7208 second_reloadreg = reloadreg;
eceef4c9 7209 reloadreg = rld[rld[j].secondary_out_reload].reg_rtx;
32131a9c 7210
1554c2c6
RK
7211 /* See if RELOADREG is to be used as a scratch register
7212 or as an intermediate register. */
eceef4c9 7213 if (rld[j].secondary_out_icode != CODE_FOR_nothing)
32131a9c 7214 {
eceef4c9 7215 emit_insn ((GEN_FCN (rld[j].secondary_out_icode)
546b63fb 7216 (real_old, second_reloadreg, reloadreg)));
1554c2c6 7217 special = 1;
32131a9c
RK
7218 }
7219 else
1554c2c6
RK
7220 {
7221 /* See if we need both a scratch and intermediate reload
7222 register. */
5e03c156 7223
eceef4c9 7224 int secondary_reload = rld[j].secondary_out_reload;
1554c2c6 7225 enum insn_code tertiary_icode
eceef4c9 7226 = rld[secondary_reload].secondary_out_icode;
32131a9c 7227
1554c2c6 7228 if (GET_MODE (reloadreg) != mode)
38a448ca 7229 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
1554c2c6
RK
7230
7231 if (tertiary_icode != CODE_FOR_nothing)
7232 {
7233 rtx third_reloadreg
eceef4c9 7234 = rld[rld[secondary_reload].secondary_out_reload].reg_rtx;
a7911cd2 7235 rtx tem;
5e03c156
RK
7236
 7237		      /* Copy the primary reload reg to the secondary reload reg
 7238			 (note that these have been swapped above), then copy the
 7239			 secondary reload reg to OLD using our insn.  */
7240
a7911cd2
RK
7241 /* If REAL_OLD is a paradoxical SUBREG, remove it
7242 and try to put the opposite SUBREG on
7243 RELOADREG. */
7244 if (GET_CODE (real_old) == SUBREG
7245 && (GET_MODE_SIZE (GET_MODE (real_old))
7246 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
7247 && 0 != (tem = gen_lowpart_common
7248 (GET_MODE (SUBREG_REG (real_old)),
7249 reloadreg)))
7250 real_old = SUBREG_REG (real_old), reloadreg = tem;
7251
5e03c156 7252 gen_reload (reloadreg, second_reloadreg,
eceef4c9 7253 rld[j].opnum, rld[j].when_needed);
5e03c156
RK
7254 emit_insn ((GEN_FCN (tertiary_icode)
7255 (real_old, reloadreg, third_reloadreg)));
7256 special = 1;
9ad5f9f6 7257 }
5e03c156 7258
1554c2c6 7259 else
5e03c156
RK
7260 /* Copy between the reload regs here and then to
7261 OUT later. */
1554c2c6 7262
5e03c156 7263 gen_reload (reloadreg, second_reloadreg,
eceef4c9 7264 rld[j].opnum, rld[j].when_needed);
1554c2c6 7265 }
32131a9c
RK
7266 }
7267 }
7268#endif
7269
7270 /* Output the last reload insn. */
7271 if (! special)
d7c2e385
L
7272 {
7273 rtx set;
7274
7275 /* Don't output the last reload if OLD is not the dest of
7276 INSN and is in the src and is clobbered by INSN. */
7277 if (! flag_expensive_optimizations
7278 || GET_CODE (old) != REG
7279 || !(set = single_set (insn))
7280 || rtx_equal_p (old, SET_DEST (set))
7281 || !reg_mentioned_p (old, SET_SRC (set))
7282 || !regno_clobbered_p (REGNO (old), insn))
eceef4c9
BS
7283 gen_reload (old, reloadreg, rld[j].opnum,
7284 rld[j].when_needed);
d7c2e385 7285 }
32131a9c 7286
32131a9c 7287 /* Look at all insns we emitted, just to be safe. */
546b63fb 7288 for (p = get_insns (); p; p = NEXT_INSN (p))
32131a9c
RK
7289 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
7290 {
e6e52be0
R
7291 rtx pat = PATTERN (p);
7292
32131a9c
RK
7293 /* If this output reload doesn't come from a spill reg,
7294 clear any memory of reloaded copies of the pseudo reg.
7295 If this output reload comes from a spill reg,
7296 reg_has_output_reload will make this do nothing. */
e6e52be0
R
7297 note_stores (pat, forget_old_reloads_1);
7298
eceef4c9 7299 if (reg_mentioned_p (rld[j].reg_rtx, pat))
e6e52be0 7300 {
cb2afeb3 7301 rtx set = single_set (insn);
e6e52be0 7302 if (reload_spill_index[j] < 0
cb2afeb3 7303 && set
eceef4c9 7304 && SET_SRC (set) == rld[j].reg_rtx)
e6e52be0 7305 {
cb2afeb3 7306 int src = REGNO (SET_SRC (set));
32131a9c 7307
e6e52be0
R
7308 reload_spill_index[j] = src;
7309 SET_HARD_REG_BIT (reg_is_output_reload, src);
7310 if (find_regno_note (insn, REG_DEAD, src))
7311 SET_HARD_REG_BIT (reg_reloaded_died, src);
7312 }
eceef4c9 7313 if (REGNO (rld[j].reg_rtx) < FIRST_PSEUDO_REGISTER)
9da46522 7314 {
eceef4c9 7315 int s = rld[j].secondary_out_reload;
cb2afeb3 7316 set = single_set (p);
9da46522
R
7317 /* If this reload copies only to the secondary reload
7318 register, the secondary reload does the actual
7319 store. */
7320 if (s >= 0 && set == NULL_RTX)
7321 ; /* We can't tell what function the secondary reload
7322 has and where the actual store to the pseudo is
7323 made; leave new_spill_reg_store alone. */
7324 else if (s >= 0
eceef4c9
BS
7325 && SET_SRC (set) == rld[j].reg_rtx
7326 && SET_DEST (set) == rld[s].reg_rtx)
9da46522
R
7327 {
7328 /* Usually the next instruction will be the
7329 secondary reload insn; if we can confirm
7330 that it is, setting new_spill_reg_store to
7331 that insn will allow an extra optimization. */
eceef4c9 7332 rtx s_reg = rld[s].reg_rtx;
9da46522 7333 rtx next = NEXT_INSN (p);
eceef4c9
BS
7334 rld[s].out = rld[j].out;
7335 rld[s].out_reg = rld[j].out_reg;
9da46522
R
7336 set = single_set (next);
7337 if (set && SET_SRC (set) == s_reg
7338 && ! new_spill_reg_store[REGNO (s_reg)])
cb2afeb3
R
7339 {
7340 SET_HARD_REG_BIT (reg_is_output_reload,
7341 REGNO (s_reg));
7342 new_spill_reg_store[REGNO (s_reg)] = next;
7343 }
9da46522
R
7344 }
7345 else
eceef4c9 7346 new_spill_reg_store[REGNO (rld[j].reg_rtx)] = p;
9da46522 7347 }
e6e52be0 7348 }
32131a9c
RK
7349 }
7350
eceef4c9 7351 if (rld[j].when_needed == RELOAD_OTHER)
befa01b9 7352 {
eceef4c9
BS
7353 emit_insns (other_output_reload_insns[rld[j].opnum]);
7354 other_output_reload_insns[rld[j].opnum] = get_insns ();
befa01b9
JW
7355 }
7356 else
eceef4c9 7357 output_reload_insns[rld[j].opnum] = get_insns ();
d7e0324f 7358
546b63fb 7359 end_sequence ();
32131a9c 7360 }
32131a9c
RK
7361 }
7362
546b63fb
RK
7363 /* Now write all the insns we made for reloads in the order expected by
7364 the allocation functions. Prior to the insn being reloaded, we write
7365 the following reloads:
7366
7367 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7368
2edc8d65 7369 RELOAD_OTHER reloads.
546b63fb 7370
47c8cf91
ILT
7371 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7372 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7373 RELOAD_FOR_INPUT reload for the operand.
546b63fb 7374
893bc853
RK
7375 RELOAD_FOR_OPADDR_ADDRS reloads.
7376
546b63fb
RK
7377 RELOAD_FOR_OPERAND_ADDRESS reloads.
7378
7379 After the insn being reloaded, we write the following:
7380
47c8cf91
ILT
7381 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7382 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7383 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7384 reloads for the operand. The RELOAD_OTHER output reloads are
7385 output in descending order by reload number. */
546b63fb 7386
c93b03c2
RH
7387 emit_insns_before (other_input_address_reload_insns, insn);
7388 emit_insns_before (other_input_reload_insns, insn);
546b63fb
RK
7389
7390 for (j = 0; j < reload_n_operands; j++)
7391 {
c93b03c2
RH
7392 emit_insns_before (inpaddr_address_reload_insns[j], insn);
7393 emit_insns_before (input_address_reload_insns[j], insn);
7394 emit_insns_before (input_reload_insns[j], insn);
546b63fb
RK
7395 }
7396
c93b03c2
RH
7397 emit_insns_before (other_operand_reload_insns, insn);
7398 emit_insns_before (operand_reload_insns, insn);
546b63fb
RK
7399
7400 for (j = 0; j < reload_n_operands; j++)
7401 {
47c8cf91 7402 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
546b63fb
RK
7403 emit_insns_before (output_address_reload_insns[j], following_insn);
7404 emit_insns_before (output_reload_insns[j], following_insn);
befa01b9 7405 emit_insns_before (other_output_reload_insns[j], following_insn);
c93b03c2
RH
7406 }
7407
7408 /* Keep basic block info up to date. */
7409 if (n_basic_blocks)
7410 {
3b413743 7411 if (BLOCK_HEAD (chain->block) == insn)
05d10675 7412 BLOCK_HEAD (chain->block) = NEXT_INSN (before_insn);
3b413743 7413 if (BLOCK_END (chain->block) == insn)
05d10675 7414 BLOCK_END (chain->block) = PREV_INSN (following_insn);
546b63fb
RK
7415 }
7416
32131a9c
RK
7417 /* For all the spill regs newly reloaded in this instruction,
7418 record what they were reloaded from, so subsequent instructions
d445b551
RK
7419 can inherit the reloads.
7420
7421 Update spill_reg_store for the reloads of this insn.
e9e79d69 7422 Copy the elements that were updated in the loop above. */
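 /* reg_last_reload_reg[PSEUDO] names the hard register that currently
    holds PSEUDO's value; for each spill register, reg_reloaded_contents,
    reg_reloaded_insn and reg_reloaded_valid record which pseudo it holds,
    which insn loaded it, and whether that information is still valid.  */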
32131a9c
RK
7423
7424 for (j = 0; j < n_reloads; j++)
7425 {
7426 register int r = reload_order[j];
7427 register int i = reload_spill_index[r];
7428
78a2bc08 7429 /* If this is a non-inherited input reload from a pseudo, we must
05d10675
BS
7430 clear any memory of a previous store to the same pseudo. Only do
7431 something if there will not be an output reload for the pseudo
7432 being reloaded. */
eceef4c9 7433 if (rld[r].in_reg != 0
05d10675
BS
7434 && ! (reload_inherited[r] || reload_override_in[r]))
7435 {
eceef4c9 7436 rtx reg = rld[r].in_reg;
78a2bc08 7437
05d10675 7438 if (GET_CODE (reg) == SUBREG)
78a2bc08 7439 reg = SUBREG_REG (reg);
05d10675
BS
7440
7441 if (GET_CODE (reg) == REG
78a2bc08
R
7442 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7443 && ! reg_has_output_reload[REGNO (reg)])
7444 {
7445 int nregno = REGNO (reg);
7446
7447 if (reg_last_reload_reg[nregno])
05d10675
BS
7448 {
7449 int last_regno = REGNO (reg_last_reload_reg[nregno]);
78a2bc08 7450
05d10675 7451 if (reg_reloaded_contents[last_regno] == nregno)
78a2bc08 7452 spill_reg_store[last_regno] = 0;
05d10675 7453 }
78a2bc08
R
7454 }
7455 }
05d10675 7456
e6e52be0 7457 /* I is nonnegative if this reload used a register.
eceef4c9 7458 If rld[r].reg_rtx is 0, this is an optional reload
51f0c3b7 7459 that we opted to ignore. */
d445b551 7460
eceef4c9 7461 if (i >= 0 && rld[r].reg_rtx != 0)
32131a9c 7462 {
32131a9c 7463 int nr
eceef4c9 7464 = HARD_REGNO_NREGS (i, GET_MODE (rld[r].reg_rtx));
32131a9c 7465 int k;
51f0c3b7
JW
7466 int part_reaches_end = 0;
7467 int all_reaches_end = 1;
32131a9c 7468
51f0c3b7
JW
7469 /* For a multi-register reload, we need to check if all or part
7470 of the value lives to the end. */
32131a9c
RK
7471 for (k = 0; k < nr; k++)
7472 {
eceef4c9
BS
7473 if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7474 rld[r].when_needed))
51f0c3b7
JW
7475 part_reaches_end = 1;
7476 else
7477 all_reaches_end = 0;
32131a9c
RK
7478 }
7479
51f0c3b7
JW
7480 /* Ignore reloads that don't reach the end of the insn in
7481 their entirety. */
7482 if (all_reaches_end)
32131a9c 7483 {
51f0c3b7
JW
7484 /* First, clear out memory of what used to be in this spill reg.
7485 If consecutive registers are used, clear them all. */
d08ea79f 7486
32131a9c 7487 for (k = 0; k < nr; k++)
e6e52be0 7488 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
d08ea79f 7489
51f0c3b7 7490 /* Maybe the spill reg contains a copy of reload_out. */
eceef4c9
BS
7491 if (rld[r].out != 0
7492 && (GET_CODE (rld[r].out) == REG
cb2afeb3 7493#ifdef AUTO_INC_DEC
eceef4c9 7494 || ! rld[r].out_reg
cb2afeb3 7495#endif
eceef4c9 7496 || GET_CODE (rld[r].out_reg) == REG))
51f0c3b7 7497 {
eceef4c9
BS
7498 rtx out = (GET_CODE (rld[r].out) == REG
7499 ? rld[r].out
7500 : rld[r].out_reg
7501 ? rld[r].out_reg
7502/* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
cb2afeb3 7503 register int nregno = REGNO (out);
51f0c3b7
JW
7504 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7505 : HARD_REGNO_NREGS (nregno,
eceef4c9 7506 GET_MODE (rld[r].reg_rtx)));
51f0c3b7
JW
7507
7508 spill_reg_store[i] = new_spill_reg_store[i];
cb2afeb3 7509 spill_reg_stored_to[i] = out;
eceef4c9 7510 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
51f0c3b7
JW
7511
7512 /* If NREGNO is a hard register, it may occupy more than
05d10675 7513 one register. If it does, say what is in the
51f0c3b7
JW
7514 rest of the registers assuming that both registers
7515 agree on how many words the object takes. If not,
7516 invalidate the subsequent registers. */
7517
7518 if (nregno < FIRST_PSEUDO_REGISTER)
7519 for (k = 1; k < nnr; k++)
7520 reg_last_reload_reg[nregno + k]
7521 = (nr == nnr
eceef4c9
BS
7522 ? gen_rtx_REG (reg_raw_mode[REGNO (rld[r].reg_rtx) + k],
7523 REGNO (rld[r].reg_rtx) + k)
51f0c3b7
JW
7524 : 0);
7525
7526 /* Now do the inverse operation. */
7527 for (k = 0; k < nr; k++)
7528 {
e6e52be0
R
7529 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7530 reg_reloaded_contents[i + k]
51f0c3b7
JW
7531 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7532 ? nregno
7533 : nregno + k);
e6e52be0
R
7534 reg_reloaded_insn[i + k] = insn;
7535 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
51f0c3b7
JW
7536 }
7537 }
d08ea79f 7538
51f0c3b7
JW
7539 /* Maybe the spill reg contains a copy of reload_in. Only do
7540 something if there will not be an output reload for
7541 the register being reloaded. */
eceef4c9
BS
7542 else if (rld[r].out_reg == 0
7543 && rld[r].in != 0
7544 && ((GET_CODE (rld[r].in) == REG
7545 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER
7546 && ! reg_has_output_reload[REGNO (rld[r].in)])
7547 || (GET_CODE (rld[r].in_reg) == REG
7548 && ! reg_has_output_reload[REGNO (rld[r].in_reg)]))
7549 && ! reg_set_p (rld[r].reg_rtx, PATTERN (insn)))
51f0c3b7
JW
7550 {
7551 register int nregno;
7552 int nnr;
d445b551 7553
eceef4c9
BS
7554 if (GET_CODE (rld[r].in) == REG
7555 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
7556 nregno = REGNO (rld[r].in);
7557 else if (GET_CODE (rld[r].in_reg) == REG)
7558 nregno = REGNO (rld[r].in_reg);
cb2afeb3 7559 else
eceef4c9 7560 nregno = REGNO (XEXP (rld[r].in_reg, 0));
d08ea79f 7561
51f0c3b7
JW
7562 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7563 : HARD_REGNO_NREGS (nregno,
eceef4c9 7564 GET_MODE (rld[r].reg_rtx)));
05d10675 7565
eceef4c9 7566 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
51f0c3b7
JW
7567
7568 if (nregno < FIRST_PSEUDO_REGISTER)
7569 for (k = 1; k < nnr; k++)
7570 reg_last_reload_reg[nregno + k]
7571 = (nr == nnr
eceef4c9
BS
7572 ? gen_rtx_REG (reg_raw_mode[REGNO (rld[r].reg_rtx) + k],
7573 REGNO (rld[r].reg_rtx) + k)
51f0c3b7
JW
7574 : 0);
7575
7576 /* Unless we inherited this reload, show we haven't
cb2afeb3
R
7577 recently done a store.
7578 Previous stores of inherited auto_inc expressions
7579 also have to be discarded. */
7580 if (! reload_inherited[r]
eceef4c9 7581 || (rld[r].out && ! rld[r].out_reg))
51f0c3b7
JW
7582 spill_reg_store[i] = 0;
7583
7584 for (k = 0; k < nr; k++)
7585 {
e6e52be0
R
7586 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7587 reg_reloaded_contents[i + k]
51f0c3b7
JW
7588 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7589 ? nregno
7590 : nregno + k);
e6e52be0
R
7591 reg_reloaded_insn[i + k] = insn;
7592 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
51f0c3b7
JW
7593 }
7594 }
7595 }
d445b551 7596
51f0c3b7
JW
7597 /* However, if part of the reload reaches the end, then we must
7598 invalidate the old info for the part that survives to the end. */
7599 else if (part_reaches_end)
7600 {
546b63fb 7601 for (k = 0; k < nr; k++)
e6e52be0 7602 if (reload_reg_reaches_end_p (i + k,
eceef4c9
BS
7603 rld[r].opnum,
7604 rld[r].when_needed))
e6e52be0 7605 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
32131a9c
RK
7606 }
7607 }
7608
7609 /* The following if-statement was #if 0'd in 1.34 (or before...).
7610 It's reenabled in 1.35 because supposedly nothing else
7611 deals with this problem. */
7612
7613 /* If a register gets output-reloaded from a non-spill register,
7614 that invalidates any previous reloaded copy of it.
7615 But forget_old_reloads_1 won't get to see it, because
7616 it thinks only about the original insn. So invalidate it here. */
eceef4c9
BS
7617 if (i < 0 && rld[r].out != 0
7618 && (GET_CODE (rld[r].out) == REG
7619 || (GET_CODE (rld[r].out) == MEM
7620 && GET_CODE (rld[r].out_reg) == REG)))
32131a9c 7621 {
eceef4c9
BS
7622 rtx out = (GET_CODE (rld[r].out) == REG
7623 ? rld[r].out : rld[r].out_reg);
cb2afeb3 7624 register int nregno = REGNO (out);
c7093272 7625 if (nregno >= FIRST_PSEUDO_REGISTER)
cb2afeb3 7626 {
6a651371 7627 rtx src_reg, store_insn = NULL_RTX;
cb2afeb3
R
7628
7629 reg_last_reload_reg[nregno] = 0;
7630
7631 /* If we can find a hard register that is stored, record
7632 the storing insn so that we may delete this insn with
7633 delete_output_reload. */
eceef4c9 7634 src_reg = rld[r].reg_rtx;
cb2afeb3
R
7635
7636 /* If this is an optional reload, try to find the source reg
7637 from an input reload. */
7638 if (! src_reg)
7639 {
7640 rtx set = single_set (insn);
eceef4c9 7641 if (set && SET_DEST (set) == rld[r].out)
cb2afeb3
R
7642 {
7643 int k;
7644
7645 src_reg = SET_SRC (set);
7646 store_insn = insn;
7647 for (k = 0; k < n_reloads; k++)
7648 {
eceef4c9 7649 if (rld[k].in == src_reg)
cb2afeb3 7650 {
eceef4c9 7651 src_reg = rld[k].reg_rtx;
cb2afeb3
R
7652 break;
7653 }
7654 }
7655 }
7656 }
7657 else
7658 store_insn = new_spill_reg_store[REGNO (src_reg)];
7659 if (src_reg && GET_CODE (src_reg) == REG
7660 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7661 {
7662 int src_regno = REGNO (src_reg);
7663 int nr = HARD_REGNO_NREGS (src_regno, reload_mode[r]);
7664 /* Where to find a death note varies with
7665 PRESERVE_DEATH_INFO_REGNO_P. The condition is not
7666 necessarily checked exactly in the code that moves
7667 notes, so just check both locations. */
7668 rtx note = find_regno_note (insn, REG_DEAD, src_regno);
7669 if (! note)
7670 note = find_regno_note (store_insn, REG_DEAD, src_regno);
7671 while (nr-- > 0)
7672 {
7673 spill_reg_store[src_regno + nr] = store_insn;
7674 spill_reg_stored_to[src_regno + nr] = out;
7675 reg_reloaded_contents[src_regno + nr] = nregno;
7676 reg_reloaded_insn[src_regno + nr] = store_insn;
00f9f1bc 7677 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
cb2afeb3
R
7678 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
7679 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7680 if (note)
7681 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7682 else
7683 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7684 }
7685 reg_last_reload_reg[nregno] = src_reg;
7686 }
7687 }
c7093272
RK
7688 else
7689 {
eceef4c9 7690 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (rld[r].out));
36281332 7691
c7093272
RK
7692 while (num_regs-- > 0)
7693 reg_last_reload_reg[nregno + num_regs] = 0;
7694 }
32131a9c
RK
7695 }
7696 }
e6e52be0 7697 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
32131a9c
RK
7698}
7699\f
5e03c156
RK
7700/* Emit code to perform a reload from IN (which may be a reload register) to
7701 OUT (which may also be a reload register). IN or OUT is from operand
05d10675 7702 OPNUM with reload type TYPE.
546b63fb 7703
3c3eeea6 7704 Returns first insn emitted. */
32131a9c
RK
7705
7706rtx
5e03c156
RK
7707gen_reload (out, in, opnum, type)
7708 rtx out;
32131a9c 7709 rtx in;
546b63fb
RK
7710 int opnum;
7711 enum reload_type type;
32131a9c 7712{
546b63fb 7713 rtx last = get_last_insn ();
7a5b18b0
RK
7714 rtx tem;
7715
7716 /* If IN is a paradoxical SUBREG, remove it and try to put the
7717 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7718 if (GET_CODE (in) == SUBREG
7719 && (GET_MODE_SIZE (GET_MODE (in))
7720 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7721 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7722 in = SUBREG_REG (in), out = tem;
7723 else if (GET_CODE (out) == SUBREG
eceef4c9
BS
7724 && (GET_MODE_SIZE (GET_MODE (out))
7725 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7726 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7a5b18b0 7727 out = SUBREG_REG (out), in = tem;
32131a9c 7728
a8fdc208 7729 /* How to do this reload can get quite tricky. Normally, we are being
32131a9c
RK
7730 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7731 register that didn't get a hard register. In that case we can just
7732 call emit_move_insn.
7733
a7fd196c
JW
7734 We can also be asked to reload a PLUS that adds a register or a MEM to
7735 another register, constant or MEM. This can occur during frame pointer
7736 elimination and while reloading addresses. This case is handled by
7737 trying to emit a single insn to perform the add. If it is not valid,
7738 we use a two insn sequence.
32131a9c
RK
7739
7740 Finally, we could be called to handle an 'o' constraint by putting
7741 an address into a register. In that case, we first try to do this
7742 with a named pattern of "reload_load_address". If no such pattern
7743 exists, we just emit a SET insn and hope for the best (it will normally
7744 be valid on machines that use 'o').
7745
7746 This entire process is made complex because reload will never
7747 process the insns we generate here, so we must ensure that they
7748 will fit their constraints, and also because parts of IN might be
7749 being reloaded separately and replaced with spill registers.
7750 Because of this, we are, in some sense, just guessing the right approach
7751 here. The one listed above seems to work.
7752
7753 ??? At some point, this whole thing needs to be rethought. */
7754
7755 if (GET_CODE (in) == PLUS
a7fd196c 7756 && (GET_CODE (XEXP (in, 0)) == REG
5c6b1bd2 7757 || GET_CODE (XEXP (in, 0)) == SUBREG
a7fd196c
JW
7758 || GET_CODE (XEXP (in, 0)) == MEM)
7759 && (GET_CODE (XEXP (in, 1)) == REG
5c6b1bd2 7760 || GET_CODE (XEXP (in, 1)) == SUBREG
a7fd196c
JW
7761 || CONSTANT_P (XEXP (in, 1))
7762 || GET_CODE (XEXP (in, 1)) == MEM))
32131a9c 7763 {
a7fd196c
JW
7764 /* We need to compute the sum of a register or a MEM and another
7765 register, constant, or MEM, and put it into the reload
3002e160
JW
7766 register. The best possible way of doing this is if the machine
7767 has a three-operand ADD insn that accepts the required operands.
32131a9c
RK
7768
7769 The simplest approach is to try to generate such an insn and see if it
7770 is recognized and matches its constraints. If so, it can be used.
7771
7772 It might be better not to actually emit the insn unless it is valid,
0009eff2 7773 but we need to pass the insn as an operand to `recog' and
0eadeb15 7774 `extract_insn' and it is simpler to emit and then delete the insn if
0009eff2 7775 not valid than to dummy things up. */
a8fdc208 7776
af929c62 7777 rtx op0, op1, tem, insn;
32131a9c 7778 int code;
a8fdc208 7779
af929c62
RK
7780 op0 = find_replacement (&XEXP (in, 0));
7781 op1 = find_replacement (&XEXP (in, 1));
7782
32131a9c
RK
7783 /* Since constraint checking is strict, commutativity won't be
7784 checked, so we need to do that here to avoid spurious failure
7785 if the add instruction is two-address and the second operand
7786 of the add is the same as the reload reg, which is frequently
7787 the case. If the insn would be A = B + A, rearrange it so
0f41302f 7788 it will be A = A + B as constrain_operands expects. */
a8fdc208 7789
32131a9c 7790 if (GET_CODE (XEXP (in, 1)) == REG
5e03c156 7791 && REGNO (out) == REGNO (XEXP (in, 1)))
af929c62
RK
7792 tem = op0, op0 = op1, op1 = tem;
7793
7794 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
38a448ca 7795 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
32131a9c 7796
38a448ca 7797 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
32131a9c
RK
7798 code = recog_memoized (insn);
7799
7800 if (code >= 0)
7801 {
0eadeb15 7802 extract_insn (insn);
32131a9c
RK
7803 /* We want constrain_operands to treat this insn strictly in
7804 its validity determination, i.e., the way it would after reload
7805 has completed. */
0eadeb15 7806 if (constrain_operands (1))
32131a9c
RK
7807 return insn;
7808 }
7809
546b63fb 7810 delete_insns_since (last);
32131a9c
RK
7811
7812 /* If that failed, we must use a conservative two-insn sequence.
09522f21
FS
7813
7814 Use a move to copy one operand into the reload register. Prefer
7815 to reload a constant, MEM or pseudo since the move patterns can
7816 handle an arbitrary operand. If OP1 is not a constant, MEM or
7817 pseudo and OP1 is not a valid operand for an add instruction, then
7818 reload OP1.
7819
7820 After reloading one of the operands into the reload register, add
7821 the reload register to the output register.
32131a9c
RK
7822
7823 If there is another way to do this for a specific machine, a
7824 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7825 we emit below. */
7826
09522f21
FS
7827 code = (int) add_optab->handlers[(int) GET_MODE (out)].insn_code;
7828
5c6b1bd2 7829 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
af929c62 7830 || (GET_CODE (op1) == REG
09522f21
FS
7831 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
7832 || (code != CODE_FOR_nothing
a995e389
RH
7833 && ! ((*insn_data[code].operand[2].predicate)
7834 (op1, insn_data[code].operand[2].mode))))
af929c62 7835 tem = op0, op0 = op1, op1 = tem;
32131a9c 7836
5c6b1bd2 7837 gen_reload (out, op0, opnum, type);
39b56c2a 7838
5e03c156 7839 /* If OP0 and OP1 are the same, we can use OUT for OP1.
39b56c2a
RK
7840 This fixes a problem on the 32K where the stack pointer cannot
7841 be used as an operand of an add insn. */
7842
7843 if (rtx_equal_p (op0, op1))
5e03c156 7844 op1 = out;
39b56c2a 7845
5e03c156 7846 insn = emit_insn (gen_add2_insn (out, op1));
c77c9766
RK
7847
7848 /* If that failed, copy the address register to the reload register.
0f41302f 7849 Then add the constant to the reload register. */
c77c9766
RK
7850
7851 code = recog_memoized (insn);
7852
7853 if (code >= 0)
7854 {
0eadeb15 7855 extract_insn (insn);
c77c9766
RK
7856 /* We want constrain_operands to treat this insn strictly in
7857 its validity determination, i.e., the way it would after reload
7858 has completed. */
0eadeb15 7859 if (constrain_operands (1))
4117a96b
R
7860 {
7861 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
7862 REG_NOTES (insn)
9e6a5703 7863 = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
4117a96b
R
7864 return insn;
7865 }
c77c9766
RK
7866 }
7867
7868 delete_insns_since (last);
7869
5c6b1bd2 7870 gen_reload (out, op1, opnum, type);
4117a96b 7871 insn = emit_insn (gen_add2_insn (out, op0));
9e6a5703 7872 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
32131a9c
RK
7873 }
7874
0dadecf6
RK
7875#ifdef SECONDARY_MEMORY_NEEDED
7876 /* If we need a memory location to do the move, do it that way. */
7877 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
5e03c156 7878 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
0dadecf6 7879 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
5e03c156
RK
7880 REGNO_REG_CLASS (REGNO (out)),
7881 GET_MODE (out)))
0dadecf6
RK
7882 {
7883 /* Get the memory to use and rewrite both registers to its mode. */
5e03c156 7884 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
0dadecf6 7885
5e03c156 7886 if (GET_MODE (loc) != GET_MODE (out))
38a448ca 7887 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
0dadecf6
RK
7888
7889 if (GET_MODE (loc) != GET_MODE (in))
38a448ca 7890 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
0dadecf6 7891
5c6b1bd2
RK
7892 gen_reload (loc, in, opnum, type);
7893 gen_reload (out, loc, opnum, type);
0dadecf6
RK
7894 }
7895#endif
7896
32131a9c
RK
7897 /* If IN is a simple operand, use gen_move_insn. */
7898 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
5e03c156 7899 emit_insn (gen_move_insn (out, in));
32131a9c
RK
7900
7901#ifdef HAVE_reload_load_address
7902 else if (HAVE_reload_load_address)
5e03c156 7903 emit_insn (gen_reload_load_address (out, in));
32131a9c
RK
7904#endif
7905
5e03c156 7906 /* Otherwise, just write (set OUT IN) and hope for the best. */
32131a9c 7907 else
38a448ca 7908 emit_insn (gen_rtx_SET (VOIDmode, out, in));
32131a9c
RK
7909
7910 /* Return the first insn emitted.
546b63fb 7911 We cannot just return get_last_insn, because there may have
32131a9c
RK
7912 been multiple instructions emitted. Also note that gen_move_insn may
7913 emit more than one insn itself, so we cannot assume that there is one
7914 insn emitted per emit_insn_before call. */
7915
546b63fb 7916 return last ? NEXT_INSN (last) : get_insns ();
32131a9c
RK
7917}
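/* Illustrative sketch, not part of the original file: a hypothetical
   caller handing gen_reload a PLUS address.  The register number and the
   displacement are made up; gen_reload first tries a single three-operand
   add and, if the target rejects it, falls back to a move into OUT
   followed by an add, returning the first insn it emitted.  */
#if 0
static rtx
gen_reload_example ()
{
  rtx reloadreg = gen_rtx_REG (Pmode, 0);
  rtx addr = gen_rtx_PLUS (Pmode, frame_pointer_rtx, GEN_INT (16));

  return gen_reload (reloadreg, addr, 0, RELOAD_FOR_INPUT);
}
#endif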
7918\f
7919/* Delete a previously made output-reload
7920 whose result we now believe is not needed.
7921 First we double-check.
7922
7923 INSN is the insn now being processed.
cb2afeb3
R
7924 LAST_RELOAD_REG is the hard register number for which we want to delete
7925 the last output reload.
7926 J is the reload-number that originally used REG. The caller has made
7927 certain that reload J doesn't use REG any longer for input. */
32131a9c
RK
7928
7929static void
cb2afeb3 7930delete_output_reload (insn, j, last_reload_reg)
32131a9c
RK
7931 rtx insn;
7932 int j;
cb2afeb3 7933 int last_reload_reg;
32131a9c 7934{
cb2afeb3
R
7935 rtx output_reload_insn = spill_reg_store[last_reload_reg];
7936 rtx reg = spill_reg_stored_to[last_reload_reg];
7937 int k;
7938 int n_occurrences;
7939 int n_inherited = 0;
32131a9c 7940 register rtx i1;
cb2afeb3 7941 rtx substed;
05d10675 7942
32131a9c
RK
7943 /* Get the raw pseudo-register referred to. */
7944
32131a9c
RK
7945 while (GET_CODE (reg) == SUBREG)
7946 reg = SUBREG_REG (reg);
cb2afeb3
R
7947 substed = reg_equiv_memory_loc[REGNO (reg)];
7948
7949 /* This is unsafe if the operand occurs more often in the current
7950 insn than it is inherited. */
7951 for (k = n_reloads - 1; k >= 0; k--)
7952 {
eceef4c9 7953 rtx reg2 = rld[k].in;
cb2afeb3
R
7954 if (! reg2)
7955 continue;
7956 if (GET_CODE (reg2) == MEM || reload_override_in[k])
eceef4c9 7957 reg2 = rld[k].in_reg;
cb2afeb3 7958#ifdef AUTO_INC_DEC
eceef4c9
BS
7959 if (rld[k].out && ! rld[k].out_reg)
7960 reg2 = XEXP (rld[k].in_reg, 0);
cb2afeb3
R
7961#endif
7962 while (GET_CODE (reg2) == SUBREG)
7963 reg2 = SUBREG_REG (reg2);
7964 if (rtx_equal_p (reg2, reg))
2eb6dac7
AS
7965 {
7966 if (reload_inherited[k] || reload_override_in[k] || k == j)
7967 {
cb2afeb3 7968 n_inherited++;
eceef4c9 7969 reg2 = rld[k].out_reg;
2eb6dac7
AS
7970 if (! reg2)
7971 continue;
7972 while (GET_CODE (reg2) == SUBREG)
7973 reg2 = XEXP (reg2, 0);
7974 if (rtx_equal_p (reg2, reg))
7975 n_inherited++;
7976 }
7977 else
7978 return;
7979 }
cb2afeb3
R
7980 }
7981 n_occurrences = count_occurrences (PATTERN (insn), reg);
7982 if (substed)
7983 n_occurrences += count_occurrences (PATTERN (insn), substed);
7984 if (n_occurrences > n_inherited)
7985 return;
32131a9c
RK
7986
7987 /* If the pseudo-reg we are reloading is no longer referenced
7988 anywhere between the store into it and here,
7989 and no jumps or labels intervene, then the value can get
7990 here through the reload reg alone.
7991 Otherwise, give up--return. */
7992 for (i1 = NEXT_INSN (output_reload_insn);
7993 i1 != insn; i1 = NEXT_INSN (i1))
7994 {
7995 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7996 return;
7997 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7998 && reg_mentioned_p (reg, PATTERN (i1)))
aa6498c2 7999 {
cb2afeb3
R
8000 /* If this is a USE in front of INSN, we only have to check that
8001 there are no more references than accounted for by inheritance. */
8002 while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE)
aa6498c2 8003 {
cb2afeb3 8004 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
aa6498c2
R
8005 i1 = NEXT_INSN (i1);
8006 }
cb2afeb3 8007 if (n_occurrences <= n_inherited && i1 == insn)
aa6498c2
R
8008 break;
8009 return;
8010 }
32131a9c
RK
8011 }
8012
aa6498c2
R
8013 /* The caller has already checked that REG dies or is set in INSN.
8014 It has also checked that we are optimizing, and thus some inaccuracies
8015 in the debugging information are acceptable.
8016 So we could just delete output_reload_insn.
8017 But in some cases we can improve the debugging information without
8018 sacrificing optimization - maybe even improving the code:
8019 See if the pseudo reg has been completely replaced
32131a9c
RK
8020 with reload regs. If so, delete the store insn
8021 and forget we had a stack slot for the pseudo. */
eceef4c9 8022 if (rld[j].out != rld[j].in
aa6498c2 8023 && REG_N_DEATHS (REGNO (reg)) == 1
a3a24aa6 8024 && REG_N_SETS (REGNO (reg)) == 1
aa6498c2
R
8025 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
8026 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
32131a9c
RK
8027 {
8028 rtx i2;
8029
8030 /* We know that it was used only between here
8031 and the beginning of the current basic block.
8032 (We also know that the last use before INSN was
8033 the output reload we are thinking of deleting, but never mind that.)
8034 Search that range; see if any ref remains. */
8035 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8036 {
d445b551
RK
8037 rtx set = single_set (i2);
8038
32131a9c
RK
8039 /* Uses which just store in the pseudo don't count,
8040 since if they are the only uses, they are dead. */
d445b551 8041 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
8042 continue;
8043 if (GET_CODE (i2) == CODE_LABEL
8044 || GET_CODE (i2) == JUMP_INSN)
8045 break;
8046 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
8047 && reg_mentioned_p (reg, PATTERN (i2)))
aa6498c2
R
8048 {
8049 /* Some other ref remains; just delete the output reload we
8050 know to be dead. */
cb2afeb3
R
8051 delete_address_reloads (output_reload_insn, insn);
8052 PUT_CODE (output_reload_insn, NOTE);
8053 NOTE_SOURCE_FILE (output_reload_insn) = 0;
8054 NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;
aa6498c2
R
8055 return;
8056 }
32131a9c
RK
8057 }
8058
8059 /* Delete the now-dead stores into this pseudo. */
8060 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8061 {
d445b551
RK
8062 rtx set = single_set (i2);
8063
8064 if (set != 0 && SET_DEST (set) == reg)
5507b94b 8065 {
cb2afeb3 8066 delete_address_reloads (i2, insn);
5507b94b
RK
8067 /* This might be a basic block head,
8068 thus don't use delete_insn. */
8069 PUT_CODE (i2, NOTE);
8070 NOTE_SOURCE_FILE (i2) = 0;
8071 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
8072 }
32131a9c
RK
8073 if (GET_CODE (i2) == CODE_LABEL
8074 || GET_CODE (i2) == JUMP_INSN)
8075 break;
8076 }
8077
8078 /* For the debugging info,
8079 say the pseudo lives in this reload reg. */
eceef4c9 8080 reg_renumber[REGNO (reg)] = REGNO (rld[j].reg_rtx);
32131a9c
RK
8081 alter_reg (REGNO (reg), -1);
8082 }
cb2afeb3
R
8083 delete_address_reloads (output_reload_insn, insn);
8084 PUT_CODE (output_reload_insn, NOTE);
8085 NOTE_SOURCE_FILE (output_reload_insn) = 0;
8086 NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;
8087
8088}
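/* Illustrative sketch, not part of the original file: the idiom used in
   delete_output_reload (and again below) for discarding an insn that
   might be a basic block head -- turn it into a deleted note instead of
   calling delete_insn.  */
#if 0
static void
turn_insn_into_deleted_note (insn)
     rtx insn;
{
  PUT_CODE (insn, NOTE);
  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
  NOTE_SOURCE_FILE (insn) = 0;
}
#endif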
8089
8090/* We are going to delete DEAD_INSN. Recursively delete loads of
8091 reload registers used in DEAD_INSN that are not used until CURRENT_INSN.
8092 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8093static void
8094delete_address_reloads (dead_insn, current_insn)
8095 rtx dead_insn, current_insn;
8096{
8097 rtx set = single_set (dead_insn);
8098 rtx set2, dst, prev, next;
8099 if (set)
8100 {
8101 rtx dst = SET_DEST (set);
8102 if (GET_CODE (dst) == MEM)
8103 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8104 }
8105 /* If we deleted the store from a reloaded post_{in,de}c expression,
8106 we can delete the matching adds. */
8107 prev = PREV_INSN (dead_insn);
8108 next = NEXT_INSN (dead_insn);
8109 if (! prev || ! next)
8110 return;
8111 set = single_set (next);
8112 set2 = single_set (prev);
8113 if (! set || ! set2
8114 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8115 || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
8116 || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
8117 return;
8118 dst = SET_DEST (set);
8119 if (! rtx_equal_p (dst, SET_DEST (set2))
8120 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8121 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8122 || (INTVAL (XEXP (SET_SRC (set), 1))
8123 != - INTVAL (XEXP (SET_SRC (set2), 1))))
8124 return;
8125 delete_insn (prev);
8126 delete_insn (next);
8127}
8128
8129/* Subfunction of delete_address_reloads: process registers found in X. */
8130static void
8131delete_address_reloads_1 (dead_insn, x, current_insn)
8132 rtx dead_insn, x, current_insn;
8133{
8134 rtx prev, set, dst, i2;
8135 int i, j;
8136 enum rtx_code code = GET_CODE (x);
8137
8138 if (code != REG)
8139 {
6f7d635c 8140 const char *fmt = GET_RTX_FORMAT (code);
cb2afeb3
R
8141 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8142 {
8143 if (fmt[i] == 'e')
8144 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
8145 else if (fmt[i] == 'E')
8146 {
8147 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8148 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
8149 current_insn);
8150 }
8151 }
8152 return;
8153 }
8154
8155 if (spill_reg_order[REGNO (x)] < 0)
8156 return;
aa6498c2 8157
cb2afeb3
R
8158 /* Scan backwards for the insn that sets X. This might be far back, due
8159 to inheritance. */
8160 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
8161 {
8162 code = GET_CODE (prev);
8163 if (code == CODE_LABEL || code == JUMP_INSN)
8164 return;
8165 if (GET_RTX_CLASS (code) != 'i')
8166 continue;
8167 if (reg_set_p (x, PATTERN (prev)))
8168 break;
8169 if (reg_referenced_p (x, PATTERN (prev)))
8170 return;
8171 }
8172 if (! prev || INSN_UID (prev) < reload_first_uid)
8173 return;
8174 /* Check that PREV only sets the reload register. */
8175 set = single_set (prev);
8176 if (! set)
8177 return;
8178 dst = SET_DEST (set);
8179 if (GET_CODE (dst) != REG
8180 || ! rtx_equal_p (dst, x))
8181 return;
8182 if (! reg_set_p (dst, PATTERN (dead_insn)))
8183 {
8184 /* Check if DST was used in a later insn -
8185 it might have been inherited. */
8186 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
8187 {
8188 if (GET_CODE (i2) == CODE_LABEL)
8189 break;
8190 if (GET_RTX_CLASS (GET_CODE (i2)) != 'i')
8191 continue;
8192 if (reg_referenced_p (dst, PATTERN (i2)))
8193 {
8194 /* If there is a reference to the register in the current insn,
8195 it might be loaded in a non-inherited reload. If no other
8196 reload uses it, that means the register is set before
8197 referenced. */
8198 if (i2 == current_insn)
8199 {
8200 for (j = n_reloads - 1; j >= 0; j--)
eceef4c9 8201 if ((rld[j].reg_rtx == dst && reload_inherited[j])
cb2afeb3
R
8202 || reload_override_in[j] == dst)
8203 return;
8204 for (j = n_reloads - 1; j >= 0; j--)
eceef4c9 8205 if (rld[j].in && rld[j].reg_rtx == dst)
cb2afeb3
R
8206 break;
8207 if (j >= 0)
8208 break;
8209 }
8210 return;
8211 }
8212 if (GET_CODE (i2) == JUMP_INSN)
8213 break;
cb2afeb3 8214 /* If DST is still live at CURRENT_INSN, check if it is used for
3900dc09
R
8215 any reload. Note that even if CURRENT_INSN sets DST, we still
8216 have to check the reloads. */
cb2afeb3
R
8217 if (i2 == current_insn)
8218 {
8219 for (j = n_reloads - 1; j >= 0; j--)
eceef4c9 8220 if ((rld[j].reg_rtx == dst && reload_inherited[j])
cb2afeb3
R
8221 || reload_override_in[j] == dst)
8222 return;
8223 /* ??? We can't finish the loop here, because dst might be
8224 allocated to a pseudo in this block if no reload in this
8225 block needs any of the classes containing DST - see
8226 spill_hard_reg. There is no easy way to tell this, so we
8227 have to scan until the end of the basic block. */
8228 }
3900dc09
R
8229 if (reg_set_p (dst, PATTERN (i2)))
8230 break;
cb2afeb3
R
8231 }
8232 }
8233 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
8234 reg_reloaded_contents[REGNO (dst)] = -1;
8235 /* Can't use delete_insn here because PREV might be a basic block head. */
8236 PUT_CODE (prev, NOTE);
8237 NOTE_LINE_NUMBER (prev) = NOTE_INSN_DELETED;
8238 NOTE_SOURCE_FILE (prev) = 0;
32131a9c 8239}
32131a9c 8240\f
a8fdc208 8241/* Output reload-insns to reload VALUE into RELOADREG.
858a47b1 8242 VALUE is an autoincrement or autodecrement RTX whose operand
32131a9c
RK
8243 is a register or memory location;
8244 so reloading involves incrementing that location.
cb2afeb3 8245 IN is either identical to VALUE, or some cheaper place to reload from.
32131a9c
RK
8246
8247 INC_AMOUNT is the number to increment or decrement by (always positive).
cb2afeb3 8248 This cannot be deduced from VALUE.
32131a9c 8249
cb2afeb3
R
8250 Return the instruction that stores into RELOADREG. */
8251
8252static rtx
8253inc_for_reload (reloadreg, in, value, inc_amount)
32131a9c 8254 rtx reloadreg;
cb2afeb3 8255 rtx in, value;
32131a9c 8256 int inc_amount;
32131a9c
RK
8257{
8258 /* REG or MEM to be copied and incremented. */
8259 rtx incloc = XEXP (value, 0);
8260 /* Nonzero if increment after copying. */
8261 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
546b63fb 8262 rtx last;
0009eff2
RK
8263 rtx inc;
8264 rtx add_insn;
8265 int code;
cb2afeb3
R
8266 rtx store;
8267 rtx real_in = in == value ? XEXP (in, 0) : in;
32131a9c
RK
8268
8269 /* No hard register is equivalent to this register after
8270 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
8271 we could inc/dec that register as well (maybe even using it for
8272 the source), but I'm not sure it's worth worrying about. */
8273 if (GET_CODE (incloc) == REG)
8274 reg_last_reload_reg[REGNO (incloc)] = 0;
8275
8276 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
8277 inc_amount = - inc_amount;
8278
fb3821f7 8279 inc = GEN_INT (inc_amount);
0009eff2
RK
8280
8281 /* If this is post-increment, first copy the location to the reload reg. */
cb2afeb3
R
8282 if (post && real_in != reloadreg)
8283 emit_insn (gen_move_insn (reloadreg, real_in));
0009eff2 8284
cb2afeb3
R
8285 if (in == value)
8286 {
8287 /* See if we can directly increment INCLOC. Use a method similar to
8288 that in gen_reload. */
0009eff2 8289
cb2afeb3
R
8290 last = get_last_insn ();
8291 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8292 gen_rtx_PLUS (GET_MODE (incloc),
8293 incloc, inc)));
05d10675 8294
cb2afeb3
R
8295 code = recog_memoized (add_insn);
8296 if (code >= 0)
32131a9c 8297 {
0eadeb15
BS
8298 extract_insn (add_insn);
8299 if (constrain_operands (1))
cb2afeb3
R
8300 {
8301 /* If this is a pre-increment and we have incremented the value
8302 where it lives, copy the incremented value to RELOADREG to
8303 be used as an address. */
0009eff2 8304
cb2afeb3
R
8305 if (! post)
8306 emit_insn (gen_move_insn (reloadreg, incloc));
546b63fb 8307
cb2afeb3
R
8308 return add_insn;
8309 }
32131a9c 8310 }
cb2afeb3 8311 delete_insns_since (last);
32131a9c 8312 }
0009eff2 8313
0009eff2
RK
8314 /* If we couldn't do the increment directly, we must increment in RELOADREG.
8315 The way we do this depends on whether this is pre- or post-increment.
8316 For pre-increment, copy INCLOC to the reload register, increment it
8317 there, then save back. */
8318
8319 if (! post)
8320 {
cb2afeb3
R
8321 if (in != reloadreg)
8322 emit_insn (gen_move_insn (reloadreg, real_in));
546b63fb 8323 emit_insn (gen_add2_insn (reloadreg, inc));
cb2afeb3 8324 store = emit_insn (gen_move_insn (incloc, reloadreg));
0009eff2 8325 }
32131a9c
RK
8326 else
8327 {
0009eff2
RK
8328 /* Postincrement.
8329 Because this might be a jump insn or a compare, and because RELOADREG
8330 may not be available after the insn in an input reload, we must do
8331 the incrementation before the insn being reloaded for.
8332
cb2afeb3 8333 We have already copied IN to RELOADREG. Increment the copy in
0009eff2
RK
8334 RELOADREG, save that back, then decrement RELOADREG so it has
8335 the original value. */
8336
546b63fb 8337 emit_insn (gen_add2_insn (reloadreg, inc));
cb2afeb3 8338 store = emit_insn (gen_move_insn (incloc, reloadreg));
546b63fb 8339 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
32131a9c 8340 }
0009eff2 8341
cb2afeb3 8342 return store;
32131a9c
RK
8343}
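/* Illustrative sketch, not part of the original file: a hypothetical
   post-increment reload where IN == VALUE, so the pre-increment value
   ends up in RELOADREG and the incremented value is stored back into the
   register inside VALUE.  The modes and register numbers are invented
   for the example.  */
#if 0
static rtx
inc_for_reload_example ()
{
  rtx reloadreg = gen_rtx_REG (Pmode, 0);
  rtx value = gen_rtx (POST_INC, Pmode, gen_rtx_REG (Pmode, 3));

  return inc_for_reload (reloadreg, value, value, GET_MODE_SIZE (SImode));
}
#endif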
8344\f
8345/* Return 1 if we are certain that the constraint-string STRING allows
8346 the hard register REG. Return 0 if we can't be sure of this. */
8347
8348static int
8349constraint_accepts_reg_p (string, reg)
9b3142b3 8350 const char *string;
32131a9c
RK
8351 rtx reg;
8352{
8353 int value = 0;
8354 int regno = true_regnum (reg);
8355 int c;
8356
8357 /* Initialize for first alternative. */
8358 value = 0;
8359 /* Check that each alternative contains `g' or `r'. */
8360 while (1)
8361 switch (c = *string++)
8362 {
8363 case 0:
8364 /* If an alternative lacks `g' or `r', we lose. */
8365 return value;
8366 case ',':
8367 /* If an alternative lacks `g' or `r', we lose. */
8368 if (value == 0)
8369 return 0;
8370 /* Initialize for next alternative. */
8371 value = 0;
8372 break;
8373 case 'g':
8374 case 'r':
8375 /* Any general reg wins for this alternative. */
8376 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
8377 value = 1;
8378 break;
8379 default:
8380 /* Any reg in specified class wins for this alternative. */
8381 {
0009eff2 8382 enum reg_class class = REG_CLASS_FROM_LETTER (c);
32131a9c 8383
0009eff2 8384 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
32131a9c
RK
8385 value = 1;
8386 }
8387 }
8388}
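/* Illustrative sketch, not part of the original file: assuming REG is a
   hard register in GENERAL_REGS, every alternative must accept it for a
   nonzero answer, so a constraint string with a memory-only alternative
   yields 0 even though its first alternative would take the register.  */
#if 0
static int
constraint_accepts_reg_p_example (reg)
     rtx reg;
{
  int always = constraint_accepts_reg_p ("r", reg);      /* 1 */
  int sometimes = constraint_accepts_reg_p ("r,m", reg); /* 0 */

  return always && ! sometimes;
}
#endif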
8389\f
d445b551
RK
8390/* Return the number of places FIND appears within X, but don't count
8391 an occurrence if some SET_DEST is FIND. */
32131a9c 8392
184bb750 8393int
32131a9c
RK
8394count_occurrences (x, find)
8395 register rtx x, find;
8396{
8397 register int i, j;
8398 register enum rtx_code code;
6f7d635c 8399 register const char *format_ptr;
32131a9c
RK
8400 int count;
8401
8402 if (x == find)
8403 return 1;
8404 if (x == 0)
8405 return 0;
8406
8407 code = GET_CODE (x);
8408
8409 switch (code)
8410 {
8411 case REG:
8412 case QUEUED:
8413 case CONST_INT:
8414 case CONST_DOUBLE:
8415 case SYMBOL_REF:
8416 case CODE_LABEL:
8417 case PC:
8418 case CC0:
8419 return 0;
d445b551 8420
cb2afeb3
R
8421 case MEM:
8422 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
8423 return 1;
8424 break;
d445b551
RK
8425 case SET:
8426 if (SET_DEST (x) == find)
8427 return count_occurrences (SET_SRC (x), find);
8428 break;
05d10675 8429
e9a25f70
JL
8430 default:
8431 break;
32131a9c
RK
8432 }
8433
8434 format_ptr = GET_RTX_FORMAT (code);
8435 count = 0;
8436
8437 for (i = 0; i < GET_RTX_LENGTH (code); i++)
8438 {
8439 switch (*format_ptr++)
8440 {
8441 case 'e':
8442 count += count_occurrences (XEXP (x, i), find);
8443 break;
8444
8445 case 'E':
8446 if (XVEC (x, i) != NULL)
8447 {
8448 for (j = 0; j < XVECLEN (x, i); j++)
8449 count += count_occurrences (XVECEXP (x, i, j), find);
8450 }
8451 break;
8452 }
8453 }
8454 return count;
8455}
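/* Illustrative sketch, not part of the original file: counting how often
   a (hypothetical) pseudo REG is used inside INSN's pattern.  A SET whose
   destination is exactly REG contributes only the uses in its source.  */
#if 0
static int
count_reg_uses_example (insn, reg)
     rtx insn, reg;
{
  return count_occurrences (PATTERN (insn), reg);
}
#endif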
2a9fb548
ILT
8456\f
8457/* This array holds values which are equivalent to a hard register
8458 during reload_cse_regs. Each array element is an EXPR_LIST of
8459 values. Each time a hard register is set, we set the corresponding
8460 array element to the value. Each time a hard register is copied
8461 into memory, we add the memory location to the corresponding array
8462 element. We don't store values or memory addresses with side
8463 effects in this array.
8464
8465 If the value is a CONST_INT, then the mode of the containing
8466 EXPR_LIST is the mode in which that CONST_INT was referenced.
8467
8468 We sometimes clobber a specific entry in a list. In that case, we
8469 just set XEXP (list-entry, 0) to 0. */
8470
8471static rtx *reg_values;
8472
ba325eba
ILT
8473/* This is a preallocated REG rtx which we use as a temporary in
8474 reload_cse_invalidate_regno, so that we don't need to allocate a
8475 new one each time through a loop in that function. */
8476
8477static rtx invalidate_regno_rtx;
8478
2a9fb548
ILT
8479/* Invalidate any entries in reg_values which depend on REGNO,
8480 including those for REGNO itself. This is called if REGNO is
8481 changing. If CLOBBER is true, then always forget anything we
8482 currently know about REGNO. MODE is the mode of the assignment to
8483 REGNO, which is used to determine how many hard registers are being
8484 changed. If MODE is VOIDmode, then only REGNO is being changed;
8485 this is used when invalidating call clobbered registers across a
8486 call. */
8487
8488static void
8489reload_cse_invalidate_regno (regno, mode, clobber)
8490 int regno;
8491 enum machine_mode mode;
8492 int clobber;
8493{
8494 int endregno;
8495 register int i;
8496
8497 /* Our callers don't always go through true_regnum; we may see a
8498 pseudo-register here from a CLOBBER or the like. We probably
8499 won't ever see a pseudo-register that has a real register number,
8500 but we check anyway for safety. */
8501 if (regno >= FIRST_PSEUDO_REGISTER)
8502 regno = reg_renumber[regno];
8503 if (regno < 0)
8504 return;
8505
8506 if (mode == VOIDmode)
8507 endregno = regno + 1;
8508 else
8509 endregno = regno + HARD_REGNO_NREGS (regno, mode);
8510
8511 if (clobber)
8512 for (i = regno; i < endregno; i++)
8513 reg_values[i] = 0;
8514
8515 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8516 {
8517 rtx x;
8518
8519 for (x = reg_values[i]; x; x = XEXP (x, 1))
8520 {
8521 if (XEXP (x, 0) != 0
9e148ceb 8522 && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR))
2a9fb548
ILT
8523 {
8524 /* If this is the only entry on the list, clear
05d10675
BS
8525 reg_values[i]. Otherwise, just clear this entry on
8526 the list. */
2a9fb548
ILT
8527 if (XEXP (x, 1) == 0 && x == reg_values[i])
8528 {
8529 reg_values[i] = 0;
8530 break;
8531 }
8532 XEXP (x, 0) = 0;
8533 }
8534 }
8535 }
ba325eba
ILT
8536
8537 /* We must look at earlier registers, in case REGNO is part of a
8538 multi-word value but is not the first register. If an earlier
8539 register has a value in a mode which overlaps REGNO, then we must
8540 invalidate that earlier register. Note that we do not need to
8541 check REGNO or later registers (we must not check REGNO itself,
8542 because we would incorrectly conclude that there was a conflict). */
8543
8544 for (i = 0; i < regno; i++)
8545 {
8546 rtx x;
8547
8548 for (x = reg_values[i]; x; x = XEXP (x, 1))
8549 {
8550 if (XEXP (x, 0) != 0)
8551 {
dbd7556e 8552 PUT_MODE (invalidate_regno_rtx, GET_MODE (x));
ba325eba
ILT
8553 REGNO (invalidate_regno_rtx) = i;
8554 if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx,
8555 NULL_PTR))
8556 {
8557 reload_cse_invalidate_regno (i, VOIDmode, 1);
8558 break;
8559 }
8560 }
8561 }
8562 }
2a9fb548
ILT
8563}
8564
866aa3b6
DE
8565/* The memory at address MEM_BASE is being changed.
8566 Return whether this change will invalidate VAL. */
2a9fb548
ILT
8567
8568static int
cbfc3ad3 8569reload_cse_mem_conflict_p (mem_base, val)
2a9fb548 8570 rtx mem_base;
2a9fb548
ILT
8571 rtx val;
8572{
8573 enum rtx_code code;
6f7d635c 8574 const char *fmt;
2a9fb548
ILT
8575 int i;
8576
8577 code = GET_CODE (val);
8578 switch (code)
8579 {
8580 /* Get rid of a few simple cases quickly. */
8581 case REG:
2a9fb548
ILT
8582 case PC:
8583 case CC0:
8584 case SCRATCH:
8585 case CONST:
8586 case CONST_INT:
8587 case CONST_DOUBLE:
8588 case SYMBOL_REF:
8589 case LABEL_REF:
8590 return 0;
8591
8592 case MEM:
866aa3b6
DE
8593 if (GET_MODE (mem_base) == BLKmode
8594 || GET_MODE (val) == BLKmode)
8595 return 1;
e9a25f70
JL
8596 if (anti_dependence (val, mem_base))
8597 return 1;
8598 /* The address may contain nested MEMs. */
8599 break;
2a9fb548
ILT
8600
8601 default:
8602 break;
8603 }
8604
8605 fmt = GET_RTX_FORMAT (code);
8606
8607 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8608 {
8609 if (fmt[i] == 'e')
8610 {
cbfc3ad3 8611 if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i)))
2a9fb548
ILT
8612 return 1;
8613 }
8614 else if (fmt[i] == 'E')
8615 {
8616 int j;
8617
8618 for (j = 0; j < XVECLEN (val, i); j++)
cbfc3ad3 8619 if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
2a9fb548
ILT
8620 return 1;
8621 }
8622 }
8623
8624 return 0;
8625}
8626
8627/* Invalidate any entries in reg_values which are changed because of a
8628 store to MEM_RTX. If this is called because of a non-const call
8629 instruction, MEM_RTX is (mem:BLK const0_rtx). */
8630
8631static void
8632reload_cse_invalidate_mem (mem_rtx)
8633 rtx mem_rtx;
8634{
8635 register int i;
2a9fb548
ILT
8636
8637 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8638 {
8639 rtx x;
8640
8641 for (x = reg_values[i]; x; x = XEXP (x, 1))
8642 {
8643 if (XEXP (x, 0) != 0
cbfc3ad3 8644 && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0)))
2a9fb548
ILT
8645 {
8646 /* If this is the only entry on the list, clear
05d10675
BS
8647 reg_values[i]. Otherwise, just clear this entry on
8648 the list. */
2a9fb548
ILT
8649 if (XEXP (x, 1) == 0 && x == reg_values[i])
8650 {
8651 reg_values[i] = 0;
8652 break;
8653 }
8654 XEXP (x, 0) = 0;
8655 }
8656 }
8657 }
8658}
8659
8660/* Invalidate DEST, which is being assigned to or clobbered. The
8661 second parameter exists so that this function can be passed to
8662 note_stores; it is ignored. */
8663
8664static void
8665reload_cse_invalidate_rtx (dest, ignore)
8666 rtx dest;
487a6e06 8667 rtx ignore ATTRIBUTE_UNUSED;
2a9fb548
ILT
8668{
8669 while (GET_CODE (dest) == STRICT_LOW_PART
8670 || GET_CODE (dest) == SIGN_EXTRACT
8671 || GET_CODE (dest) == ZERO_EXTRACT
8672 || GET_CODE (dest) == SUBREG)
8673 dest = XEXP (dest, 0);
8674
8675 if (GET_CODE (dest) == REG)
8676 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
8677 else if (GET_CODE (dest) == MEM)
8678 reload_cse_invalidate_mem (dest);
8679}
8680
8681/* Do a very simple CSE pass over the hard registers.
8682
8683 This function detects no-op moves where we happened to assign two
8684 different pseudo-registers to the same hard register, and then
8685 copied one to the other. Reload will generate a useless
8686 instruction copying a register to itself.
8687
8688 This function also detects cases where we load a value from memory
8689 into two different registers, and (if memory is more expensive than
8690 registers) changes it to simply copy the first register into the
05d10675 8691 second register.
e9a25f70
JL
8692
8693 Another optimization is performed that scans the operands of each
8694 instruction to see whether the value is already available in a
8695 hard register. It then replaces the operand with the hard register
8696 if possible, much like an optional reload would. */
2a9fb548 8697
5adf6da0
R
8698static void
8699reload_cse_regs_1 (first)
2a9fb548
ILT
8700 rtx first;
8701{
8702 char *firstobj;
8703 rtx callmem;
8704 register int i;
8705 rtx insn;
8706
cbfc3ad3
RK
8707 init_alias_analysis ();
8708
2a9fb548 8709 reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
e016950d 8710 bzero ((char *)reg_values, FIRST_PSEUDO_REGISTER * sizeof (rtx));
2a9fb548
ILT
8711
8712 /* Create our EXPR_LIST structures on reload_obstack, so that we can
8713 free them when we are done. */
8714 push_obstacks (&reload_obstack, &reload_obstack);
8715 firstobj = (char *) obstack_alloc (&reload_obstack, 0);
8716
8717 /* We pass this to reload_cse_invalidate_mem to invalidate all of
8718 memory for a non-const call instruction. */
38a448ca 8719 callmem = gen_rtx_MEM (BLKmode, const0_rtx);
2a9fb548 8720
ba325eba
ILT
8721 /* This is used in reload_cse_invalidate_regno to avoid consing a
8722 new REG in a loop in that function. */
38a448ca 8723 invalidate_regno_rtx = gen_rtx_REG (VOIDmode, 0);
ba325eba 8724
2a9fb548
ILT
8725 for (insn = first; insn; insn = NEXT_INSN (insn))
8726 {
8727 rtx body;
8728
8729 if (GET_CODE (insn) == CODE_LABEL)
8730 {
8731 /* Forget all the register values at a code label. We don't
05d10675 8732 try to do anything clever around jumps. */
2a9fb548
ILT
8733 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8734 reg_values[i] = 0;
8735
8736 continue;
8737 }
8738
05d10675 8739#ifdef NON_SAVING_SETJMP
2a9fb548
ILT
8740 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
8741 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
8742 {
8743 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8744 reg_values[i] = 0;
8745
8746 continue;
8747 }
8748#endif
8749
8750 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8751 continue;
8752
8753 /* If this is a call instruction, forget anything stored in a
8754 call clobbered register, or, if this is not a const call, in
8755 memory. */
8756 if (GET_CODE (insn) == CALL_INSN)
8757 {
8758 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8759 if (call_used_regs[i])
8760 reload_cse_invalidate_regno (i, VOIDmode, 1);
8761
8762 if (! CONST_CALL_P (insn))
8763 reload_cse_invalidate_mem (callmem);
8764 }
8765
05d10675 8766
01e752d3
JL
8767 /* Forget all the register values at a volatile asm. */
8768 if (GET_CODE (insn) == INSN
8769 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
8770 && MEM_VOLATILE_P (PATTERN (insn)))
8771 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8772 reg_values[i] = 0;
8773
2a9fb548
ILT
8774 body = PATTERN (insn);
8775 if (GET_CODE (body) == SET)
8776 {
e9a25f70 8777 int count = 0;
31418d35 8778 if (reload_cse_noop_set_p (body, insn))
2a9fb548 8779 {
54e89d25
R
8780 /* If this sets the return value of the function, we must keep
8781 a USE around, in case this is in a different basic block
8782 than the final USE. Otherwise, we could lose important
8783 register liveness information on SMALL_REGISTER_CLASSES
8784 machines, where return registers might be used as spills:
8785 subsequent passes assume that spill registers are dead at
8786 the end of a basic block. */
8787 if (REG_FUNCTION_VALUE_P (SET_DEST (body)))
8788 {
8789 pop_obstacks ();
8790 PATTERN (insn) = gen_rtx_USE (VOIDmode, SET_DEST (body));
8791 INSN_CODE (insn) = -1;
8792 REG_NOTES (insn) = NULL_RTX;
8793 push_obstacks (&reload_obstack, &reload_obstack);
8794 }
8795 else
8796 {
8797 PUT_CODE (insn, NOTE);
8798 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8799 NOTE_SOURCE_FILE (insn) = 0;
8800 }
2a9fb548
ILT
8801
8802 /* We're done with this insn. */
8803 continue;
8804 }
8805
e9a25f70 8806 /* It's not a no-op, but we can try to simplify it. */
e9a25f70
JL
8807 count += reload_cse_simplify_set (body, insn);
8808
6764d250
BS
8809 if (count > 0)
8810 apply_change_group ();
121315ea 8811 else
6764d250 8812 reload_cse_simplify_operands (insn);
05d10675 8813
2a9fb548
ILT
8814 reload_cse_record_set (body, body);
8815 }
8816 else if (GET_CODE (body) == PARALLEL)
8817 {
e9a25f70 8818 int count = 0;
54e89d25 8819 rtx value = NULL_RTX;
2a9fb548
ILT
8820
8821 /* If every action in a PARALLEL is a noop, we can delete
05d10675 8822 the entire PARALLEL. */
2a9fb548 8823 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
54e89d25
R
8824 {
8825 rtx part = XVECEXP (body, 0, i);
8826 if (GET_CODE (part) == SET)
8827 {
8828 if (! reload_cse_noop_set_p (part, insn))
8829 break;
8830 if (REG_FUNCTION_VALUE_P (SET_DEST (part)))
8831 {
8832 if (value)
8833 break;
8834 value = SET_DEST (part);
8835 }
8836 }
8837 else if (GET_CODE (part) != CLOBBER)
8838 break;
8839 }
2a9fb548
ILT
8840 if (i < 0)
8841 {
54e89d25
R
8842 if (value)
8843 {
8844 pop_obstacks ();
8845 PATTERN (insn) = gen_rtx_USE (VOIDmode, value);
8846 INSN_CODE (insn) = -1;
8847 REG_NOTES (insn) = NULL_RTX;
8848 push_obstacks (&reload_obstack, &reload_obstack);
8849 }
8850 else
8851 {
8852 PUT_CODE (insn, NOTE);
8853 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8854 NOTE_SOURCE_FILE (insn) = 0;
8855 }
2a9fb548
ILT
8856
8857 /* We're done with this insn. */
8858 continue;
8859 }
05d10675 8860
e9a25f70 8861 /* It's not a no-op, but we can try to simplify it. */
e9a25f70
JL
8862 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8863 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
8864 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
8865
6764d250
BS
8866 if (count > 0)
8867 apply_change_group ();
121315ea 8868 else
6764d250 8869 reload_cse_simplify_operands (insn);
2a9fb548
ILT
8870
8871 /* Look through the PARALLEL and record the values being
05d10675 8872 set, if possible. Also handle any CLOBBERs. */
2a9fb548
ILT
8873 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8874 {
8875 rtx x = XVECEXP (body, 0, i);
8876
8877 if (GET_CODE (x) == SET)
8878 reload_cse_record_set (x, body);
8879 else
8880 note_stores (x, reload_cse_invalidate_rtx);
8881 }
8882 }
8883 else
8884 note_stores (body, reload_cse_invalidate_rtx);
8885
8886#ifdef AUTO_INC_DEC
8887 /* Clobber any registers which appear in REG_INC notes. We
05d10675
BS
8888 could keep track of the changes to their values, but it is
8889 unlikely to help. */
2a9fb548
ILT
8890 {
8891 rtx x;
8892
8893 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
8894 if (REG_NOTE_KIND (x) == REG_INC)
8895 reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX);
8896 }
8897#endif
8898
8899 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
05d10675 8900 after we have processed the insn. */
2a9fb548
ILT
8901 if (GET_CODE (insn) == CALL_INSN)
8902 {
8903 rtx x;
8904
8905 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
8906 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
8907 reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX);
8908 }
8909 }
8910
8911 /* Free all the temporary structures we created, and go back to the
8912 regular obstacks. */
8913 obstack_free (&reload_obstack, firstobj);
8914 pop_obstacks ();
8915}
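/* Illustrative sketch, not part of the original file: the simplest kind
   of insn the pass above removes.  If two pseudos were assigned to hard
   register 1, the copy between them becomes a self-move, which
   reload_cse_noop_set_p recognizes so the insn can be turned into a
   deleted note.  */
#if 0
static rtx
noop_move_example ()
{
  rtx hard_reg = gen_rtx_REG (SImode, 1);

  /* (set (reg:SI 1) (reg:SI 1)) -- a no-op move after reload.  */
  return gen_rtx_SET (VOIDmode, hard_reg, hard_reg);
}
#endif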
8916
5adf6da0
R
8917/* Call the cse- and combine-like post-reload optimization phases.
8918 FIRST is the first instruction. */
8919void
8920reload_cse_regs (first)
8921 rtx first;
8922{
8923 reload_cse_regs_1 (first);
8924 reload_combine ();
8925 reload_cse_move2add (first);
8926 if (flag_expensive_optimizations)
8927 reload_cse_regs_1 (first);
8928}
8929
2a9fb548
ILT
8930/* Return whether any value known for REGNO is equal to VAL. MODE
8931 is the mode of the object that VAL is being copied to; this matters
8932 if VAL is a CONST_INT. */
8933
8934static int
8935reload_cse_regno_equal_p (regno, val, mode)
8936 int regno;
8937 rtx val;
8938 enum machine_mode mode;
8939{
8940 rtx x;
8941
8942 if (val == 0)
8943 return 0;
8944
8945 for (x = reg_values[regno]; x; x = XEXP (x, 1))
8946 if (XEXP (x, 0) != 0
8947 && rtx_equal_p (XEXP (x, 0), val)
bb173ade
RK
8948 && (! flag_float_store || GET_CODE (XEXP (x, 0)) != MEM
8949 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2a9fb548
ILT
8950 && (GET_CODE (val) != CONST_INT
8951 || mode == GET_MODE (x)
8952 || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
6e848450
RK
8953 /* On a big endian machine if the value spans more than
8954 one register then this register holds the high part of
8955 it and we can't use it.
8956
8957 ??? We should also compare with the high part of the
8958 value. */
8959 && !(WORDS_BIG_ENDIAN
8960 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
2a9fb548
ILT
8961 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
8962 GET_MODE_BITSIZE (GET_MODE (x))))))
8963 return 1;
8964
8965 return 0;
8966}
8967
31418d35
ILT
8968/* See whether a single set is a noop. SET is the set instruction we
8969 should check, and INSN is the instruction from which it came. */
2a9fb548
ILT
8970
8971static int
31418d35 8972reload_cse_noop_set_p (set, insn)
2a9fb548 8973 rtx set;
6a651371 8974 rtx insn ATTRIBUTE_UNUSED;
2a9fb548
ILT
8975{
8976 rtx src, dest;
8977 enum machine_mode dest_mode;
8978 int dreg, sreg;
31418d35 8979 int ret;
2a9fb548
ILT
8980
8981 src = SET_SRC (set);
8982 dest = SET_DEST (set);
8983 dest_mode = GET_MODE (dest);
8984
8985 if (side_effects_p (src))
8986 return 0;
8987
8988 dreg = true_regnum (dest);
8989 sreg = true_regnum (src);
8990
31418d35
ILT
8991 /* Check for setting a register to itself. In this case, we don't
8992 have to worry about REG_DEAD notes. */
8993 if (dreg >= 0 && dreg == sreg)
8994 return 1;
8995
8996 ret = 0;
2a9fb548
ILT
8997 if (dreg >= 0)
8998 {
8999 /* Check for setting a register to itself. */
9000 if (dreg == sreg)
31418d35 9001 ret = 1;
2a9fb548
ILT
9002
9003 /* Check for setting a register to a value which we already know
05d10675 9004 is in the register. */
31418d35
ILT
9005 else if (reload_cse_regno_equal_p (dreg, src, dest_mode))
9006 ret = 1;
2a9fb548
ILT
9007
9008 /* Check for setting a register DREG to another register SREG
05d10675 9009 where SREG is equal to a value which is already in DREG. */
31418d35 9010 else if (sreg >= 0)
2a9fb548
ILT
9011 {
9012 rtx x;
9013
9014 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
31418d35 9015 {
99c2b71f
ILT
9016 rtx tmp;
9017
9018 if (XEXP (x, 0) == 0)
9019 continue;
9020
9021 if (dest_mode == GET_MODE (x))
9022 tmp = XEXP (x, 0);
9023 else if (GET_MODE_BITSIZE (dest_mode)
9024 < GET_MODE_BITSIZE (GET_MODE (x)))
9025 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
9026 else
9027 continue;
9028
9029 if (tmp
9030 && reload_cse_regno_equal_p (dreg, tmp, dest_mode))
31418d35
ILT
9031 {
9032 ret = 1;
9033 break;
9034 }
9035 }
2a9fb548
ILT
9036 }
9037 }
9038 else if (GET_CODE (dest) == MEM)
9039 {
9040 /* Check for storing a register to memory when we know that the
05d10675 9041 register is equivalent to the memory location. */
2a9fb548
ILT
9042 if (sreg >= 0
9043 && reload_cse_regno_equal_p (sreg, dest, dest_mode)
9044 && ! side_effects_p (dest))
31418d35 9045 ret = 1;
2a9fb548
ILT
9046 }
9047
31418d35 9048 return ret;
2a9fb548
ILT
9049}
9050
9051/* Try to simplify a single SET instruction. SET is the set pattern.
e9a25f70
JL
9052 INSN is the instruction it came from.
9053 This function only handles one case: if we set a register to a value
9054 which is not a register, we try to find that value in some other register
9055 and change the set into a register copy. */
2a9fb548 9056
e9a25f70 9057static int
2a9fb548
ILT
9058reload_cse_simplify_set (set, insn)
9059 rtx set;
9060 rtx insn;
9061{
9062 int dreg;
9063 rtx src;
9064 enum machine_mode dest_mode;
9065 enum reg_class dclass;
9066 register int i;
9067
2a9fb548
ILT
9068 dreg = true_regnum (SET_DEST (set));
9069 if (dreg < 0)
e9a25f70 9070 return 0;
2a9fb548
ILT
9071
9072 src = SET_SRC (set);
9073 if (side_effects_p (src) || true_regnum (src) >= 0)
e9a25f70 9074 return 0;
2a9fb548 9075
cbd5b9a2
KR
9076 dclass = REGNO_REG_CLASS (dreg);
9077
33ab8de0 9078 /* If memory loads are cheaper than register copies, don't change them. */
cbd5b9a2
KR
9079 if (GET_CODE (src) == MEM
9080 && MEMORY_MOVE_COST (GET_MODE (src), dclass, 1) < 2)
e9a25f70 9081 return 0;
2a9fb548 9082
0254c561
JC
9083 /* If the constant is cheaper than a register, don't change it. */
9084 if (CONSTANT_P (src)
9085 && rtx_cost (src, SET) < 2)
9086 return 0;
9087
2a9fb548 9088 dest_mode = GET_MODE (SET_DEST (set));
2a9fb548
ILT
9089 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
9090 {
9091 if (i != dreg
9092 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
9093 && reload_cse_regno_equal_p (i, src, dest_mode))
9094 {
9095 int validated;
9096
9097 /* Pop back to the real obstacks while changing the insn. */
9098 pop_obstacks ();
9099
9100 validated = validate_change (insn, &SET_SRC (set),
38a448ca 9101 gen_rtx_REG (dest_mode, i), 1);
2a9fb548
ILT
9102
9103 /* Go back to the obstack we are using for temporary
05d10675 9104 storage. */
2a9fb548
ILT
9105 push_obstacks (&reload_obstack, &reload_obstack);
9106
6764d250
BS
9107 if (validated)
9108 return 1;
e9a25f70
JL
9109 }
9110 }
9111 return 0;
9112}
9113
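/* Illustrative sketch, not part of reload1.c: the cost gate applied by
   reload_cse_simplify_set, with invented numeric costs.  A constant or
   memory source is rewritten into a register-register copy only when some
   hard register is already known to hold the value and the original
   source is not cheaper than a copy; COPY_COST stands in for the "2"
   compared against MEMORY_MOVE_COST and rtx_cost above.  */
#include <stdio.h>

#define COPY_COST 2     /* assumed cost of a register-register move */

static int
prefer_register_copy (int value_known_in_some_reg, int src_cost)
{
  if (!value_known_in_some_reg)
    return 0;                       /* no register to copy from */
  return src_cost >= COPY_COST;     /* keep sources that are cheaper than a copy */
}

int
main (void)
{
  printf ("%d\n", prefer_register_copy (1, 4));  /* 1: expensive memory load */
  printf ("%d\n", prefer_register_copy (1, 1));  /* 0: constant cheaper than a copy */
  printf ("%d\n", prefer_register_copy (0, 4));  /* 0: value not in any register */
  return 0;
}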
9114/* Try to replace operands in INSN with equivalent values that are already
05d10675
BS
9115 in registers. This can be viewed as optional reloading.
9116
e9a25f70
JL
9117 For each non-register operand in the insn, see if any hard regs are
9118 known to be equivalent to that operand. Record the alternatives which
9119 can accept these hard registers. Among all alternatives, select the
9120 ones which are better or equal to the one currently matching, where
9121 "better" is in terms of '?' and '!' constraints. Among the remaining
9122 alternatives, select the one which replaces most operands with
9123 hard registers. */
9124
9125static int
9126reload_cse_simplify_operands (insn)
9127 rtx insn;
9128{
9129#ifdef REGISTER_CONSTRAINTS
e9a25f70
JL
9130 int i,j;
9131
9b3142b3 9132 const char *constraints[MAX_RECOG_OPERANDS];
05d10675 9133
e9a25f70
JL
9134 /* Vector recording how bad an alternative is. */
9135 int *alternative_reject;
9136 /* Vector recording how many registers can be introduced by choosing
9137 this alternative. */
9138 int *alternative_nregs;
9139 /* Array of vectors recording, for each operand and each alternative,
9140 which hard register to substitute, or -1 if the operand should be
9141 left as it is. */
9142 int *op_alt_regno[MAX_RECOG_OPERANDS];
9143 /* Array of alternatives, sorted in order of decreasing desirability. */
9144 int *alternative_order;
0254c561 9145 rtx reg = gen_rtx_REG (VOIDmode, -1);
05d10675 9146
0eadeb15 9147 extract_insn (insn);
e9a25f70 9148
1ccbefce 9149 if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
1d300e19 9150 return 0;
e9a25f70
JL
9151
9152 /* Figure out which alternative currently matches. */
0eadeb15 9153 if (! constrain_operands (1))
b8705408 9154 fatal_insn_not_found (insn);
e9a25f70 9155
1ccbefce
RH
9156 alternative_reject = (int *) alloca (recog_data.n_alternatives * sizeof (int));
9157 alternative_nregs = (int *) alloca (recog_data.n_alternatives * sizeof (int));
9158 alternative_order = (int *) alloca (recog_data.n_alternatives * sizeof (int));
9159 bzero ((char *)alternative_reject, recog_data.n_alternatives * sizeof (int));
9160 bzero ((char *)alternative_nregs, recog_data.n_alternatives * sizeof (int));
e9a25f70 9161
1ccbefce 9162 for (i = 0; i < recog_data.n_operands; i++)
e9a25f70
JL
9163 {
9164 enum machine_mode mode;
9165 int regno;
9b3142b3 9166 const char *p;
e9a25f70 9167
1ccbefce
RH
9168 op_alt_regno[i] = (int *) alloca (recog_data.n_alternatives * sizeof (int));
9169 for (j = 0; j < recog_data.n_alternatives; j++)
e9a25f70
JL
9170 op_alt_regno[i][j] = -1;
9171
1ccbefce
RH
9172 p = constraints[i] = recog_data.constraints[i];
9173 mode = recog_data.operand_mode[i];
e9a25f70
JL
9174
9175 /* Add the reject values for each alternative given by the constraints
9176 for this operand. */
9177 j = 0;
9178 while (*p != '\0')
9179 {
9180 char c = *p++;
9181 if (c == ',')
9182 j++;
9183 else if (c == '?')
9184 alternative_reject[j] += 3;
9185 else if (c == '!')
9186 alternative_reject[j] += 300;
9187 }
9188
9189 /* We won't change operands which are already registers. We
9190 also don't want to modify output operands. */
1ccbefce 9191 regno = true_regnum (recog_data.operand[i]);
e9a25f70
JL
9192 if (regno >= 0
9193 || constraints[i][0] == '='
9194 || constraints[i][0] == '+')
9195 continue;
9196
9197 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9198 {
9199 int class = (int) NO_REGS;
9200
1ccbefce 9201 if (! reload_cse_regno_equal_p (regno, recog_data.operand[i], mode))
e9a25f70
JL
9202 continue;
9203
0254c561
JC
9204 REGNO (reg) = regno;
9205 PUT_MODE (reg, mode);
9206
e9a25f70
JL
9207 /* We found a register equal to this operand. Now look for all
9208 alternatives that can accept this register and have not been
9209 assigned a register they can use yet. */
9210 j = 0;
9211 p = constraints[i];
9212 for (;;)
31418d35 9213 {
e9a25f70 9214 char c = *p++;
05d10675 9215
e9a25f70 9216 switch (c)
31418d35 9217 {
e9a25f70
JL
9218 case '=': case '+': case '?':
9219 case '#': case '&': case '!':
05d10675 9220 case '*': case '%':
e9a25f70 9221 case '0': case '1': case '2': case '3': case '4':
c5c76735 9222 case '5': case '6': case '7': case '8': case '9':
e9a25f70
JL
9223 case 'm': case '<': case '>': case 'V': case 'o':
9224 case 'E': case 'F': case 'G': case 'H':
9225 case 's': case 'i': case 'n':
9226 case 'I': case 'J': case 'K': case 'L':
9227 case 'M': case 'N': case 'O': case 'P':
9228#ifdef EXTRA_CONSTRAINT
9229 case 'Q': case 'R': case 'S': case 'T': case 'U':
9230#endif
9231 case 'p': case 'X':
9232 /* These don't say anything we care about. */
9233 break;
9234
9235 case 'g': case 'r':
9236 class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
9237 break;
9238
9239 default:
9240 class
e51712db 9241 = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
e9a25f70 9242 break;
31418d35 9243
e9a25f70
JL
9244 case ',': case '\0':
9245 /* See if REGNO fits this alternative, and set it up as the
9246 replacement register if we don't have one for this
0254c561
JC
9247 alternative yet and the operand being replaced is not
9248 a cheap CONST_INT. */
e9a25f70 9249 if (op_alt_regno[i][j] == -1
0254c561 9250 && reg_fits_class_p (reg, class, 0, mode)
1ccbefce
RH
9251 && (GET_CODE (recog_data.operand[i]) != CONST_INT
9252 || (rtx_cost (recog_data.operand[i], SET)
9253 > rtx_cost (reg, SET))))
31418d35 9254 {
e9a25f70
JL
9255 alternative_nregs[j]++;
9256 op_alt_regno[i][j] = regno;
31418d35 9257 }
e9a25f70
JL
9258 j++;
9259 break;
31418d35
ILT
9260 }
9261
e9a25f70
JL
9262 if (c == '\0')
9263 break;
9264 }
9265 }
9266 }
9267
9268 /* Record all alternatives which are better or equal to the currently
9269 matching one in the alternative_order array. */
1ccbefce 9270 for (i = j = 0; i < recog_data.n_alternatives; i++)
e9a25f70
JL
9271 if (alternative_reject[i] <= alternative_reject[which_alternative])
9272 alternative_order[j++] = i;
1ccbefce 9273 recog_data.n_alternatives = j;
e9a25f70
JL
9274
9275 /* Sort it. Given a small number of alternatives, a dumb algorithm
9276 won't hurt too much. */
1ccbefce 9277 for (i = 0; i < recog_data.n_alternatives - 1; i++)
e9a25f70
JL
9278 {
9279 int best = i;
9280 int best_reject = alternative_reject[alternative_order[i]];
9281 int best_nregs = alternative_nregs[alternative_order[i]];
9282 int tmp;
9283
1ccbefce 9284 for (j = i + 1; j < recog_data.n_alternatives; j++)
e9a25f70
JL
9285 {
9286 int this_reject = alternative_reject[alternative_order[j]];
9287 int this_nregs = alternative_nregs[alternative_order[j]];
9288
9289 if (this_reject < best_reject
9290 || (this_reject == best_reject && this_nregs > best_nregs))
9291 {
9292 best = j;
9293 best_reject = this_reject;
9294 best_nregs = this_nregs;
31418d35 9295 }
2a9fb548 9296 }
05d10675 9297
e9a25f70
JL
9298 tmp = alternative_order[best];
9299 alternative_order[best] = alternative_order[i];
9300 alternative_order[i] = tmp;
9301 }
05d10675 9302
e9a25f70
JL
9303 /* Substitute the operands as determined by op_alt_regno for the best
9304 alternative. */
9305 j = alternative_order[0];
e9a25f70
JL
9306
9307 /* Pop back to the real obstacks while changing the insn. */
9308 pop_obstacks ();
9309
1ccbefce 9310 for (i = 0; i < recog_data.n_operands; i++)
e9a25f70 9311 {
1ccbefce 9312 enum machine_mode mode = recog_data.operand_mode[i];
e9a25f70
JL
9313 if (op_alt_regno[i][j] == -1)
9314 continue;
9315
1ccbefce 9316 validate_change (insn, recog_data.operand_loc[i],
38a448ca 9317 gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
e9a25f70
JL
9318 }
9319
1ccbefce 9320 for (i = recog_data.n_dups - 1; i >= 0; i--)
e9a25f70 9321 {
1ccbefce
RH
9322 int op = recog_data.dup_num[i];
9323 enum machine_mode mode = recog_data.operand_mode[op];
e9a25f70
JL
9324
9325 if (op_alt_regno[op][j] == -1)
9326 continue;
9327
1ccbefce 9328 validate_change (insn, recog_data.dup_loc[i],
38a448ca 9329 gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
2a9fb548 9330 }
e9a25f70
JL
9331
9332 /* Go back to the obstack we are using for temporary
9333 storage. */
9334 push_obstacks (&reload_obstack, &reload_obstack);
9335
9336 return apply_change_group ();
9337#else
9338 return 0;
9339#endif
2a9fb548
ILT
9340}
9341
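/* Illustrative sketch, not part of reload1.c: the selection-sort ordering
   reload_cse_simplify_operands applies to the surviving alternatives.
   Alternatives are ranked by reject count (lower is better; '?' adds 3,
   '!' adds 300) and, on ties, by how many operands they let us replace
   with hard registers (more is better, per the comment above the
   function).  The arrays are toy data standing in for alternative_reject,
   alternative_nregs and alternative_order.  */
#include <stdio.h>

int
main (void)
{
  int reject[] = { 3, 0, 0, 300 };
  int nregs[]  = { 1, 1, 2, 3 };
  int order[]  = { 0, 1, 2, 3 };
  int n = 4, i, j;

  for (i = 0; i < n - 1; i++)
    {
      int best = i;

      for (j = i + 1; j < n; j++)
        if (reject[order[j]] < reject[order[best]]
            || (reject[order[j]] == reject[order[best]]
                && nregs[order[j]] > nregs[order[best]]))
          best = j;

      { int tmp = order[best]; order[best] = order[i]; order[i] = tmp; }
    }

  /* Prints "2 1 0 3": alternative 2 wins with the lowest reject count and
     the most replaced operands among the tied entries.  */
  for (i = 0; i < n; i++)
    printf ("%d ", order[i]);
  printf ("\n");
  return 0;
}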
9342/* These two variables are used to pass information from
9343 reload_cse_record_set to reload_cse_check_clobber. */
9344
9345static int reload_cse_check_clobbered;
9346static rtx reload_cse_check_src;
9347
9348/* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
9349 RELOAD_CSE_CHECK_CLOBBERED. This is called via note_stores. The
9350 second argument, which is passed by note_stores, is ignored. */
9351
9352static void
9353reload_cse_check_clobber (dest, ignore)
9354 rtx dest;
487a6e06 9355 rtx ignore ATTRIBUTE_UNUSED;
2a9fb548
ILT
9356{
9357 if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
9358 reload_cse_check_clobbered = 1;
9359}
9360
9361/* Record the result of a SET instruction. SET is the set pattern.
9362 BODY is the pattern of the insn that it came from. */
9363
9364static void
9365reload_cse_record_set (set, body)
9366 rtx set;
9367 rtx body;
9368{
9e148ceb 9369 rtx dest, src, x;
2a9fb548
ILT
9370 int dreg, sreg;
9371 enum machine_mode dest_mode;
9372
9373 dest = SET_DEST (set);
9374 src = SET_SRC (set);
9375 dreg = true_regnum (dest);
9376 sreg = true_regnum (src);
9377 dest_mode = GET_MODE (dest);
9378
9e148ceb
ILT
9379 /* Some machines don't define AUTO_INC_DEC, but they still use push
9380 instructions. We need to catch that case here in order to
9381 invalidate the stack pointer correctly. Note that invalidating
9382 the stack pointer is different from invalidating DEST. */
9383 x = dest;
9384 while (GET_CODE (x) == SUBREG
9385 || GET_CODE (x) == ZERO_EXTRACT
9386 || GET_CODE (x) == SIGN_EXTRACT
9387 || GET_CODE (x) == STRICT_LOW_PART)
9388 x = XEXP (x, 0);
9389 if (push_operand (x, GET_MODE (x)))
9390 {
9391 reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX);
9392 reload_cse_invalidate_rtx (dest, NULL_RTX);
9393 return;
9394 }
9395
2a9fb548
ILT
9396 /* We can only handle an assignment to a register, or a store of a
9397 register to a memory location. For other cases, we just clobber
9398 the destination. We also have to just clobber if there are side
9399 effects in SRC or DEST. */
9400 if ((dreg < 0 && GET_CODE (dest) != MEM)
9401 || side_effects_p (src)
9402 || side_effects_p (dest))
9403 {
9404 reload_cse_invalidate_rtx (dest, NULL_RTX);
9405 return;
9406 }
9407
9408#ifdef HAVE_cc0
9409 /* We don't try to handle values involving CC, because it's a pain
9410 to keep track of when they have to be invalidated. */
9411 if (reg_mentioned_p (cc0_rtx, src)
9412 || reg_mentioned_p (cc0_rtx, dest))
9413 {
9414 reload_cse_invalidate_rtx (dest, NULL_RTX);
9415 return;
9416 }
9417#endif
9418
9419 /* If BODY is a PARALLEL, then we need to see whether the source of
9420 SET is clobbered by some other instruction in the PARALLEL. */
9421 if (GET_CODE (body) == PARALLEL)
9422 {
9423 int i;
9424
9425 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
9426 {
9427 rtx x;
9428
9429 x = XVECEXP (body, 0, i);
9430 if (x == set)
9431 continue;
9432
9433 reload_cse_check_clobbered = 0;
9434 reload_cse_check_src = src;
9435 note_stores (x, reload_cse_check_clobber);
9436 if (reload_cse_check_clobbered)
9437 {
9438 reload_cse_invalidate_rtx (dest, NULL_RTX);
9439 return;
9440 }
9441 }
9442 }
9443
9444 if (dreg >= 0)
9445 {
9446 int i;
9447
9448 /* This is an assignment to a register. Update the value we
05d10675 9449 have stored for the register. */
2a9fb548 9450 if (sreg >= 0)
ad578014
ILT
9451 {
9452 rtx x;
9453
9454 /* This is a copy from one register to another. Any values
9455 which were valid for SREG are now valid for DREG. If the
9456 mode changes, we use gen_lowpart_common to extract only
9457 the part of the value that is copied. */
9458 reg_values[dreg] = 0;
9459 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
9460 {
9461 rtx tmp;
9462
9463 if (XEXP (x, 0) == 0)
9464 continue;
9465 if (dest_mode == GET_MODE (XEXP (x, 0)))
9466 tmp = XEXP (x, 0);
23e7786b 9467 else if (GET_MODE_BITSIZE (dest_mode)
05d10675 9468 > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
23e7786b 9469 continue;
ad578014
ILT
9470 else
9471 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
9472 if (tmp)
38a448ca
RH
9473 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, tmp,
9474 reg_values[dreg]);
05d10675 9475 }
ad578014 9476 }
2a9fb548 9477 else
38a448ca 9478 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, src, NULL_RTX);
2a9fb548
ILT
9479
9480 /* We've changed DREG, so invalidate any values held by other
05d10675 9481 registers that depend upon it. */
2a9fb548
ILT
9482 reload_cse_invalidate_regno (dreg, dest_mode, 0);
9483
9484 /* If this assignment changes more than one hard register,
05d10675 9485 forget anything we know about the others. */
2a9fb548
ILT
9486 for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
9487 reg_values[dreg + i] = 0;
9488 }
9489 else if (GET_CODE (dest) == MEM)
9490 {
9491 /* Invalidate conflicting memory locations. */
9492 reload_cse_invalidate_mem (dest);
9493
9494 /* If we're storing a register to memory, add DEST to the list
05d10675 9495 in REG_VALUES. */
2a9fb548 9496 if (sreg >= 0 && ! side_effects_p (dest))
38a448ca 9497 reg_values[sreg] = gen_rtx_EXPR_LIST (dest_mode, dest,
2a9fb548
ILT
9498 reg_values[sreg]);
9499 }
9500 else
9501 {
9502 /* We should have bailed out earlier. */
9503 abort ();
9504 }
9505}
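/* Illustrative sketch, not part of reload1.c: the bookkeeping pattern of
   reload_cse_record_set on a toy table.  value_of[r] stands in for
   reg_values[] and holds the single constant known to be in register R
   (UNKNOWN if none).  A register-register copy propagates the source's
   knowledge; a constant load replaces whatever was known about the
   destination.  */
#include <stdio.h>

#define NREGS   8
#define UNKNOWN (-1)

static int value_of[NREGS];

static void
record_reg_copy (int rd, int rs)        /* (set (reg RD) (reg RS)) */
{
  value_of[rd] = value_of[rs];
}

static void
record_const_load (int rd, int value)   /* (set (reg RD) (const_int VALUE)) */
{
  value_of[rd] = value;
}

int
main (void)
{
  int r;

  for (r = 0; r < NREGS; r++)
    value_of[r] = UNKNOWN;

  record_const_load (1, 42);
  record_reg_copy (2, 1);
  printf ("%d %d\n", value_of[1], value_of[2]);   /* 42 42 */

  record_const_load (1, 7);                       /* r1 changes; r2 keeps 42 */
  printf ("%d %d\n", value_of[1], value_of[2]);   /* 7 42 */
  return 0;
}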
5adf6da0
R
9506\f
9507/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
9508 addressing now.
9509 This code might also be useful when reload gave up on reg+reg addressing
9510 because of clashes between the return register and INDEX_REG_CLASS. */
9511
9512/* The maximum number of uses of a register we can keep track of to
9513 replace them with reg+reg addressing. */
9514#define RELOAD_COMBINE_MAX_USES 6
9515
9516/* INSN is the insn where a register has been used, and USEP points to the
9517 location of the register within the rtl. */
9518struct reg_use { rtx insn, *usep; };
9519
9520/* If the register is used in some unknown fashion, USE_INDEX is negative.
9521 If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
9522 indicates where it becomes live again.
9523 Otherwise, USE_INDEX is the index of the last encountered use of the
9524 register (which is first among these we have seen since we scan backwards),
9525 OFFSET contains the constant offset that is added to the register in
9526 all encountered uses, and USE_RUID indicates the first encountered, i.e.
ed937a19
R
9527 last, of these uses.
9528 STORE_RUID is always meaningful if we only want to use a value in a
9529 register in a different place: it denotes the next insn in the insn
9530 stream (i.e. the last encountered) that sets or clobbers the register. */
5adf6da0
R
9531static struct
9532 {
9533 struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
9534 int use_index;
9535 rtx offset;
9536 int store_ruid;
9537 int use_ruid;
9538 } reg_state[FIRST_PSEUDO_REGISTER];
9539
9540/* Reverse linear uid. This is increased in reload_combine while scanning
9541 the instructions from last to first. It is used to set last_label_ruid
9542 and the store_ruid / use_ruid fields in reg_state. */
9543static int reload_combine_ruid;
9544
b0634509
R
9545#define LABEL_LIVE(LABEL) \
9546 (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
9547
5adf6da0
R
9548static void
9549reload_combine ()
9550{
9551 rtx insn, set;
9552 int first_index_reg = 1, last_index_reg = 0;
9553 int i;
9554 int last_label_ruid;
b0634509
R
9555 int min_labelno, n_labels;
9556 HARD_REG_SET ever_live_at_start, *label_live;
5adf6da0
R
9557
9558 /* If reg+reg can be used in offsettable memory addresses, the main chunk of
9559 reload has already used it where appropriate, so there is no use in
9560 trying to generate it now. */
03acd8f8 9561 if (double_reg_address_ok && INDEX_REG_CLASS != NO_REGS)
5adf6da0
R
9562 return;
9563
9564 /* To avoid wasting too much time later searching for an index register,
9565 determine the minimum and maximum index register numbers. */
9566 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9567 {
9568 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i))
9569 {
9570 if (! last_index_reg)
9571 last_index_reg = i;
9572 first_index_reg = i;
9573 }
9574 }
9575 /* If no index register is available, we can quit now. */
9576 if (first_index_reg > last_index_reg)
9577 return;
9578
b0634509
R
9579 /* Set up LABEL_LIVE and EVER_LIVE_AT_START. The register lifetime
9580 information is a bit fuzzy immediately after reload, but it's
9581 still good enough to determine which registers are live at a jump
9582 destination. */
9583 min_labelno = get_first_label_num ();
9584 n_labels = max_label_num () - min_labelno;
9585 label_live = (HARD_REG_SET *) xmalloc (n_labels * sizeof (HARD_REG_SET));
9586 CLEAR_HARD_REG_SET (ever_live_at_start);
9587 for (i = n_basic_blocks - 1; i >= 0; i--)
9588 {
3b413743 9589 insn = BLOCK_HEAD (i);
b0634509
R
9590 if (GET_CODE (insn) == CODE_LABEL)
9591 {
9592 HARD_REG_SET live;
9593
e881bb1b
RH
9594 REG_SET_TO_HARD_REG_SET (live, BASIC_BLOCK (i)->global_live_at_start);
9595 compute_use_by_pseudos (&live, BASIC_BLOCK (i)->global_live_at_start);
b0634509
R
9596 COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
9597 IOR_HARD_REG_SET (ever_live_at_start, live);
9598 }
9599 }
9600
5adf6da0
R
9601 /* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
9602 last_label_ruid = reload_combine_ruid = 0;
9603 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9604 {
ed937a19 9605 reg_state[i].store_ruid = reload_combine_ruid;
5adf6da0
R
9606 if (fixed_regs[i])
9607 reg_state[i].use_index = -1;
9608 else
ed937a19 9609 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
5adf6da0
R
9610 }
9611
9612 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
9613 {
9614 rtx note;
9615
9616 /* We cannot do our optimization across labels. Invalidating all the use
9617 information we have would be costly, so we just note where the label
05d10675 9618 is and then later disable any optimization that would cross it. */
5adf6da0
R
9619 if (GET_CODE (insn) == CODE_LABEL)
9620 last_label_ruid = reload_combine_ruid;
b0634509
R
9621 if (GET_CODE (insn) == BARRIER)
9622 {
9623 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9624 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9625 }
5adf6da0
R
9626 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
9627 continue;
9628 reload_combine_ruid++;
9629
9630 /* Look for (set (REGX) (CONST_INT))
eceef4c9
BS
9631 (set (REGX) (PLUS (REGX) (REGY)))
9632 ...
9633 ... (MEM (REGX)) ...
5adf6da0 9634 and convert it to
eceef4c9
BS
9635 (set (REGZ) (CONST_INT))
9636 ...
9637 ... (MEM (PLUS (REGZ) (REGY)))... .
5adf6da0
R
9638
9639 First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
9640 and that we know all uses of REGX before it dies. */
2abbc1bd
R
9641 set = single_set (insn);
9642 if (set != NULL_RTX
5adf6da0
R
9643 && GET_CODE (SET_DEST (set)) == REG
9644 && (HARD_REGNO_NREGS (REGNO (SET_DEST (set)),
9645 GET_MODE (SET_DEST (set)))
9646 == 1)
9647 && GET_CODE (SET_SRC (set)) == PLUS
9648 && GET_CODE (XEXP (SET_SRC (set), 1)) == REG
9649 && rtx_equal_p (XEXP (SET_SRC (set), 0), SET_DEST (set))
9650 && last_label_ruid < reg_state[REGNO (SET_DEST (set))].use_ruid)
9651 {
9652 rtx reg = SET_DEST (set);
9653 rtx plus = SET_SRC (set);
9654 rtx base = XEXP (plus, 1);
9655 rtx prev = prev_nonnote_insn (insn);
9656 rtx prev_set = prev ? single_set (prev) : NULL_RTX;
9657 int regno = REGNO (reg);
6a651371 9658 rtx const_reg = NULL_RTX;
5adf6da0
R
9659 rtx reg_sum = NULL_RTX;
9660
9661 /* Now, we need an index register.
9662 We'll set index_reg to this index register, const_reg to the
9663 register that is to be loaded with the constant
9664 (denoted as REGZ in the substitution illustration above),
9665 and reg_sum to the register-register that we want to use to
9666 substitute uses of REG (typically in MEMs) with.
9667 First check REG and BASE for being index registers;
9668 we can use them even if they are not dead. */
9669 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
9670 || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
9671 REGNO (base)))
9672 {
9673 const_reg = reg;
9674 reg_sum = plus;
9675 }
9676 else
9677 {
05d10675
BS
9678 /* Otherwise, look for a free index register. Since we have
9679 checked above that neither REG nor BASE is an index register,
9680 if we find anything at all, it will be different from these
9681 two registers. */
9682 for (i = first_index_reg; i <= last_index_reg; i++)
5adf6da0
R
9683 {
9684 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
9685 && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
9686 && reg_state[i].store_ruid <= reg_state[regno].use_ruid
9687 && HARD_REGNO_NREGS (i, GET_MODE (reg)) == 1)
9688 {
9689 rtx index_reg = gen_rtx_REG (GET_MODE (reg), i);
9690 const_reg = index_reg;
9691 reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
9692 break;
9693 }
9694 }
9695 }
ed937a19
R
9696 /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
9697 (REGY), i.e. BASE, is not clobbered before the last use we'll
9698 create. */
5adf6da0
R
9699 if (prev_set
9700 && GET_CODE (SET_SRC (prev_set)) == CONST_INT
9701 && rtx_equal_p (SET_DEST (prev_set), reg)
9702 && reg_state[regno].use_index >= 0
ed937a19 9703 && reg_state[REGNO (base)].store_ruid <= reg_state[regno].use_ruid
5adf6da0
R
9704 && reg_sum)
9705 {
9706 int i;
9707
9708 /* Change destination register and - if necessary - the
9709 constant value in PREV, the constant loading instruction. */
9710 validate_change (prev, &SET_DEST (prev_set), const_reg, 1);
9711 if (reg_state[regno].offset != const0_rtx)
9712 validate_change (prev,
9713 &SET_SRC (prev_set),
9714 GEN_INT (INTVAL (SET_SRC (prev_set))
9715 + INTVAL (reg_state[regno].offset)),
9716 1);
9717 /* Now for every use of REG that we have recorded, replace REG
9718 with REG_SUM. */
9719 for (i = reg_state[regno].use_index;
9720 i < RELOAD_COMBINE_MAX_USES; i++)
9721 validate_change (reg_state[regno].reg_use[i].insn,
9722 reg_state[regno].reg_use[i].usep,
9723 reg_sum, 1);
9724
9725 if (apply_change_group ())
9726 {
9727 rtx *np;
9728
9729 /* Delete the reg-reg addition. */
9730 PUT_CODE (insn, NOTE);
9731 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
9732 NOTE_SOURCE_FILE (insn) = 0;
9733
9734 if (reg_state[regno].offset != const0_rtx)
9735 {
9736 /* Previous REG_EQUIV / REG_EQUAL notes for PREV
9737 are now invalid. */
9738 for (np = &REG_NOTES (prev); *np; )
9739 {
9740 if (REG_NOTE_KIND (*np) == REG_EQUAL
9741 || REG_NOTE_KIND (*np) == REG_EQUIV)
9742 *np = XEXP (*np, 1);
9743 else
9744 np = &XEXP (*np, 1);
9745 }
9746 }
9747 reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
9748 reg_state[REGNO (const_reg)].store_ruid = reload_combine_ruid;
9749 continue;
9750 }
9751 }
9752 }
9753 note_stores (PATTERN (insn), reload_combine_note_store);
9754 if (GET_CODE (insn) == CALL_INSN)
9755 {
9756 rtx link;
9757
9758 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9759 {
9760 if (call_used_regs[i])
9761 {
9762 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9763 reg_state[i].store_ruid = reload_combine_ruid;
9764 }
9765 }
9766 for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
9767 link = XEXP (link, 1))
9768 {
9769 rtx use = XEXP (link, 0);
9770 int regno = REGNO (XEXP (use, 0));
9771 if (GET_CODE (use) == CLOBBER)
9772 {
9773 reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
9774 reg_state[regno].store_ruid = reload_combine_ruid;
9775 }
9776 else
9777 reg_state[regno].use_index = -1;
9778 }
9779 }
b0634509 9780 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) != RETURN)
5adf6da0
R
9781 {
9782 /* Non-spill registers might be used at the call destination in
9783 some unknown fashion, so we have to mark the unknown use. */
b0634509
R
9784 HARD_REG_SET *live;
9785 if ((condjump_p (insn) || condjump_in_parallel_p (insn))
9786 && JUMP_LABEL (insn))
9787 live = &LABEL_LIVE (JUMP_LABEL (insn));
9788 else
9789 live = &ever_live_at_start;
5adf6da0
R
9790 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9791 {
b0634509 9792 if (TEST_HARD_REG_BIT (*live, i))
5adf6da0
R
9793 reg_state[i].use_index = -1;
9794 }
9795 }
9796 reload_combine_note_use (&PATTERN (insn), insn);
9797 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
9798 {
9799 if (REG_NOTE_KIND (note) == REG_INC
9800 && GET_CODE (XEXP (note, 0)) == REG)
ed937a19
R
9801 {
9802 int regno = REGNO (XEXP (note, 0));
9803
9804 reg_state[regno].store_ruid = reload_combine_ruid;
9805 reg_state[regno].use_index = -1;
9806 }
5adf6da0
R
9807 }
9808 }
b0634509 9809 free (label_live);
5adf6da0
R
9810}
9811
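/* Illustrative sketch, not part of reload1.c: the reverse-uid ("ruid")
   bookkeeping reload_combine relies on.  Insns are visited from last to
   first while a counter increases, so a larger ruid means an earlier insn.
   store_ruid[R] is overwritten each time the scan sees an insn setting R,
   so it identifies the next store to R after the current position;
   use_ruid[R] keeps the ruid of the first use encountered, i.e. the last
   use of R in the stream.  The test store_ruid[base] <= use_ruid[regx]
   then means: BASE survives up to the last use of REGX we would rewrite.
   The five-insn stream below is made up for the example.  */
#include <stdio.h>

#define NREGS 4

enum op { USE, STORE, OTHER };
struct insn { enum op op; int reg; };

int
main (void)
{
  /* Stream order i0..i4; REGX is r1, BASE is r2.  */
  struct insn stream[] = {
    { OTHER, -1 },    /* i0: r1 = r1 + r2, the insn we want to combine  */
    { USE,    1 },    /* i1: ... (mem (r1)) ...                         */
    { STORE,  2 },    /* i2: r2 = ...    <- clobbers BASE               */
    { USE,    1 },    /* i3: ... (mem (r1)) ...  <- last use of REGX    */
    { OTHER, -1 },    /* i4: unrelated                                  */
  };
  int n = sizeof stream / sizeof *stream;
  int store_ruid[NREGS] = { 0, 0, 0, 0 };
  int use_ruid[NREGS] = { 0, 0, 0, 0 };
  int ruid = 0, i;

  for (i = n - 1; i >= 0; i--)          /* backward scan, like reload_combine */
    {
      ruid++;
      if (stream[i].op == STORE)
        store_ruid[stream[i].reg] = ruid;
      else if (stream[i].op == USE && use_ruid[stream[i].reg] == 0)
        use_ruid[stream[i].reg] = ruid;
    }

  /* BASE is clobbered between the two uses of REGX, so the combination
     must be rejected.  Prints "safe = 0".  */
  printf ("safe = %d\n", store_ruid[2] <= use_ruid[1]);
  return 0;
}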
9812/* Check if DST is a register or a subreg of a register; if it is,
9813 update reg_state[regno].store_ruid and reg_state[regno].use_index
f93233bb 9814 accordingly. Called via note_stores from reload_combine. */
5adf6da0
R
9815static void
9816reload_combine_note_store (dst, set)
f93233bb 9817 rtx dst, set;
5adf6da0
R
9818{
9819 int regno = 0;
9820 int i;
9821 unsigned size = GET_MODE_SIZE (GET_MODE (dst));
9822
9823 if (GET_CODE (dst) == SUBREG)
9824 {
9825 regno = SUBREG_WORD (dst);
9826 dst = SUBREG_REG (dst);
9827 }
9828 if (GET_CODE (dst) != REG)
9829 return;
9830 regno += REGNO (dst);
54ca6ffa 9831
5adf6da0 9832 /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
05d10675 9833 careful with registers / register parts that are not full words.
54ca6ffa
JL
9834
9835 Similarly for ZERO_EXTRACT and SIGN_EXTRACT. */
9836 if (GET_CODE (set) != SET
9837 || GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
9838 || GET_CODE (SET_DEST (set)) == SIGN_EXTRACT
9839 || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
ed937a19 9840 {
43f854cf 9841 for (i = (size - 1) / UNITS_PER_WORD + regno; i >= regno; i--)
f93233bb
JL
9842 {
9843 reg_state[i].use_index = -1;
9844 reg_state[i].store_ruid = reload_combine_ruid;
9845 }
ed937a19 9846 }
5adf6da0
R
9847 else
9848 {
43f854cf 9849 for (i = (size - 1) / UNITS_PER_WORD + regno; i >= regno; i--)
5adf6da0
R
9850 {
9851 reg_state[i].store_ruid = reload_combine_ruid;
9852 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9853 }
9854 }
9855}
9856
9857/* XP points to a piece of rtl that has to be checked for any uses of
9858 registers.
9859 *XP is the pattern of INSN, or a part of it.
9860 Called from reload_combine, and recursively by itself. */
9861static void
9862reload_combine_note_use (xp, insn)
9863 rtx *xp, insn;
9864{
9865 rtx x = *xp;
9866 enum rtx_code code = x->code;
6f7d635c 9867 const char *fmt;
5adf6da0
R
9868 int i, j;
9869 rtx offset = const0_rtx; /* For the REG case below. */
9870
9871 switch (code)
9872 {
9873 case SET:
9874 if (GET_CODE (SET_DEST (x)) == REG)
9875 {
9876 reload_combine_note_use (&SET_SRC (x), insn);
9877 return;
9878 }
9879 break;
9880
9881 case CLOBBER:
9882 if (GET_CODE (SET_DEST (x)) == REG)
9883 return;
9884 break;
9885
9886 case PLUS:
9887 /* We are interested in (plus (reg) (const_int)) . */
9888 if (GET_CODE (XEXP (x, 0)) != REG || GET_CODE (XEXP (x, 1)) != CONST_INT)
9889 break;
9890 offset = XEXP (x, 1);
9891 x = XEXP (x, 0);
05d10675 9892 /* Fall through. */
5adf6da0
R
9893 case REG:
9894 {
9895 int regno = REGNO (x);
9896 int use_index;
9897
9898 /* Some spurious USEs of pseudo registers might remain.
9899 Just ignore them. */
9900 if (regno >= FIRST_PSEUDO_REGISTER)
9901 return;
9902
9903 /* If this register is already used in some unknown fashion, we
9904 can't do anything.
9905 If we decrement the index from zero to -1, we can't store more
9906 uses, so this register becomes used in an unknown fashion. */
9907 use_index = --reg_state[regno].use_index;
9908 if (use_index < 0)
9909 return;
9910
9911 if (use_index != RELOAD_COMBINE_MAX_USES - 1)
9912 {
9913 /* We have found another use for a register that is already
9914 used later. Check if the offsets match; if not, mark the
9915 register as used in an unknown fashion. */
9916 if (! rtx_equal_p (offset, reg_state[regno].offset))
9917 {
9918 reg_state[regno].use_index = -1;
9919 return;
9920 }
9921 }
9922 else
9923 {
9924 /* This is the first use of this register we have seen since we
9925 marked it as dead. */
9926 reg_state[regno].offset = offset;
9927 reg_state[regno].use_ruid = reload_combine_ruid;
9928 }
9929 reg_state[regno].reg_use[use_index].insn = insn;
9930 reg_state[regno].reg_use[use_index].usep = xp;
9931 return;
9932 }
9933
9934 default:
9935 break;
9936 }
9937
9938 /* Recursively process the components of X. */
9939 fmt = GET_RTX_FORMAT (code);
9940 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9941 {
9942 if (fmt[i] == 'e')
9943 reload_combine_note_use (&XEXP (x, i), insn);
9944 else if (fmt[i] == 'E')
9945 {
9946 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9947 reload_combine_note_use (&XVECEXP (x, i, j), insn);
9948 }
9949 }
9950}
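/* Illustrative sketch, not part of reload1.c: the fixed-size use buffer
   kept per hard register by reload_combine_note_use.  use_index counts
   down from RELOAD_COMBINE_MAX_USES as uses are recorded; once it would
   drop below zero the register is treated as used in an unknown fashion
   and no combination is attempted for it.  MAX_USES and use_site[] mirror
   RELOAD_COMBINE_MAX_USES and reg_use[] for a single register.  */
#include <stdio.h>

#define MAX_USES 6

static int use_index = MAX_USES;   /* MAX_USES means "no uses recorded yet" */
static int use_site[MAX_USES];     /* toy stand-in for struct reg_use */

/* Record one use at toy insn number INSN_NO; return 0 once the register
   has fallen into the "unknown" state.  */
static int
note_use (int insn_no)
{
  if (use_index < 0)
    return 0;                      /* already unknown */
  use_index--;
  if (use_index < 0)
    return 0;                      /* too many uses: give up on this register */
  use_site[use_index] = insn_no;
  return 1;
}

int
main (void)
{
  int i, ok = 1;

  for (i = 0; i < 8; i++)          /* 8 uses overflow the 6-entry buffer */
    ok = note_use (100 + i);
  printf ("trackable = %d, use_index = %d\n", ok, use_index);   /* 0, -1 */
  return 0;
}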
9951\f
9952/* See if we can reduce the cost of a constant by replacing a move with
9953 an add. */
9954/* We cannot do our optimization across labels. Invalidating all the
9955 information about register contents we have would be costly, so we
9956 use last_label_luid (local variable of reload_cse_move2add) to note
9957 where the label is and then later disable any optimization that would
9958 cross it.
9959 reg_offset[n] / reg_base_reg[n] / reg_mode[n] are only valid if
9960 reg_set_luid[n] is larger than last_label_luid. */
9961static int reg_set_luid[FIRST_PSEUDO_REGISTER];
9962/* reg_offset[n] has to be CONST_INT for it and reg_base_reg[n] /
9963 reg_mode[n] to be valid.
9964 If reg_offset[n] is a CONST_INT and reg_base_reg[n] is negative, register n
9965 has been set to reg_offset[n] in mode reg_mode[n] .
9966 If reg_offset[n] is a CONST_INT and reg_base_reg[n] is non-negative,
9967 register n has been set to the sum of reg_offset[n] and register
9968 reg_base_reg[n], calculated in mode reg_mode[n] . */
9969static rtx reg_offset[FIRST_PSEUDO_REGISTER];
9970static int reg_base_reg[FIRST_PSEUDO_REGISTER];
9971static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
9972/* move2add_luid is linearly increased while scanning the instructions
9973 from first to last. It is used to set reg_set_luid in
6764d250 9974 reload_cse_move2add and move2add_note_store. */
5adf6da0
R
9975static int move2add_luid;
9976
ccc4ae07
AS
9977/* Generate a CONST_INT and force it in the range of MODE. */
9978static rtx
9979gen_mode_int (mode, value)
9980 enum machine_mode mode;
9981 HOST_WIDE_INT value;
9982{
9983 HOST_WIDE_INT cval = value & GET_MODE_MASK (mode);
9984 int width = GET_MODE_BITSIZE (mode);
9985
9986 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative number,
9987 sign extend it. */
9988 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
9989 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
9990 cval |= (HOST_WIDE_INT) -1 << width;
9991
9992 return GEN_INT (cval);
9993}
9994
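/* Illustrative sketch, not part of reload1.c: the mask-and-sign-extend
   arithmetic performed by gen_mode_int, on plain host integers.  WIDTH
   stands in for GET_MODE_BITSIZE (mode); unsigned long plays the role of
   HOST_WIDE_INT.  Assumes 0 < WIDTH < number of bits in a long.  */
#include <stdio.h>

static long
truncate_and_sign_extend (unsigned long value, int width)
{
  unsigned long mask = (1UL << width) - 1;
  unsigned long cval = value & mask;        /* force into the mode's range */

  if (cval & (1UL << (width - 1)))          /* sign bit of the narrow mode set? */
    cval |= ~mask;                          /* replicate it into the upper bits */

  return (long) cval;                       /* two's complement reinterpretation */
}

int
main (void)
{
  printf ("%ld\n", truncate_and_sign_extend (0x1FF, 8));   /* 0xFF in an 8-bit mode: -1 */
  printf ("%ld\n", truncate_and_sign_extend (100, 8));     /* 100 */
  printf ("%ld\n", truncate_and_sign_extend (0x80, 8));    /* -128 */
  return 0;
}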
5adf6da0
R
9995static void
9996reload_cse_move2add (first)
9997 rtx first;
9998{
9999 int i;
10000 rtx insn;
10001 int last_label_luid;
5adf6da0
R
10002
10003 for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
6764d250
BS
10004 reg_set_luid[i] = 0;
10005
5adf6da0
R
10006 last_label_luid = 0;
10007 move2add_luid = 1;
10008 for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
10009 {
10010 rtx pat, note;
10011
10012 if (GET_CODE (insn) == CODE_LABEL)
10013 last_label_luid = move2add_luid;
10014 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
10015 continue;
10016 pat = PATTERN (insn);
10017 /* For simplicity, we only perform this optimization on
10018 straightforward SETs. */
10019 if (GET_CODE (pat) == SET
10020 && GET_CODE (SET_DEST (pat)) == REG)
10021 {
10022 rtx reg = SET_DEST (pat);
10023 int regno = REGNO (reg);
10024 rtx src = SET_SRC (pat);
10025
10026 /* Check if we have valid information on the contents of this
10027 register in the mode of REG. */
10028 /* ??? We don't know how zero / sign extension is handled, hence
10029 we can't go from a narrower to a wider mode. */
10030 if (reg_set_luid[regno] > last_label_luid
05d10675
BS
10031 && (GET_MODE_SIZE (GET_MODE (reg))
10032 <= GET_MODE_SIZE (reg_mode[regno]))
10033 && GET_CODE (reg_offset[regno]) == CONST_INT)
5adf6da0
R
10034 {
10035 /* Try to transform (set (REGX) (CONST_INT A))
10036 ...
10037 (set (REGX) (CONST_INT B))
10038 to
10039 (set (REGX) (CONST_INT A))
10040 ...
10041 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
10042
10043 if (GET_CODE (src) == CONST_INT && reg_base_reg[regno] < 0)
10044 {
10045 int success = 0;
ccc4ae07
AS
10046 rtx new_src
10047 = gen_mode_int (GET_MODE (reg),
10048 INTVAL (src) - INTVAL (reg_offset[regno]));
5adf6da0
R
10049 /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
10050 use (set (reg) (reg)) instead.
10051 We don't delete this insn, nor do we convert it into a
10052 note, to avoid losing register notes or the return
10053 value flag. jump2 already knows how to get rid of
10054 no-op moves. */
10055 if (new_src == const0_rtx)
10056 success = validate_change (insn, &SET_SRC (pat), reg, 0);
10057 else if (rtx_cost (new_src, PLUS) < rtx_cost (src, SET)
10058 && have_add2_insn (GET_MODE (reg)))
10059 success = validate_change (insn, &PATTERN (insn),
10060 gen_add2_insn (reg, new_src), 0);
5adf6da0
R
10061 reg_set_luid[regno] = move2add_luid;
10062 reg_mode[regno] = GET_MODE (reg);
10063 reg_offset[regno] = src;
10064 continue;
10065 }
10066
10067 /* Try to transform (set (REGX) (REGY))
10068 (set (REGX) (PLUS (REGX) (CONST_INT A)))
10069 ...
10070 (set (REGX) (REGY))
10071 (set (REGX) (PLUS (REGX) (CONST_INT B)))
10072 to
10073 (set (REGX) (REGY))
10074 (set (REGX) (PLUS (REGX) (CONST_INT A)))
10075 ...
10076 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
10077 else if (GET_CODE (src) == REG
10078 && reg_base_reg[regno] == REGNO (src)
10079 && reg_set_luid[regno] > reg_set_luid[REGNO (src)])
10080 {
10081 rtx next = next_nonnote_insn (insn);
6a651371 10082 rtx set = NULL_RTX;
5adf6da0
R
10083 if (next)
10084 set = single_set (next);
10085 if (next
10086 && set
10087 && SET_DEST (set) == reg
10088 && GET_CODE (SET_SRC (set)) == PLUS
10089 && XEXP (SET_SRC (set), 0) == reg
10090 && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
10091 {
5adf6da0 10092 rtx src3 = XEXP (SET_SRC (set), 1);
ccc4ae07
AS
10093 rtx new_src
10094 = gen_mode_int (GET_MODE (reg),
10095 INTVAL (src3)
10096 - INTVAL (reg_offset[regno]));
5adf6da0
R
10097 int success = 0;
10098
10099 if (new_src == const0_rtx)
10100 /* See above why we create (set (reg) (reg)) here. */
10101 success
10102 = validate_change (next, &SET_SRC (set), reg, 0);
10103 else if ((rtx_cost (new_src, PLUS)
10104 < 2 + rtx_cost (src3, SET))
10105 && have_add2_insn (GET_MODE (reg)))
10106 success
10107 = validate_change (next, &PATTERN (next),
10108 gen_add2_insn (reg, new_src), 0);
10109 if (success)
10110 {
5adf6da0
R
10111 /* INSN might be the first insn in a basic block
10112 if the preceding insn is a conditional jump
10113 or a possibly-throwing call. */
10114 PUT_CODE (insn, NOTE);
10115 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
10116 NOTE_SOURCE_FILE (insn) = 0;
10117 }
10118 insn = next;
10119 reg_set_luid[regno] = move2add_luid;
10120 reg_mode[regno] = GET_MODE (reg);
10121 reg_offset[regno] = src3;
10122 continue;
10123 }
10124 }
10125 }
10126 }
10127
10128 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
10129 {
10130 if (REG_NOTE_KIND (note) == REG_INC
10131 && GET_CODE (XEXP (note, 0)) == REG)
10132 {
10133 /* Indicate that this register has been recently written to,
10134 but the exact contents are not available. */
10135 int regno = REGNO (XEXP (note, 0));
10136 if (regno < FIRST_PSEUDO_REGISTER)
10137 {
10138 reg_set_luid[regno] = move2add_luid;
10139 reg_offset[regno] = note;
10140 }
10141 }
5adf6da0
R
10142 }
10143 note_stores (PATTERN (insn), move2add_note_store);
10144 /* If this is a CALL_INSN, all call used registers are stored with
10145 unknown values. */
10146 if (GET_CODE (insn) == CALL_INSN)
10147 {
10148 for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
10149 {
10150 if (call_used_regs[i])
10151 {
10152 reg_set_luid[i] = move2add_luid;
10153 reg_offset[i] = insn; /* Invalidate contents. */
10154 }
10155 }
10156 }
10157 }
10158}
10159
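/* Illustrative sketch, not part of reload1.c: the arithmetic behind
   reload_cse_move2add.  If register R is known to hold constant A and the
   next load sets it to constant B, the load can be replaced by an addition
   of B - A whenever that addition is cheaper; a delta of zero degenerates
   into a plain register-register copy.  The numeric costs are invented
   stand-ins for rtx_cost.  */
#include <stdio.h>

enum rewrite { KEEP_MOVE, USE_COPY, USE_ADD };

static enum rewrite
move2add (long known_a, long new_b, int cost_of_move, int cost_of_add)
{
  long delta = new_b - known_a;

  if (delta == 0)
    return USE_COPY;               /* (set (reg) (reg)) instead of a reload */
  if (cost_of_add < cost_of_move)
    return USE_ADD;                /* (set (reg) (plus (reg) (const_int delta))) */
  return KEEP_MOVE;
}

int
main (void)
{
  /* r already holds 0x12340000; loading 0x12340008 becomes "add 8".  */
  printf ("%d\n", move2add (0x12340000L, 0x12340008L, 4, 1));  /* 2 = USE_ADD */
  printf ("%d\n", move2add (100, 100, 4, 1));                  /* 1 = USE_COPY */
  printf ("%d\n", move2add (0, 0x12345678L, 1, 4));            /* 0 = KEEP_MOVE */
  return 0;
}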
10160/* SET is a SET or CLOBBER that sets DST.
10161 Update reg_set_luid, reg_offset and reg_base_reg accordingly.
10162 Called from reload_cse_move2add via note_stores. */
10163static void
10164move2add_note_store (dst, set)
10165 rtx dst, set;
10166{
10167 int regno = 0;
10168 int i;
10169
10170 enum machine_mode mode = GET_MODE (dst);
10171 if (GET_CODE (dst) == SUBREG)
10172 {
10173 regno = SUBREG_WORD (dst);
10174 dst = SUBREG_REG (dst);
10175 }
10176 if (GET_CODE (dst) != REG)
10177 return;
10178
10179 regno += REGNO (dst);
10180
f93233bb
JL
10181 if (HARD_REGNO_NREGS (regno, mode) == 1 && GET_CODE (set) == SET
10182 && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
10183 && GET_CODE (SET_DEST (set)) != SIGN_EXTRACT
10184 && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
5adf6da0
R
10185 {
10186 rtx src = SET_SRC (set);
10187
10188 reg_mode[regno] = mode;
10189 switch (GET_CODE (src))
10190 {
10191 case PLUS:
10192 {
10193 rtx src0 = XEXP (src, 0);
10194 if (GET_CODE (src0) == REG)
10195 {
10196 if (REGNO (src0) != regno
10197 || reg_offset[regno] != const0_rtx)
10198 {
10199 reg_base_reg[regno] = REGNO (src0);
10200 reg_set_luid[regno] = move2add_luid;
10201 }
10202 reg_offset[regno] = XEXP (src, 1);
10203 break;
10204 }
10205 reg_set_luid[regno] = move2add_luid;
10206 reg_offset[regno] = set; /* Invalidate contents. */
10207 break;
10208 }
10209
10210 case REG:
10211 reg_base_reg[regno] = REGNO (SET_SRC (set));
10212 reg_offset[regno] = const0_rtx;
10213 reg_set_luid[regno] = move2add_luid;
10214 break;
10215
10216 default:
10217 reg_base_reg[regno] = -1;
10218 reg_offset[regno] = SET_SRC (set);
10219 reg_set_luid[regno] = move2add_luid;
10220 break;
10221 }
10222 }
10223 else
10224 {
10225 for (i = regno + HARD_REGNO_NREGS (regno, mode) - 1; i >= regno; i--)
10226 {
10227 /* Indicate that this register has been recently written to,
10228 but the exact contents are not available. */
10229 reg_set_luid[i] = move2add_luid;
10230 reg_offset[i] = dst;
10231 }
10232 }
10233}
2dfa9a87
MH
10234
10235#ifdef AUTO_INC_DEC
10236static void
10237add_auto_inc_notes (insn, x)
10238 rtx insn;
10239 rtx x;
10240{
10241 enum rtx_code code = GET_CODE (x);
6f7d635c 10242 const char *fmt;
2dfa9a87
MH
10243 int i, j;
10244
10245 if (code == MEM && auto_inc_p (XEXP (x, 0)))
10246 {
10247 REG_NOTES (insn)
10248 = gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
10249 return;
10250 }
10251
10252 /* Scan all the operand sub-expressions. */
10253 fmt = GET_RTX_FORMAT (code);
10254 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10255 {
10256 if (fmt[i] == 'e')
10257 add_auto_inc_notes (insn, XEXP (x, i));
10258 else if (fmt[i] == 'E')
10259 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10260 add_auto_inc_notes (insn, XVECEXP (x, i, j));
10261 }
10262}
10263#endif
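/* Illustrative sketch, not part of reload1.c: the recursive walk pattern
   used by add_auto_inc_notes (and reload_combine_note_use): handle the
   interesting node kind directly, then recurse into every sub-expression.
   The expression type below is a toy stand-in for rtl.  */
#include <stdio.h>

enum kind { REG, CONST, MEM, PLUS, POST_INC };

struct expr
{
  enum kind kind;
  struct expr *op0, *op1;     /* sub-expressions, NULL if absent */
};

/* Count MEMs whose address is an auto-increment, the way add_auto_inc_notes
   adds one REG_INC note per such MEM.  */
static int
count_auto_inc_mems (struct expr *x)
{
  int n = 0;

  if (x == NULL)
    return 0;
  if (x->kind == MEM && x->op0 && x->op0->kind == POST_INC)
    return 1;                 /* note it and do not look deeper */
  n += count_auto_inc_mems (x->op0);
  n += count_auto_inc_mems (x->op1);
  return n;
}

int
main (void)
{
  struct expr reg  = { REG,      NULL,  NULL };
  struct expr inc  = { POST_INC, &reg,  NULL };
  struct expr mem1 = { MEM,      &inc,  NULL };
  struct expr cst  = { CONST,    NULL,  NULL };
  struct expr plus = { PLUS,     &mem1, &cst };
  struct expr mem2 = { MEM,      &plus, NULL };   /* outer MEM, plain address */

  printf ("%d\n", count_auto_inc_mems (&mem2));   /* prints 1 */
  return 0;
}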