/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include <stdio.h>
#include "config.h"
#include "rtl.h"
#include "obstack.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "flags.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "reload.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"

/* This file contains the reload pass of the compiler, which is
   run after register allocation has been done.  It checks that
   each insn is valid (operands required to be in registers really
   are in registers of the proper class) and fixes up invalid ones
   by copying values temporarily into registers for the insns
   that need them.

   The results of register allocation are described by the vector
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
   can be used to find which hard reg, if any, a pseudo reg is in.

   The technique we always use is to free up a few hard regs that are
   called ``reload regs'', and for each place where a pseudo reg
   must be in a hard reg, copy it temporarily into one of the reload regs.

   All the pseudos that were formerly allocated to the hard regs that
   are now in use as reload regs must be ``spilled''.  This means
   that they go to other hard regs, or to stack slots if no other
   available hard regs can be found.  Spilling can invalidate more
   insns, requiring additional need for reloads, so we must keep checking
   until the process stabilizes.

   For machines with different classes of registers, we must keep track
   of the register class needed for each reload, and make sure that
   we allocate enough reload registers of each class.

   The file reload.c contains the code that checks one insn for
   validity and reports the reloads that it needs.  This file
   is in charge of scanning the entire rtl code, accumulating the
   reload needs, spilling, assigning reload registers to use for
   fixing up each insn, and generating the new insns to copy values
   into the reload registers.  */


#ifndef REGISTER_MOVE_COST
#define REGISTER_MOVE_COST(x, y) 2
#endif

#ifndef MEMORY_MOVE_COST
#define MEMORY_MOVE_COST(x) 4
#endif
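
/* The two macros above are only fallback cost estimates, used when the
   target does not define them itself: a register-to-register move is taken
   to cost 2 and a memory reference 4, in the same relative units the
   target macros use.  */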
\f
/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine).  */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;

/* Widest width in which each pseudo reg is referred to (via subreg).  */
static int *reg_max_ref_width;

/* Element N is the insn that initialized reg N from its equivalent
   constant or memory slot.  */
static rtx *reg_equiv_init;

/* During reload_as_needed, element N contains the last pseudo regno
   reloaded into the Nth reload register.  This vector is in parallel
   with spill_regs.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   the Nth reload register was last used.  This vector is in parallel
   with spill_regs, and its contents are significant only when
   reg_reloaded_contents is significant.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

/* This reg set indicates registers that may not be used for retrying global
   allocation.  The registers that may not be used include all spill registers
   and the frame pointer (if we are using one).  */
HARD_REG_SET forbidden_regs;

/* This reg set indicates registers that are not good for spill registers.
   They will not be used to complete groups of spill registers.  This includes
   all fixed registers, registers that may be eliminated, and, if
   SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.

   (spill_reg_order prevents these registers from being used to start a
   group.)  */
static HARD_REG_SET bad_spill_regs;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `spills' is the number of
   elements that are actually valid; new ones are added at the end.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* Describes order of preference for putting regs into spill_regs.
   Contains the numbers of all the hard regs, in order most preferred first.
   This order is different for each function.
   It is set up by order_regs_for_reload.
   Empty elements at the end contain -1.  */
static short potential_reload_regs[FIRST_PSEUDO_REGISTER];

/* 1 for a hard register that appears explicitly in the rtl
   (for example, function value registers, special registers
   used by insns, structure value pointer registers).  */
static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];

/* Indicates if a register was counted against the need for
   groups.  0 means it can count against max_nongroup instead.  */
static HARD_REG_SET counted_for_groups;

/* Indicates if a register was counted against the need for
   non-groups.  0 means it can become part of a new group.
   During choose_reload_regs, 1 here means don't use this reg
   as part of a group, even if it seems to be otherwise ok.  */
static HARD_REG_SET counted_for_nongroups;

/* Indexed by pseudo reg number N,
   says may not delete stores into the real (memory) home of pseudo N.
   This is set if we already substituted a memory equivalent in some uses,
   which happens when we have to eliminate the fp from it.  */
static char *cannot_omit_stores;

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */

static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */

char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */

char double_reg_address_ok;

/* Record the stack slot for each spilled hard register.  */

static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */

static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Indexed by register class and basic block number, nonzero if there is
   any need for a spill register of that class in that basic block.
   The pointer is 0 if we did stupid allocation and don't know
   the structure of basic blocks.  */

char *basic_block_needs[N_REG_CLASSES];

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */

int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */

int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  */

enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */

struct obstack reload_obstack;
char *reload_firstobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* List of labels that must never be deleted.  */
extern rtx forced_labels;
\f
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

static struct elim_table
{
  int from;                     /* Register number to be eliminated.  */
  int to;                       /* Register number used as replacement.  */
  int initial_offset;           /* Initial difference between values.  */
  int can_eliminate;            /* Non-zero if this elimination can be done.  */
  int can_eliminate_previous;   /* Value of CAN_ELIMINATE in previous scan over
                                   insns made by reload.  */
  int offset;                   /* Current offset between the two regs.  */
  int max_offset;               /* Maximum offset between the two regs.  */
  int previous_offset;          /* Offset at end of previous insn.  */
  int ref_outside_mem;          /* "to" has been referenced outside a MEM.  */
  rtx from_rtx;                 /* REG rtx for the register to be eliminated.
                                   We cannot simply compare the number since
                                   we might then spuriously replace a hard
                                   register corresponding to a pseudo
                                   assigned to the reg to be eliminated.  */
  rtx to_rtx;                   /* REG rtx for the replacement.  */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif
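
/* As a hypothetical illustration only (the real pairs and their order are
   machine-dependent), a target that can replace both its argument pointer
   and its frame pointer with the stack pointer might define ELIMINABLE_REGS
   along the lines of
   {{ ARG_POINTER_REGNUM, STACK_POINTER_REGNUM },
    { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}.  */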

#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])

/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If non-zero, we use a new copy of each
   replacement result in any insns encountered.  */
static int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the number of the label.
   The first table is an array of flags that records whether we have yet
   encountered a label and the second table is an array of arrays, one
   entry in the latter array for each elimination.  */

static char *offsets_known_at;
static int (*offsets_at)[NUM_ELIMINABLE_REGS];

/* Number of labels in the current function.  */

static int num_labels;

struct hard_reg_n_uses { int regno; int uses; };
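/* (A hard_reg_n_uses entry pairs a hard register number with a measure of
   how much it is used; order_regs_for_reload sorts these records, via
   hard_reg_use_compare, to build the spill-register preference order.)  */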
\f
static int possible_group_p PROTO((int, int *));
static void count_possible_groups PROTO((int *, enum machine_mode *,
                                          int *, int));
static int modes_equiv_for_class_p PROTO((enum machine_mode,
                                          enum machine_mode,
                                          enum reg_class));
static void spill_failure PROTO((rtx));
static int new_spill_reg PROTO((int, int, int *, int *, int,
                                FILE *));
static void delete_dead_insn PROTO((rtx));
static void alter_reg PROTO((int, int));
static void mark_scratch_live PROTO((rtx));
static void set_label_offsets PROTO((rtx, rtx, int));
static int eliminate_regs_in_insn PROTO((rtx, int));
static void mark_not_eliminable PROTO((rtx, rtx));
static int spill_hard_reg PROTO((int, int, FILE *, int));
static void scan_paradoxical_subregs PROTO((rtx));
static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
                                       struct hard_reg_n_uses *));
static void order_regs_for_reload PROTO((void));
static int compare_spill_regs PROTO((short *, short *));
static void reload_as_needed PROTO((rtx, int));
static void forget_old_reloads_1 PROTO((rtx, rtx));
static int reload_reg_class_lower PROTO((short *, short *));
static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
                                          enum machine_mode));
static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
                                           enum machine_mode));
static int reload_reg_free_p PROTO((int, int, enum reload_type));
static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
static int reloads_conflict PROTO((int, int));
static int allocate_reload_reg PROTO((int, rtx, int, int));
static void choose_reload_regs PROTO((rtx, rtx));
static void merge_assigned_reloads PROTO((rtx));
static void emit_reload_insns PROTO((rtx));
static void delete_output_reload PROTO((rtx, int, rtx));
static void inc_for_reload PROTO((rtx, rtx, int));
static int constraint_accepts_reg_p PROTO((char *, rtx));
static int count_occurrences PROTO((rtx, rtx));
\f
/* Initialize the reload pass once per compilation.  */

void
init_reload ()
{
  register int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  register rtx tem
    = gen_rtx (MEM, Pmode,
               gen_rtx (PLUS, Pmode,
                        gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
                        GEN_INT (4)));
  spill_indirect_levels = 0;

  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx (MEM, Pmode, tem);
    }

  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx (PLUS, Pmode,
                     gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
                     gen_rtx (REG, Pmode, i));
      /* This way, we make sure that reg+reg is an offsettable address.  */
      tem = plus_constant (tem, 4);

      if (memory_address_p (QImode, tem))
        {
          double_reg_address_ok = 1;
          break;
        }
    }

  /* Initialize obstack for our rtl allocation.  */
  gcc_obstack_init (&reload_obstack);
  reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
}

/* Main entry point for the reload pass.

   FIRST is the first insn of the function being compiled.

   GLOBAL nonzero means we were called from global_alloc
   and should attempt to reallocate any pseudoregs that we
   displace from hard regs we will use for reloads.
   If GLOBAL is zero, we do not have enough information to do that,
   so any pseudo reg that is spilled must go to the stack.

   DUMPFILE is the global-reg debugging dump file stream, or 0.
   If it is nonzero, messages are written to it to describe
   which registers are seized as reload regs, which pseudo regs
   are spilled from them, and where the pseudo regs are reallocated to.

   Return value is nonzero if reload failed
   and we must not do any more for this function.  */

int
reload (first, global, dumpfile)
     rtx first;
     int global;
     FILE *dumpfile;
{
  register int class;
  register int i, j, k;
  register rtx insn;
  register struct elim_table *ep;

  int something_changed;
  int something_needs_reloads;
  int something_needs_elimination;
  int new_basic_block_needs;
  enum reg_class caller_save_spill_class = NO_REGS;
  int caller_save_group_size = 1;

  /* Nonzero means we couldn't get enough spill regs.  */
  int failure = 0;

  /* The basic block number currently being processed for INSN.  */
  int this_block;

  /* Make sure even insns with volatile mem refs are recognizable.  */
  init_recog ();

  /* Enable find_equiv_reg to distinguish insns made by reload.  */
  reload_first_uid = get_max_uid ();

  for (i = 0; i < N_REG_CLASSES; i++)
    basic_block_needs[i] = 0;

#ifdef SECONDARY_MEMORY_NEEDED
  /* Initialize the secondary memory table.  */
  clear_secondary_mem ();
#endif

  /* Remember which hard regs appear explicitly
     before we merge into `regs_ever_live' the ones in which
     pseudo regs have been allocated.  */
  bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);

  /* We don't have a stack slot for any spill reg yet.  */
  bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
  bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);

  /* Initialize the save area information for caller-save, in case some
     are needed.  */
  init_save_areas ();

  /* Compute which hard registers are now in use
     as homes for pseudo registers.
     This is done here rather than (eg) in global_alloc
     because this point is reached even if not optimizing.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    mark_home_live (i);

  for (i = 0; i < scratch_list_length; i++)
    if (scratch_list[i])
      mark_scratch_live (scratch_list[i]);

  /* Make sure that the last insn in the chain
     is not something that needs reloading.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Find all the pseudo registers that didn't get hard regs
     but do have known equivalent constants or memory slots.
     These include parameters (known equivalent to parameter slots)
     and cse'd or loop-moved constant memory addresses.

     Record constant equivalents in reg_equiv_constant
     so they will be substituted by find_reloads.
     Record memory equivalents in reg_mem_equiv so they can
     be substituted eventually by altering the REG-rtx's.  */

  reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
  reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
  reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
  reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
  reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
  reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
  bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
  cannot_omit_stores = (char *) alloca (max_regno);
  bzero (cannot_omit_stores, max_regno);

#ifdef SMALL_REGISTER_CLASSES
  CLEAR_HARD_REG_SET (forbidden_regs);
#endif

  /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
     Also find all paradoxical subregs and find largest such for each pseudo.
     On machines with small register classes, record hard registers that
     are used for user variables.  These can never be used for spills.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
        {
          rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
          if (note
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
                  || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
#endif
              )
            {
              rtx x = XEXP (note, 0);
              i = REGNO (SET_DEST (set));
              if (i > LAST_VIRTUAL_REGISTER)
                {
                  if (GET_CODE (x) == MEM)
                    reg_equiv_memory_loc[i] = x;
                  else if (CONSTANT_P (x))
                    {
                      if (LEGITIMATE_CONSTANT_P (x))
                        reg_equiv_constant[i] = x;
                      else
                        reg_equiv_memory_loc[i]
                          = force_const_mem (GET_MODE (SET_DEST (set)), x);
                    }
                  else
                    continue;

                  /* If this register is being made equivalent to a MEM
                     and the MEM is not SET_SRC, the equivalencing insn
                     is one with the MEM as a SET_DEST and it occurs later.
                     So don't mark this insn now.  */
                  if (GET_CODE (x) != MEM
                      || rtx_equal_p (SET_SRC (set), x))
                    reg_equiv_init[i] = insn;
                }
            }
        }

      /* If this insn is setting a MEM from a register equivalent to it,
         this is the equivalencing insn.  */
      else if (set && GET_CODE (SET_DEST (set)) == MEM
               && GET_CODE (SET_SRC (set)) == REG
               && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
               && rtx_equal_p (SET_DEST (set),
                               reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
        reg_equiv_init[REGNO (SET_SRC (set))] = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        scan_paradoxical_subregs (PATTERN (insn));
    }

  /* Does this function require a frame pointer?  */

  frame_pointer_needed = (! flag_omit_frame_pointer
#ifdef EXIT_IGNORE_STACK
                          /* ?? If EXIT_IGNORE_STACK is set, we will not save
                             and restore sp for alloca.  So we can't eliminate
                             the frame pointer in that case.  At some point,
                             we should improve this by emitting the
                             sp-adjusting insns for this case.  */
                          || (current_function_calls_alloca
                              && EXIT_IGNORE_STACK)
#endif
                          || FRAME_POINTER_REQUIRED);

  num_eliminable = 0;

  /* Initialize the table of registers to eliminate.  The way we do this
     depends on how the eliminable registers were defined.  */
#ifdef ELIMINABLE_REGS
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      ep->can_eliminate = ep->can_eliminate_previous
        = (CAN_ELIMINATE (ep->from, ep->to)
           && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
    }
#else
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
    = ! frame_pointer_needed;
#endif

  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx will cause, e.g.,
     gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
      ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
    }

  num_labels = max_label_num () - get_first_label_num ();

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = (char *) alloca (num_labels);
  offsets_at
    = (int (*)[NUM_ELIMINABLE_REGS])
      alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));

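  /* (Both tables are biased by the first label number so that they can be
     indexed directly by a label's number.)  */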
  offsets_known_at -= get_first_label_num ();
  offsets_at -= get_first_label_num ();

  /* Alter each pseudo-reg rtx to contain its hard reg number.
     Assign stack slots to the pseudos that lack hard regs or equivalents.
     Do not touch virtual registers.  */

  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
    alter_reg (i, -1);

  /* Round size of stack frame to BIGGEST_ALIGNMENT.  This must be done here
     because the stack size may be a part of the offset computation for
     register elimination.  */
  assign_stack_local (BLKmode, 0, 0);

  /* If we have some registers we think can be eliminated, scan all insns to
     see if there is an insn that sets one of these registers to something
     other than itself plus a constant.  If so, the register cannot be
     eliminated.  Doing this scan here eliminates an extra pass through the
     main reload loop in the most common case where register elimination
     cannot be done.  */
  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
        || GET_CODE (insn) == CALL_INSN)
      note_stores (PATTERN (insn), mark_not_eliminable);

#ifndef REGISTER_CONSTRAINTS
  /* If all the pseudo regs have hard regs,
     except for those that are never referenced,
     we know that no reloads are needed.  */
  /* But that is not true if there are register constraints, since
     in that case some pseudos might be in the wrong kind of hard reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
      break;

  if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
    return;
#endif

  /* Compute the order of preference for hard registers to spill.
     Store them by decreasing preference in potential_reload_regs.  */

  order_regs_for_reload ();

  /* So far, no hard regs have been spilled.  */
  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    spill_reg_order[i] = -1;

  /* On most machines, we can't use any register explicitly used in the
     rtl as a spill register.  But on some, we have to.  Those will have
     taken care to keep the life of hard regs as short as possible.  */

#ifndef SMALL_REGISTER_CLASSES
  COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
#endif

  /* Spill any hard regs that we know we can't eliminate.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if (! ep->can_eliminate)
      spill_hard_reg (ep->from, global, dumpfile, 1);

#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  if (frame_pointer_needed)
    spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
#endif

  if (global)
    for (i = 0; i < N_REG_CLASSES; i++)
      {
        basic_block_needs[i] = (char *) alloca (n_basic_blocks);
        bzero (basic_block_needs[i], n_basic_blocks);
      }

  /* From now on, we need to emit any moves without making new pseudos.  */
  reload_in_progress = 1;

  /* This loop scans the entire function each go-round
     and repeats until one repetition spills no additional hard regs.  */

  /* This flag is set when a pseudo reg is spilled,
     to require another pass.  Note that getting an additional reload
     reg does not necessarily imply any pseudo reg was spilled;
     sometimes we find a reload reg that no pseudo reg was allocated in.  */
  something_changed = 1;
  /* This flag is set if there are any insns that require reloading.  */
  something_needs_reloads = 0;
  /* This flag is set if there are any insns that require register
     eliminations.  */
  something_needs_elimination = 0;
  while (something_changed)
    {
      rtx after_call = 0;

      /* For each class, number of reload regs needed in that class.
         This is the maximum over all insns of the needs in that class
         of the individual insn.  */
      int max_needs[N_REG_CLASSES];
      /* For each class, size of group of consecutive regs
         that is needed for the reloads of this class.  */
      int group_size[N_REG_CLASSES];
      /* For each class, max number of consecutive groups needed.
         (Each group contains group_size[CLASS] consecutive registers.)  */
      int max_groups[N_REG_CLASSES];
      /* For each class, max number needed of regs that don't belong
         to any of the groups.  */
      int max_nongroups[N_REG_CLASSES];
      /* For each class, the machine mode which requires consecutive
         groups of regs of that class.
         If two different modes ever require groups of one class,
         they must be the same size and equally restrictive for that class,
         otherwise we can't handle the complexity.  */
      enum machine_mode group_mode[N_REG_CLASSES];
      /* Record the insn where each maximum need is first found.  */
      rtx max_needs_insn[N_REG_CLASSES];
      rtx max_groups_insn[N_REG_CLASSES];
      rtx max_nongroups_insn[N_REG_CLASSES];
      rtx x;
      int starting_frame_size = get_frame_size ();
      int previous_frame_pointer_needed = frame_pointer_needed;
      static char *reg_class_names[] = REG_CLASS_NAMES;

      something_changed = 0;
      bzero ((char *) max_needs, sizeof max_needs);
      bzero ((char *) max_groups, sizeof max_groups);
      bzero ((char *) max_nongroups, sizeof max_nongroups);
      bzero ((char *) max_needs_insn, sizeof max_needs_insn);
      bzero ((char *) max_groups_insn, sizeof max_groups_insn);
      bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
      bzero ((char *) group_size, sizeof group_size);
      for (i = 0; i < N_REG_CLASSES; i++)
        group_mode[i] = VOIDmode;

      /* Keep track of which basic blocks are needing the reloads.  */
      this_block = 0;

      /* Remember whether any element of basic_block_needs
         changes from 0 to 1 in this pass.  */
      new_basic_block_needs = 0;

      /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
        {
          INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
          ep->previous_offset = ep->offset
            = ep->max_offset = ep->initial_offset;
        }
#else
#ifdef INITIAL_FRAME_POINTER_OFFSET
      INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
#else
      if (!FRAME_POINTER_REQUIRED)
        abort ();
      reg_eliminate[0].initial_offset = 0;
#endif
      reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
        = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

      num_not_at_initial_offset = 0;

      bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);

      /* Set a known offset for each forced label to be at the initial offset
         of each elimination.  We do this because we assume that all
         computed jumps occur from a location where each elimination is
         at its initial offset.  */

      for (x = forced_labels; x; x = XEXP (x, 1))
        if (XEXP (x, 0))
          set_label_offsets (XEXP (x, 0), NULL_RTX, 1);

      /* For each pseudo register that has an equivalent location defined,
         try to eliminate any eliminable registers (such as the frame pointer)
         assuming initial offsets for the replacement register, which
         is the normal case.

         If the resulting location is directly addressable, substitute
         the MEM we just got directly for the old REG.

         If it is not addressable but is a constant or the sum of a hard reg
         and constant, it is probably not addressable because the constant is
         out of range, in that case record the address; we will generate
         hairy code to compute the address in a register each time it is
         needed.  Similarly if it is a hard register, but one that is not
         valid as an address register.

         If the location is not addressable, but does not have one of the
         above forms, assign a stack slot.  We have to do this to avoid the
         potential of producing lots of reloads if, e.g., a location involves
         a pseudo that didn't get a hard register and has an equivalent memory
         location that also involves a pseudo that didn't get a hard register.

         Perhaps at some point we will improve reload_when_needed handling
         so this problem goes away.  But that's very hairy.  */

      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
        if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
          {
            rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);

            if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
                                         XEXP (x, 0)))
              reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
            else if (CONSTANT_P (XEXP (x, 0))
                     || (GET_CODE (XEXP (x, 0)) == REG
                         && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
                     || (GET_CODE (XEXP (x, 0)) == PLUS
                         && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
                         && (REGNO (XEXP (XEXP (x, 0), 0))
                             < FIRST_PSEUDO_REGISTER)
                         && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
              reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
            else
              {
                /* Make a new stack slot.  Then indicate that something
                   changed so we go back and recompute offsets for
                   eliminable registers because the allocation of memory
                   below might change some offset.  reg_equiv_{mem,address}
                   will be set up for this pseudo on the next pass around
                   the loop.  */
                reg_equiv_memory_loc[i] = 0;
                reg_equiv_init[i] = 0;
                alter_reg (i, -1);
                something_changed = 1;
              }
          }

      /* If we allocated another pseudo to the stack, redo elimination
         bookkeeping.  */
      if (something_changed)
        continue;

      /* If caller-saves needs a group, initialize the group to include
         the size and mode required for caller-saves.  */

      if (caller_save_group_size > 1)
        {
          group_mode[(int) caller_save_spill_class] = Pmode;
          group_size[(int) caller_save_spill_class] = caller_save_group_size;
        }

      /* Compute the most additional registers needed by any instruction.
         Collect information separately for each class of regs.  */

      for (insn = first; insn; insn = NEXT_INSN (insn))
        {
          if (global && this_block + 1 < n_basic_blocks
              && insn == basic_block_head[this_block+1])
            ++this_block;

          /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
             might include REG_LABEL), we need to see what effects this
             has on the known offsets at labels.  */

          if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
              || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
                  && REG_NOTES (insn) != 0))
            set_label_offsets (insn, insn, 0);

          if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
            {
              /* Nonzero means don't use a reload reg that overlaps
                 the place where a function value can be returned.  */
              rtx avoid_return_reg = 0;

              rtx old_body = PATTERN (insn);
              int old_code = INSN_CODE (insn);
              rtx old_notes = REG_NOTES (insn);
              int did_elimination = 0;

              /* To compute the number of reload registers of each class
                 needed for an insn, we must simulate what choose_reload_regs
                 can do.  We do this by splitting an insn into an "input" and
                 an "output" part.  RELOAD_OTHER reloads are used in both.
                 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
                 which must be live over the entire input section of reloads,
                 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
                 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
                 inputs.

                 The registers needed for output are RELOAD_OTHER and
                 RELOAD_FOR_OUTPUT, which are live for the entire output
                 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
                 reloads for each operand.

                 The total number of registers needed is the maximum of the
                 inputs and outputs.  */

              struct needs
                {
                  /* [0] is normal, [1] is nongroup.  */
                  int regs[2][N_REG_CLASSES];
                  int groups[N_REG_CLASSES];
                };

              /* Each `struct needs' corresponds to one RELOAD_... type.  */
              struct {
                struct needs other;
                struct needs input;
                struct needs output;
                struct needs insn;
                struct needs other_addr;
                struct needs op_addr;
                struct needs op_addr_reload;
                struct needs in_addr[MAX_RECOG_OPERANDS];
                struct needs out_addr[MAX_RECOG_OPERANDS];
              } insn_needs;

              /* If needed, eliminate any eliminable registers.  */
              if (num_eliminable)
                did_elimination = eliminate_regs_in_insn (insn, 0);

#ifdef SMALL_REGISTER_CLASSES
              /* Set avoid_return_reg if this is an insn
                 that might use the value of a function call.  */
              if (GET_CODE (insn) == CALL_INSN)
                {
                  if (GET_CODE (PATTERN (insn)) == SET)
                    after_call = SET_DEST (PATTERN (insn));
                  else if (GET_CODE (PATTERN (insn)) == PARALLEL
                           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                    after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
                  else
                    after_call = 0;
                }
              else if (after_call != 0
                       && !(GET_CODE (PATTERN (insn)) == SET
                            && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
                {
                  if (reg_referenced_p (after_call, PATTERN (insn)))
                    avoid_return_reg = after_call;
                  after_call = 0;
                }
#endif /* SMALL_REGISTER_CLASSES */

              /* Analyze the instruction.  */
              find_reloads (insn, 0, spill_indirect_levels, global,
                            spill_reg_order);

              /* Remember for later shortcuts which insns had any reloads or
                 register eliminations.

                 One might think that it would be worthwhile to mark insns
                 that need register replacements but not reloads, but this is
                 not safe because find_reloads may do some manipulation of
                 the insn (such as swapping commutative operands), which would
                 be lost when we restore the old pattern after register
                 replacement.  So the actions of find_reloads must be redone in
                 subsequent passes or in reload_as_needed.

                 However, it is safe to mark insns that need reloads
                 but not register replacement.  */

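              /* The insn's mode field is used below as scratch flag bits:
                 QImode marks an insn that needed register elimination,
                 HImode one that needs reloads, VOIDmode one that needs
                 neither; an existing DImode marking is left alone.  */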
1000 PUT_MODE (insn, (did_elimination ? QImode
1001 : n_reloads ? HImode
546b63fb 1002 : GET_MODE (insn) == DImode ? DImode
32131a9c
RK
1003 : VOIDmode));
1004
1005 /* Discard any register replacements done. */
1006 if (did_elimination)
1007 {
1008 obstack_free (&reload_obstack, reload_firstobj);
1009 PATTERN (insn) = old_body;
1010 INSN_CODE (insn) = old_code;
1011 REG_NOTES (insn) = old_notes;
1012 something_needs_elimination = 1;
1013 }
1014
a8efe40d 1015 /* If this insn has no reloads, we need not do anything except
a8fdc208 1016 in the case of a CALL_INSN when we have caller-saves and
a8efe40d
RK
1017 caller-save needs reloads. */
1018
1019 if (n_reloads == 0
1020 && ! (GET_CODE (insn) == CALL_INSN
1021 && caller_save_spill_class != NO_REGS))
32131a9c
RK
1022 continue;
1023
1024 something_needs_reloads = 1;
4c9a05bc 1025 bzero ((char *) &insn_needs, sizeof insn_needs);
32131a9c
RK
1026
1027 /* Count each reload once in every class
1028 containing the reload's own class. */
1029
1030 for (i = 0; i < n_reloads; i++)
1031 {
1032 register enum reg_class *p;
e85ddd99 1033 enum reg_class class = reload_reg_class[i];
32131a9c
RK
1034 int size;
1035 enum machine_mode mode;
ce0e109b 1036 int nongroup_need;
8b3e912b 1037 struct needs *this_needs;
32131a9c
RK
1038
1039 /* Don't count the dummy reloads, for which one of the
1040 regs mentioned in the insn can be used for reloading.
1041 Don't count optional reloads.
1042 Don't count reloads that got combined with others. */
1043 if (reload_reg_rtx[i] != 0
1044 || reload_optional[i] != 0
1045 || (reload_out[i] == 0 && reload_in[i] == 0
1046 && ! reload_secondary_p[i]))
1047 continue;
1048
e85ddd99
RK
1049 /* Show that a reload register of this class is needed
1050 in this basic block. We do not use insn_needs and
1051 insn_groups because they are overly conservative for
1052 this purpose. */
1053 if (global && ! basic_block_needs[(int) class][this_block])
1054 {
1055 basic_block_needs[(int) class][this_block] = 1;
1056 new_basic_block_needs = 1;
1057 }
1058
ee249c09
RK
1059
1060 mode = reload_inmode[i];
1061 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1062 mode = reload_outmode[i];
1063 size = CLASS_MAX_NREGS (class, mode);
1064
8b3e912b
RK
1065 /* If this class doesn't want a group, determine if we have
1066 a nongroup need or a regular need. We have a nongroup
1067 need if this reload conflicts with a group reload whose
1068 class intersects with this reload's class. */
ce0e109b
RK
1069
1070 nongroup_need = 0;
ee249c09 1071 if (size == 1)
b8f4c738
RK
1072 for (j = 0; j < n_reloads; j++)
1073 if ((CLASS_MAX_NREGS (reload_reg_class[j],
255cf280
RK
1074 (GET_MODE_SIZE (reload_outmode[j])
1075 > GET_MODE_SIZE (reload_inmode[j]))
1076 ? reload_outmode[j]
1077 : reload_inmode[j])
b8f4c738 1078 > 1)
893bc853
RK
1079 && (!reload_optional[j])
1080 && (reload_in[j] != 0 || reload_out[j] != 0
1081 || reload_secondary_p[j])
b8f4c738 1082 && reloads_conflict (i, j)
ce0e109b
RK
1083 && reg_classes_intersect_p (class,
1084 reload_reg_class[j]))
1085 {
1086 nongroup_need = 1;
1087 break;
1088 }
1089
32131a9c
RK
1090 /* Decide which time-of-use to count this reload for. */
1091 switch (reload_when_needed[i])
1092 {
1093 case RELOAD_OTHER:
8b3e912b 1094 this_needs = &insn_needs.other;
32131a9c 1095 break;
546b63fb 1096 case RELOAD_FOR_INPUT:
8b3e912b 1097 this_needs = &insn_needs.input;
32131a9c 1098 break;
546b63fb 1099 case RELOAD_FOR_OUTPUT:
8b3e912b 1100 this_needs = &insn_needs.output;
32131a9c 1101 break;
546b63fb 1102 case RELOAD_FOR_INSN:
8b3e912b 1103 this_needs = &insn_needs.insn;
546b63fb 1104 break;
546b63fb 1105 case RELOAD_FOR_OTHER_ADDRESS:
8b3e912b 1106 this_needs = &insn_needs.other_addr;
546b63fb 1107 break;
546b63fb 1108 case RELOAD_FOR_INPUT_ADDRESS:
8b3e912b 1109 this_needs = &insn_needs.in_addr[reload_opnum[i]];
546b63fb 1110 break;
546b63fb 1111 case RELOAD_FOR_OUTPUT_ADDRESS:
8b3e912b 1112 this_needs = &insn_needs.out_addr[reload_opnum[i]];
546b63fb 1113 break;
32131a9c 1114 case RELOAD_FOR_OPERAND_ADDRESS:
8b3e912b 1115 this_needs = &insn_needs.op_addr;
32131a9c 1116 break;
893bc853
RK
1117 case RELOAD_FOR_OPADDR_ADDR:
1118 this_needs = &insn_needs.op_addr_reload;
1119 break;
32131a9c
RK
1120 }
1121
32131a9c
RK
1122 if (size > 1)
1123 {
1124 enum machine_mode other_mode, allocate_mode;
1125
1126 /* Count number of groups needed separately from
1127 number of individual regs needed. */
8b3e912b 1128 this_needs->groups[(int) class]++;
e85ddd99 1129 p = reg_class_superclasses[(int) class];
32131a9c 1130 while (*p != LIM_REG_CLASSES)
8b3e912b 1131 this_needs->groups[(int) *p++]++;
32131a9c
RK
1132
1133 /* Record size and mode of a group of this class. */
1134 /* If more than one size group is needed,
1135 make all groups the largest needed size. */
e85ddd99 1136 if (group_size[(int) class] < size)
32131a9c 1137 {
e85ddd99 1138 other_mode = group_mode[(int) class];
32131a9c
RK
1139 allocate_mode = mode;
1140
e85ddd99
RK
1141 group_size[(int) class] = size;
1142 group_mode[(int) class] = mode;
32131a9c
RK
1143 }
1144 else
1145 {
1146 other_mode = mode;
e85ddd99 1147 allocate_mode = group_mode[(int) class];
32131a9c
RK
1148 }
1149
1150 /* Crash if two dissimilar machine modes both need
1151 groups of consecutive regs of the same class. */
1152
8b3e912b 1153 if (other_mode != VOIDmode && other_mode != allocate_mode
32131a9c 1154 && ! modes_equiv_for_class_p (allocate_mode,
8b3e912b 1155 other_mode, class))
a89b2cc4
MM
1156 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1157 insn);
32131a9c
RK
1158 }
1159 else if (size == 1)
1160 {
8b3e912b 1161 this_needs->regs[nongroup_need][(int) class] += 1;
e85ddd99 1162 p = reg_class_superclasses[(int) class];
32131a9c 1163 while (*p != LIM_REG_CLASSES)
8b3e912b 1164 this_needs->regs[nongroup_need][(int) *p++] += 1;
32131a9c
RK
1165 }
1166 else
1167 abort ();
1168 }
1169
1170 /* All reloads have been counted for this insn;
1171 now merge the various times of use.
1172 This sets insn_needs, etc., to the maximum total number
1173 of registers needed at any point in this insn. */
1174
1175 for (i = 0; i < N_REG_CLASSES; i++)
1176 {
546b63fb
RK
1177 int in_max, out_max;
1178
8b3e912b
RK
1179 /* Compute normal and nongroup needs. */
1180 for (j = 0; j <= 1; j++)
546b63fb 1181 {
8b3e912b
RK
1182 for (in_max = 0, out_max = 0, k = 0;
1183 k < reload_n_operands; k++)
1184 {
1185 in_max
1186 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1187 out_max
1188 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1189 }
546b63fb 1190
8b3e912b
RK
1191 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1192 and operand addresses but not things used to reload
1193 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1194 don't conflict with things needed to reload inputs or
1195 outputs. */
546b63fb 1196
893bc853
RK
1197 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1198 insn_needs.op_addr_reload.regs[j][i]),
1199 in_max);
1200
8b3e912b 1201 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
546b63fb 1202
8b3e912b
RK
1203 insn_needs.input.regs[j][i]
1204 = MAX (insn_needs.input.regs[j][i]
1205 + insn_needs.op_addr.regs[j][i]
1206 + insn_needs.insn.regs[j][i],
1207 in_max + insn_needs.input.regs[j][i]);
546b63fb 1208
8b3e912b
RK
1209 insn_needs.output.regs[j][i] += out_max;
1210 insn_needs.other.regs[j][i]
1211 += MAX (MAX (insn_needs.input.regs[j][i],
1212 insn_needs.output.regs[j][i]),
1213 insn_needs.other_addr.regs[j][i]);
546b63fb 1214
ce0e109b
RK
1215 }
1216
8b3e912b 1217 /* Now compute group needs. */
546b63fb
RK
1218 for (in_max = 0, out_max = 0, j = 0;
1219 j < reload_n_operands; j++)
1220 {
8b3e912b
RK
1221 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1222 out_max
1223 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
546b63fb
RK
1224 }
1225
893bc853
RK
1226 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1227 insn_needs.op_addr_reload.groups[i]),
1228 in_max);
8b3e912b 1229 out_max = MAX (out_max, insn_needs.insn.groups[i]);
546b63fb 1230
8b3e912b
RK
1231 insn_needs.input.groups[i]
1232 = MAX (insn_needs.input.groups[i]
1233 + insn_needs.op_addr.groups[i]
1234 + insn_needs.insn.groups[i],
1235 in_max + insn_needs.input.groups[i]);
546b63fb 1236
8b3e912b
RK
1237 insn_needs.output.groups[i] += out_max;
1238 insn_needs.other.groups[i]
1239 += MAX (MAX (insn_needs.input.groups[i],
1240 insn_needs.output.groups[i]),
1241 insn_needs.other_addr.groups[i]);
546b63fb
RK
1242 }
1243
a8efe40d
RK
1244 /* If this is a CALL_INSN and caller-saves will need
1245 a spill register, act as if the spill register is
1246 needed for this insn. However, the spill register
1247 can be used by any reload of this insn, so we only
1248 need do something if no need for that class has
a8fdc208 1249 been recorded.
a8efe40d
RK
1250
1251 The assumption that every CALL_INSN will trigger a
1252 caller-save is highly conservative, however, the number
1253 of cases where caller-saves will need a spill register but
1254 a block containing a CALL_INSN won't need a spill register
1255 of that class should be quite rare.
1256
1257 If a group is needed, the size and mode of the group will
d45cf215 1258 have been set up at the beginning of this loop. */
a8efe40d
RK
1259
1260 if (GET_CODE (insn) == CALL_INSN
1261 && caller_save_spill_class != NO_REGS)
1262 {
8b3e912b
RK
1263 /* See if this register would conflict with any reload
1264 that needs a group. */
1265 int nongroup_need = 0;
1266 int *caller_save_needs;
1267
1268 for (j = 0; j < n_reloads; j++)
1269 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1270 (GET_MODE_SIZE (reload_outmode[j])
1271 > GET_MODE_SIZE (reload_inmode[j]))
1272 ? reload_outmode[j]
1273 : reload_inmode[j])
1274 > 1)
1275 && reg_classes_intersect_p (caller_save_spill_class,
1276 reload_reg_class[j]))
1277 {
1278 nongroup_need = 1;
1279 break;
1280 }
1281
1282 caller_save_needs
1283 = (caller_save_group_size > 1
1284 ? insn_needs.other.groups
1285 : insn_needs.other.regs[nongroup_need]);
a8efe40d
RK
1286
1287 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1288 {
1289 register enum reg_class *p
1290 = reg_class_superclasses[(int) caller_save_spill_class];
1291
1292 caller_save_needs[(int) caller_save_spill_class]++;
1293
1294 while (*p != LIM_REG_CLASSES)
0aaa6af8 1295 caller_save_needs[(int) *p++] += 1;
a8efe40d
RK
1296 }
1297
8b3e912b 1298 /* Show that this basic block will need a register of
d1c1397e
RS
1299 this class. */
1300
8b3e912b
RK
1301 if (global
1302 && ! (basic_block_needs[(int) caller_save_spill_class]
1303 [this_block]))
1304 {
1305 basic_block_needs[(int) caller_save_spill_class]
1306 [this_block] = 1;
1307 new_basic_block_needs = 1;
1308 }
a8efe40d
RK
1309 }
1310
32131a9c
RK
1311#ifdef SMALL_REGISTER_CLASSES
1312 /* If this insn stores the value of a function call,
1313 and that value is in a register that has been spilled,
1314 and if the insn needs a reload in a class
1315 that might use that register as the reload register,
1316 then add add an extra need in that class.
1317 This makes sure we have a register available that does
1318 not overlap the return value. */
8b3e912b 1319
32131a9c
RK
1320 if (avoid_return_reg)
1321 {
1322 int regno = REGNO (avoid_return_reg);
1323 int nregs
1324 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1325 int r;
546b63fb
RK
1326 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1327
1328 /* First compute the "basic needs", which counts a
1329 need only in the smallest class in which it
1330 is required. */
1331
8b3e912b
RK
1332 bcopy (insn_needs.other.regs[0], basic_needs,
1333 sizeof basic_needs);
1334 bcopy (insn_needs.other.groups, basic_groups,
1335 sizeof basic_groups);
546b63fb
RK
1336
1337 for (i = 0; i < N_REG_CLASSES; i++)
1338 {
1339 enum reg_class *p;
1340
1341 if (basic_needs[i] >= 0)
1342 for (p = reg_class_superclasses[i];
1343 *p != LIM_REG_CLASSES; p++)
1344 basic_needs[(int) *p] -= basic_needs[i];
1345
1346 if (basic_groups[i] >= 0)
1347 for (p = reg_class_superclasses[i];
1348 *p != LIM_REG_CLASSES; p++)
1349 basic_groups[(int) *p] -= basic_groups[i];
1350 }
1351
1352 /* Now count extra regs if there might be a conflict with
1353 the return value register.
1354
1355 ??? This is not quite correct because we don't properly
1356 handle the case of groups, but if we end up doing
1357 something wrong, it either will end up not mattering or
1358 we will abort elsewhere. */
1359
32131a9c
RK
1360 for (r = regno; r < regno + nregs; r++)
1361 if (spill_reg_order[r] >= 0)
1362 for (i = 0; i < N_REG_CLASSES; i++)
1363 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1364 {
546b63fb
RK
1365 if (basic_needs[i] > 0 || basic_groups[i] > 0)
1366 {
1367 enum reg_class *p;
1368
8b3e912b 1369 insn_needs.other.regs[0][i]++;
546b63fb
RK
1370 p = reg_class_superclasses[i];
1371 while (*p != LIM_REG_CLASSES)
8b3e912b 1372 insn_needs.other.regs[0][(int) *p++]++;
546b63fb 1373 }
32131a9c 1374 }
32131a9c
RK
1375 }
1376#endif /* SMALL_REGISTER_CLASSES */
1377
1378 /* For each class, collect maximum need of any insn. */
1379
1380 for (i = 0; i < N_REG_CLASSES; i++)
1381 {
8b3e912b 1382 if (max_needs[i] < insn_needs.other.regs[0][i])
5352b11a 1383 {
8b3e912b 1384 max_needs[i] = insn_needs.other.regs[0][i];
5352b11a
RS
1385 max_needs_insn[i] = insn;
1386 }
8b3e912b 1387 if (max_groups[i] < insn_needs.other.groups[i])
5352b11a 1388 {
8b3e912b 1389 max_groups[i] = insn_needs.other.groups[i];
5352b11a
RS
1390 max_groups_insn[i] = insn;
1391 }
8b3e912b 1392 if (max_nongroups[i] < insn_needs.other.regs[1][i])
ce0e109b 1393 {
8b3e912b 1394 max_nongroups[i] = insn_needs.other.regs[1][i];
ce0e109b
RK
1395 max_nongroups_insn[i] = insn;
1396 }
32131a9c
RK
1397 }
1398 }
1399 /* Note that there is a continue statement above. */
1400 }
1401
0dadecf6
RK
1402 /* If we allocated any new memory locations, make another pass
1403 since it might have changed elimination offsets. */
1404 if (starting_frame_size != get_frame_size ())
1405 something_changed = 1;
1406
e404a39a
RK
1407 if (dumpfile)
1408 for (i = 0; i < N_REG_CLASSES; i++)
1409 {
1410 if (max_needs[i] > 0)
1411 fprintf (dumpfile,
1412 ";; Need %d reg%s of class %s (for insn %d).\n",
1413 max_needs[i], max_needs[i] == 1 ? "" : "s",
1414 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1415 if (max_nongroups[i] > 0)
1416 fprintf (dumpfile,
1417 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1418 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1419 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1420 if (max_groups[i] > 0)
1421 fprintf (dumpfile,
1422 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1423 max_groups[i], max_groups[i] == 1 ? "" : "s",
1424 mode_name[(int) group_mode[i]],
1425 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1426 }
1427
d445b551 1428 /* If we have caller-saves, set up the save areas and see if caller-save
a8efe40d 1429 will need a spill register. */
32131a9c 1430
d445b551 1431 if (caller_save_needed
a8efe40d
RK
1432 && ! setup_save_areas (&something_changed)
1433 && caller_save_spill_class == NO_REGS)
32131a9c 1434 {
a8efe40d
RK
1435 /* The class we will need depends on whether the machine
1436 supports the sum of two registers for an address; see
1437 find_address_reloads for details. */
1438
a8fdc208 1439 caller_save_spill_class
a8efe40d
RK
1440 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1441 caller_save_group_size
1442 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1443 something_changed = 1;
32131a9c
RK
1444 }
1445
5c23c401
RK
1446 /* See if anything that happened changes which eliminations are valid.
1447 For example, on the Sparc, whether or not the frame pointer can
1448 be eliminated can depend on what registers have been used. We need
1449 not check some conditions again (such as flag_omit_frame_pointer)
1450 since they can't have changed. */
1451
1452 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3ec2ea3e 1453 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
5c23c401
RK
1454#ifdef ELIMINABLE_REGS
1455 || ! CAN_ELIMINATE (ep->from, ep->to)
1456#endif
1457 )
1458 ep->can_eliminate = 0;
1459
32131a9c
RK
1460 /* Look for the case where we have discovered that we can't replace
1461 register A with register B and that means that we will now be
1462 trying to replace register A with register C. This means we can
1463 no longer replace register C with register B and we need to disable
1464 such an elimination, if it exists. This occurs often with A == ap,
1465 B == sp, and C == fp. */
a8fdc208 1466
32131a9c
RK
1467 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1468 {
1469 struct elim_table *op;
1470 register int new_to = -1;
1471
1472 if (! ep->can_eliminate && ep->can_eliminate_previous)
1473 {
1474 /* Find the current elimination for ep->from, if there is a
1475 new one. */
1476 for (op = reg_eliminate;
1477 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1478 if (op->from == ep->from && op->can_eliminate)
1479 {
1480 new_to = op->to;
1481 break;
1482 }
1483
1484 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1485 disable it. */
1486 for (op = reg_eliminate;
1487 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1488 if (op->from == new_to && op->to == ep->to)
1489 op->can_eliminate = 0;
1490 }
1491 }
1492
1493 /* See if any registers that we thought we could eliminate the previous
1494 time are no longer eliminable. If so, something has changed and we
1495 must spill the register. Also, recompute the number of eliminable
1496 registers and see if the frame pointer is needed; it is if there is
1497 no elimination of the frame pointer that we can perform. */
1498
1499 frame_pointer_needed = 1;
1500 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1501 {
3ec2ea3e
DE
1502 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1503 && ep->to != HARD_FRAME_POINTER_REGNUM)
32131a9c
RK
1504 frame_pointer_needed = 0;
1505
1506 if (! ep->can_eliminate && ep->can_eliminate_previous)
1507 {
1508 ep->can_eliminate_previous = 0;
1509 spill_hard_reg (ep->from, global, dumpfile, 1);
32131a9c
RK
1510 something_changed = 1;
1511 num_eliminable--;
1512 }
1513 }
1514
9ff3516a
RK
1515#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1516 /* If we didn't need a frame pointer last time, but we do now, spill
1517 the hard frame pointer. */
1518 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1519 {
1520 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1521 something_changed = 1;
1522 }
1523#endif
1524
32131a9c
RK
1525 /* If all needs are met, we win. */
1526
1527 for (i = 0; i < N_REG_CLASSES; i++)
1528 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1529 break;
1530 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1531 break;
1532
546b63fb
RK
1533 /* Not all needs are met; must spill some hard regs. */
1534
1535 /* Put all registers spilled so far back in potential_reload_regs, but
1536 put them at the front, since we've already spilled most of the
1537	 pseudos in them (we might have left some pseudos unspilled if they
1538	 were in a block that didn't need any spill registers of a conflicting
1539	 class).  We used to try to mark off the need for those registers,
1540 but doing so properly is very complex and reallocating them is the
1541 simpler approach. First, "pack" potential_reload_regs by pushing
1542 any nonnegative entries towards the end. That will leave room
1543 for the registers we already spilled.
1544
1545 Also, undo the marking of the spill registers from the last time
1546	 around in FORBIDDEN_REGS since we will probably be allocating
1547 them again below.
1548
1549 ??? It is theoretically possible that we might end up not using one
1550 of our previously-spilled registers in this allocation, even though
1551 they are at the head of the list. It's not clear what to do about
1552 this, but it was no better before, when we marked off the needs met
1553 by the previously-spilled registers. With the current code, globals
1554 can be allocated into these registers, but locals cannot. */
1555
1556 if (n_spills)
1557 {
1558 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1559 if (potential_reload_regs[i] != -1)
1560 potential_reload_regs[j--] = potential_reload_regs[i];
32131a9c 1561
546b63fb
RK
1562 for (i = 0; i < n_spills; i++)
1563 {
1564 potential_reload_regs[i] = spill_regs[i];
1565 spill_reg_order[spill_regs[i]] = -1;
1566 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1567 }
32131a9c 1568
546b63fb
RK
1569 n_spills = 0;
1570 }
32131a9c
RK
1571
1572       /* Now find more reload regs to satisfy the remaining need.
1573 Do it by ascending class number, since otherwise a reg
1574 might be spilled for a big class and might fail to count
1575 for a smaller class even though it belongs to that class.
1576
1577 Count spilled regs in `spills', and add entries to
1578 `spill_regs' and `spill_reg_order'.
1579
1580 ??? Note there is a problem here.
1581 When there is a need for a group in a high-numbered class,
1582 and also need for non-group regs that come from a lower class,
1583 the non-group regs are chosen first. If there aren't many regs,
1584 they might leave no room for a group.
1585
1586 This was happening on the 386. To fix it, we added the code
1587 that calls possible_group_p, so that the lower class won't
1588 break up the last possible group.
1589
1590 Really fixing the problem would require changes above
1591 in counting the regs already spilled, and in choose_reload_regs.
1592 It might be hard to avoid introducing bugs there. */
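	 /* Illustrative only (hypothetical numbering): if regs 0..3 were the
	    only spillable members of a class needing one group of 2 plus two
	    single regs, taking regs 0 and 2 for the singles first would leave
	    no adjacent pair free.  The possible_group_p test used below is
	    what keeps the single-reg choices from breaking the last such
	    pair.  */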
1593
546b63fb
RK
1594 CLEAR_HARD_REG_SET (counted_for_groups);
1595 CLEAR_HARD_REG_SET (counted_for_nongroups);
1596
32131a9c
RK
1597 for (class = 0; class < N_REG_CLASSES; class++)
1598 {
1599 /* First get the groups of registers.
1600 If we got single registers first, we might fragment
1601 possible groups. */
1602 while (max_groups[class] > 0)
1603 {
1604 /* If any single spilled regs happen to form groups,
1605 count them now. Maybe we don't really need
1606 to spill another group. */
066aca28
RK
1607 count_possible_groups (group_size, group_mode, max_groups,
1608 class);
32131a9c 1609
93193ab5
RK
1610 if (max_groups[class] <= 0)
1611 break;
1612
32131a9c
RK
1613 /* Groups of size 2 (the only groups used on most machines)
1614 are treated specially. */
1615 if (group_size[class] == 2)
1616 {
1617 /* First, look for a register that will complete a group. */
1618 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1619 {
32131a9c 1620 int other;
546b63fb
RK
1621
1622 j = potential_reload_regs[i];
32131a9c
RK
1623 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1624 &&
1625 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1626 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1627 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1628 && HARD_REGNO_MODE_OK (other, group_mode[class])
1629 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1630 other)
1631 /* We don't want one part of another group.
1632 We could get "two groups" that overlap! */
1633 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1634 ||
1635 (j < FIRST_PSEUDO_REGISTER - 1
1636 && (other = j + 1, spill_reg_order[other] >= 0)
1637 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1638 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1639 && HARD_REGNO_MODE_OK (j, group_mode[class])
1640 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1641 other)
1642 && ! TEST_HARD_REG_BIT (counted_for_groups,
1643 other))))
1644 {
1645 register enum reg_class *p;
1646
1647 /* We have found one that will complete a group,
1648 so count off one group as provided. */
1649 max_groups[class]--;
1650 p = reg_class_superclasses[class];
1651 while (*p != LIM_REG_CLASSES)
1652 max_groups[(int) *p++]--;
1653
1654 /* Indicate both these regs are part of a group. */
1655 SET_HARD_REG_BIT (counted_for_groups, j);
1656 SET_HARD_REG_BIT (counted_for_groups, other);
1657 break;
1658 }
1659 }
1660 /* We can't complete a group, so start one. */
92b0556d
RS
1661#ifdef SMALL_REGISTER_CLASSES
1662 /* Look for a pair neither of which is explicitly used. */
1663 if (i == FIRST_PSEUDO_REGISTER)
1664 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1665 {
1666 int k;
1667 j = potential_reload_regs[i];
1668 /* Verify that J+1 is a potential reload reg. */
1669 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1670 if (potential_reload_regs[k] == j + 1)
1671 break;
1672 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1673 && k < FIRST_PSEUDO_REGISTER
1674 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1675 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1676 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1677 && HARD_REGNO_MODE_OK (j, group_mode[class])
1678 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1679 j + 1)
1680 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1681 /* Reject J at this stage
1682 if J+1 was explicitly used. */
1683 && ! regs_explicitly_used[j + 1])
1684 break;
1685 }
1686#endif
1687 /* Now try any group at all
1688 whose registers are not in bad_spill_regs. */
32131a9c
RK
1689 if (i == FIRST_PSEUDO_REGISTER)
1690 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1691 {
57697575 1692 int k;
546b63fb 1693 j = potential_reload_regs[i];
57697575
RS
1694 /* Verify that J+1 is a potential reload reg. */
1695 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1696 if (potential_reload_regs[k] == j + 1)
1697 break;
32131a9c 1698 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
57697575 1699 && k < FIRST_PSEUDO_REGISTER
32131a9c
RK
1700 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1701 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1702 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1703 && HARD_REGNO_MODE_OK (j, group_mode[class])
1704 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
ee9f63c6
RS
1705 j + 1)
1706 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
32131a9c
RK
1707 break;
1708 }
1709
1710 /* I should be the index in potential_reload_regs
1711 of the new reload reg we have found. */
1712
5352b11a
RS
1713 if (i >= FIRST_PSEUDO_REGISTER)
1714 {
1715 /* There are no groups left to spill. */
1716 spill_failure (max_groups_insn[class]);
1717 failure = 1;
1718 goto failed;
1719 }
1720 else
1721 something_changed
fb3821f7 1722 |= new_spill_reg (i, class, max_needs, NULL_PTR,
5352b11a 1723 global, dumpfile);
32131a9c
RK
1724 }
1725 else
1726 {
1727 /* For groups of more than 2 registers,
1728 look for a sufficient sequence of unspilled registers,
1729 and spill them all at once. */
1730 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1731 {
32131a9c 1732 int k;
546b63fb
RK
1733
1734 j = potential_reload_regs[i];
9d1a4667
RS
1735 if (j >= 0
1736 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
32131a9c
RK
1737 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1738 {
1739 /* Check each reg in the sequence. */
1740 for (k = 0; k < group_size[class]; k++)
1741 if (! (spill_reg_order[j + k] < 0
1742 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1743 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1744 break;
1745 /* We got a full sequence, so spill them all. */
1746 if (k == group_size[class])
1747 {
1748 register enum reg_class *p;
1749 for (k = 0; k < group_size[class]; k++)
1750 {
1751 int idx;
1752 SET_HARD_REG_BIT (counted_for_groups, j + k);
1753 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1754 if (potential_reload_regs[idx] == j + k)
1755 break;
9d1a4667
RS
1756 something_changed
1757 |= new_spill_reg (idx, class,
1758 max_needs, NULL_PTR,
1759 global, dumpfile);
32131a9c
RK
1760 }
1761
1762 /* We have found one that will complete a group,
1763 so count off one group as provided. */
1764 max_groups[class]--;
1765 p = reg_class_superclasses[class];
1766 while (*p != LIM_REG_CLASSES)
1767 max_groups[(int) *p++]--;
1768
1769 break;
1770 }
1771 }
1772 }
fa52261e 1773 /* We couldn't find any registers for this reload.
9d1a4667
RS
1774 Avoid going into an infinite loop. */
1775 if (i >= FIRST_PSEUDO_REGISTER)
1776 {
1777 /* There are no groups left. */
1778 spill_failure (max_groups_insn[class]);
1779 failure = 1;
1780 goto failed;
1781 }
32131a9c
RK
1782 }
1783 }
1784
1785 /* Now similarly satisfy all need for single registers. */
1786
1787 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1788 {
9a6cde3a
RS
1789#ifdef SMALL_REGISTER_CLASSES
1790 /* This should be right for all machines, but only the 386
1791 is known to need it, so this conditional plays safe.
1792 ??? For 2.5, try making this unconditional. */
1793 /* If we spilled enough regs, but they weren't counted
1794 against the non-group need, see if we can count them now.
1795 If so, we can avoid some actual spilling. */
1796 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1797 for (i = 0; i < n_spills; i++)
1798 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1799 spill_regs[i])
1800 && !TEST_HARD_REG_BIT (counted_for_groups,
1801 spill_regs[i])
1802 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1803 spill_regs[i])
1804 && max_nongroups[class] > 0)
1805 {
1806 register enum reg_class *p;
1807
1808 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1809 max_nongroups[class]--;
1810 p = reg_class_superclasses[class];
1811 while (*p != LIM_REG_CLASSES)
1812 max_nongroups[(int) *p++]--;
1813 }
1814 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1815 break;
1816#endif
1817
32131a9c
RK
1818 /* Consider the potential reload regs that aren't
1819 yet in use as reload regs, in order of preference.
1820 Find the most preferred one that's in this class. */
1821
1822 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1823 if (potential_reload_regs[i] >= 0
1824 && TEST_HARD_REG_BIT (reg_class_contents[class],
1825 potential_reload_regs[i])
1826 /* If this reg will not be available for groups,
1827 pick one that does not foreclose possible groups.
1828 This is a kludge, and not very general,
1829 but it should be sufficient to make the 386 work,
1830 and the problem should not occur on machines with
1831 more registers. */
1832 && (max_nongroups[class] == 0
1833 || possible_group_p (potential_reload_regs[i], max_groups)))
1834 break;
1835
e404a39a
RK
1836 /* If we couldn't get a register, try to get one even if we
1837 might foreclose possible groups. This may cause problems
1838 later, but that's better than aborting now, since it is
1839 possible that we will, in fact, be able to form the needed
1840 group even with this allocation. */
1841
1842 if (i >= FIRST_PSEUDO_REGISTER
1843 && (asm_noperands (max_needs[class] > 0
1844 ? max_needs_insn[class]
1845 : max_nongroups_insn[class])
1846 < 0))
1847 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1848 if (potential_reload_regs[i] >= 0
1849 && TEST_HARD_REG_BIT (reg_class_contents[class],
1850 potential_reload_regs[i]))
1851 break;
1852
32131a9c
RK
1853 /* I should be the index in potential_reload_regs
1854 of the new reload reg we have found. */
1855
5352b11a
RS
1856 if (i >= FIRST_PSEUDO_REGISTER)
1857 {
1858 /* There are no possible registers left to spill. */
1859 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1860 : max_nongroups_insn[class]);
1861 failure = 1;
1862 goto failed;
1863 }
1864 else
1865 something_changed
1866 |= new_spill_reg (i, class, max_needs, max_nongroups,
1867 global, dumpfile);
32131a9c
RK
1868 }
1869 }
1870 }
1871
1872 /* If global-alloc was run, notify it of any register eliminations we have
1873 done. */
1874 if (global)
1875 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1876 if (ep->can_eliminate)
1877 mark_elimination (ep->from, ep->to);
1878
32131a9c 1879 /* Insert code to save and restore call-clobbered hard regs
a8efe40d
RK
1880	 around calls.  Tell it what mode to use so that we will process
1881 those insns in reload_as_needed if we have to. */
32131a9c
RK
1882
1883 if (caller_save_needed)
a8efe40d
RK
1884 save_call_clobbered_regs (num_eliminable ? QImode
1885 : caller_save_spill_class != NO_REGS ? HImode
1886 : VOIDmode);
32131a9c
RK
1887
1888 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1889 If that insn didn't set the register (i.e., it copied the register to
1890 memory), just delete that insn instead of the equivalencing insn plus
1891 anything now dead. If we call delete_dead_insn on that insn, we may
1892	 delete the insn that actually sets the register if the register dies
1893 there and that is incorrect. */
1894
1895 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1896 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1897 && GET_CODE (reg_equiv_init[i]) != NOTE)
1898 {
1899 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1900 delete_dead_insn (reg_equiv_init[i]);
1901 else
1902 {
1903 PUT_CODE (reg_equiv_init[i], NOTE);
1904 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1905 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1906 }
1907 }
1908
1909 /* Use the reload registers where necessary
1910 by generating move instructions to move the must-be-register
1911 values into or out of the reload registers. */
1912
a8efe40d
RK
1913 if (something_needs_reloads || something_needs_elimination
1914 || (caller_save_needed && num_eliminable)
1915 || caller_save_spill_class != NO_REGS)
32131a9c
RK
1916 reload_as_needed (first, global);
1917
2a1f8b6b 1918 /* If we were able to eliminate the frame pointer, show that it is no
546b63fb 1919	 longer live at the start of any basic block.  If it is live by
2a1f8b6b
RK
1920 virtue of being in a pseudo, that pseudo will be marked live
1921 and hence the frame pointer will be known to be live via that
1922 pseudo. */
1923
1924 if (! frame_pointer_needed)
1925 for (i = 0; i < n_basic_blocks; i++)
3ec2ea3e
DE
1926 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1927 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1928 % REGSET_ELT_BITS));
2a1f8b6b 1929
5352b11a
RS
1930 /* Come here (with failure set nonzero) if we can't get enough spill regs
1931 and we decide not to abort about it. */
1932 failed:
1933
a3ec87a8
RS
1934 reload_in_progress = 0;
1935
32131a9c
RK
1936 /* Now eliminate all pseudo regs by modifying them into
1937 their equivalent memory references.
1938 The REG-rtx's for the pseudos are modified in place,
1939 so all insns that used to refer to them now refer to memory.
1940
1941 For a reg that has a reg_equiv_address, all those insns
1942 were changed by reloading so that no insns refer to it any longer;
1943 but the DECL_RTL of a variable decl may refer to it,
1944 and if so this causes the debugging info to mention the variable. */
1945
1946 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1947 {
1948 rtx addr = 0;
ab1fd483 1949 int in_struct = 0;
32131a9c 1950 if (reg_equiv_mem[i])
ab1fd483
RS
1951 {
1952 addr = XEXP (reg_equiv_mem[i], 0);
1953 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1954 }
32131a9c
RK
1955 if (reg_equiv_address[i])
1956 addr = reg_equiv_address[i];
1957 if (addr)
1958 {
1959 if (reg_renumber[i] < 0)
1960 {
1961 rtx reg = regno_reg_rtx[i];
1962 XEXP (reg, 0) = addr;
1963 REG_USERVAR_P (reg) = 0;
ab1fd483 1964 MEM_IN_STRUCT_P (reg) = in_struct;
32131a9c
RK
1965 PUT_CODE (reg, MEM);
1966 }
1967 else if (reg_equiv_mem[i])
1968 XEXP (reg_equiv_mem[i], 0) = addr;
1969 }
1970 }
1971
1972#ifdef PRESERVE_DEATH_INFO_REGNO_P
1973 /* Make a pass over all the insns and remove death notes for things that
1974 are no longer registers or no longer die in the insn (e.g., an input
1975 and output pseudo being tied). */
1976
1977 for (insn = first; insn; insn = NEXT_INSN (insn))
1978 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1979 {
1980 rtx note, next;
1981
1982 for (note = REG_NOTES (insn); note; note = next)
1983 {
1984 next = XEXP (note, 1);
1985 if (REG_NOTE_KIND (note) == REG_DEAD
1986 && (GET_CODE (XEXP (note, 0)) != REG
1987 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1988 remove_note (insn, note);
1989 }
1990 }
1991#endif
1992
1993 /* Indicate that we no longer have known memory locations or constants. */
1994 reg_equiv_constant = 0;
1995 reg_equiv_memory_loc = 0;
5352b11a 1996
c8ab4464
RS
1997 if (scratch_list)
1998 free (scratch_list);
c307c237 1999 scratch_list = 0;
c8ab4464
RS
2000 if (scratch_block)
2001 free (scratch_block);
c307c237
RK
2002 scratch_block = 0;
2003
5352b11a 2004 return failure;
32131a9c
RK
2005}
2006\f
2007/* Nonzero if, after spilling reg REGNO for non-groups,
2008 it will still be possible to find a group if we still need one. */
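/* For instance (hypothetical numbering): if REGNO is 5 and some class still
   needs a group of 2, any other adjacent pair such as (2,3) that can still
   be spilled, or is half-spilled but not otherwise reserved, is enough for
   a nonzero answer.  */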
2009
2010static int
2011possible_group_p (regno, max_groups)
2012 int regno;
2013 int *max_groups;
2014{
2015 int i;
2016 int class = (int) NO_REGS;
2017
2018 for (i = 0; i < (int) N_REG_CLASSES; i++)
2019 if (max_groups[i] > 0)
2020 {
2021 class = i;
2022 break;
2023 }
2024
2025 if (class == (int) NO_REGS)
2026 return 1;
2027
2028 /* Consider each pair of consecutive registers. */
2029 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2030 {
2031 /* Ignore pairs that include reg REGNO. */
2032 if (i == regno || i + 1 == regno)
2033 continue;
2034
2035 /* Ignore pairs that are outside the class that needs the group.
2036 ??? Here we fail to handle the case where two different classes
2037 independently need groups. But this never happens with our
2038 current machine descriptions. */
2039 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2040 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2041 continue;
2042
2043 /* A pair of consecutive regs we can still spill does the trick. */
2044 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2045 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2046 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2047 return 1;
2048
2049 /* A pair of one already spilled and one we can spill does it
2050 provided the one already spilled is not otherwise reserved. */
2051 if (spill_reg_order[i] < 0
2052 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2053 && spill_reg_order[i + 1] >= 0
2054 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2055 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2056 return 1;
2057 if (spill_reg_order[i + 1] < 0
2058 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2059 && spill_reg_order[i] >= 0
2060 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2061 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2062 return 1;
2063 }
2064
2065 return 0;
2066}
2067\f
066aca28
RK
2068/* Count any groups of CLASS that can be formed from the registers recently
2069 spilled. */
32131a9c
RK
2070
2071static void
066aca28 2072count_possible_groups (group_size, group_mode, max_groups, class)
546b63fb 2073 int *group_size;
32131a9c 2074 enum machine_mode *group_mode;
546b63fb 2075 int *max_groups;
066aca28 2076 int class;
32131a9c 2077{
066aca28
RK
2078 HARD_REG_SET new;
2079 int i, j;
2080
32131a9c
RK
2081 /* Now find all consecutive groups of spilled registers
2082 and mark each group off against the need for such groups.
2083 But don't count them against ordinary need, yet. */
2084
066aca28
RK
2085 if (group_size[class] == 0)
2086 return;
2087
2088 CLEAR_HARD_REG_SET (new);
2089
2090	  /* Make a mask of all the regs that are spill regs in class CLASS. */
2091 for (i = 0; i < n_spills; i++)
2092 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2093 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2094 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2095 SET_HARD_REG_BIT (new, spill_regs[i]);
2096
2097 /* Find each consecutive group of them. */
2098 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2099 if (TEST_HARD_REG_BIT (new, i)
2100 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2101 && HARD_REGNO_MODE_OK (i, group_mode[class]))
32131a9c 2102 {
066aca28
RK
2103 for (j = 1; j < group_size[class]; j++)
2104 if (! TEST_HARD_REG_BIT (new, i + j))
2105 break;
32131a9c 2106
066aca28
RK
2107 if (j == group_size[class])
2108 {
2109 /* We found a group. Mark it off against this class's need for
2110 groups, and against each superclass too. */
2111 register enum reg_class *p;
2112
2113 max_groups[class]--;
2114 p = reg_class_superclasses[class];
2115 while (*p != LIM_REG_CLASSES)
2116 max_groups[(int) *p++]--;
2117
2118 /* Don't count these registers again. */
2119	 for (j = 0; j < group_size[class]; j++)
2120 SET_HARD_REG_BIT (counted_for_groups, i + j);
2121 }
2122
2123 /* Skip to the last reg in this group. When i is incremented above,
2124 it will then point to the first reg of the next possible group. */
2125 i += j - 1;
2126 }
32131a9c
RK
2127}
2128\f
2129/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2130 another mode that needs to be reloaded for the same register class CLASS.
2131 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2132 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2133
2134 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2135 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2136 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2137 causes unnecessary failures on machines requiring alignment of register
2138 groups when the two modes are different sizes, because the larger mode has
2139 more strict alignment rules than the smaller mode. */
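/* A sketch of the remaining test (hypothetical target): if ALLOCATE_MODE is
   DImode, OTHER_MODE is SImode, and some register in CLASS accepts DImode
   but rejects SImode, the two needs cannot be counted together and we
   return 0.  */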
2140
2141static int
2142modes_equiv_for_class_p (allocate_mode, other_mode, class)
2143 enum machine_mode allocate_mode, other_mode;
2144 enum reg_class class;
2145{
2146 register int regno;
2147 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2148 {
2149 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2150 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2151 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2152 return 0;
2153 }
2154 return 1;
2155}
2156
5352b11a
RS
2157/* Handle the failure to find a register to spill.
2158 INSN should be one of the insns which needed this particular spill reg. */
2159
2160static void
2161spill_failure (insn)
2162 rtx insn;
2163{
2164 if (asm_noperands (PATTERN (insn)) >= 0)
2165 error_for_asm (insn, "`asm' needs too many reloads");
2166 else
a89b2cc4 2167 fatal_insn ("Unable to find a register to spill.", insn);
5352b11a
RS
2168}
2169
32131a9c
RK
2170/* Add a new register to the tables of available spill-registers
2171 (as well as spilling all pseudos allocated to the register).
2172 I is the index of this register in potential_reload_regs.
2173 CLASS is the regclass whose need is being satisfied.
2174 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2175 so that this register can count off against them.
2176 MAX_NONGROUPS is 0 if this register is part of a group.
2177 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2178
2179static int
2180new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2181 int i;
2182 int class;
2183 int *max_needs;
2184 int *max_nongroups;
2185 int global;
2186 FILE *dumpfile;
2187{
2188 register enum reg_class *p;
2189 int val;
2190 int regno = potential_reload_regs[i];
2191
2192 if (i >= FIRST_PSEUDO_REGISTER)
2193 abort (); /* Caller failed to find any register. */
2194
2195 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2196 fatal ("fixed or forbidden register was spilled.\n\
56f58d3a
RK
2197This may be due to a compiler bug or to impossible asm\n\
2198statements or clauses.");
32131a9c
RK
2199
2200 /* Make reg REGNO an additional reload reg. */
2201
2202 potential_reload_regs[i] = -1;
2203 spill_regs[n_spills] = regno;
2204 spill_reg_order[regno] = n_spills;
2205 if (dumpfile)
2206 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2207
2208 /* Clear off the needs we just satisfied. */
2209
2210 max_needs[class]--;
2211 p = reg_class_superclasses[class];
2212 while (*p != LIM_REG_CLASSES)
2213 max_needs[(int) *p++]--;
2214
2215 if (max_nongroups && max_nongroups[class] > 0)
2216 {
2217 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2218 max_nongroups[class]--;
2219 p = reg_class_superclasses[class];
2220 while (*p != LIM_REG_CLASSES)
2221 max_nongroups[(int) *p++]--;
2222 }
2223
2224 /* Spill every pseudo reg that was allocated to this reg
2225 or to something that overlaps this reg. */
2226
2227 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2228
2229 /* If there are some registers still to eliminate and this register
2230 wasn't ever used before, additional stack space may have to be
2231 allocated to store this register. Thus, we may have changed the offset
2232 between the stack and frame pointers, so mark that something has changed.
2233 (If new pseudos were spilled, thus requiring more space, VAL would have
2234 been set non-zero by the call to spill_hard_reg above since additional
2235	 reloads may be needed in that case.)
2236
2237 One might think that we need only set VAL to 1 if this is a call-used
2238 register. However, the set of registers that must be saved by the
2239 prologue is not identical to the call-used set. For example, the
2240 register used by the call insn for the return PC is a call-used register,
2241 but must be saved by the prologue. */
2242 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2243 val = 1;
2244
2245 regs_ever_live[spill_regs[n_spills]] = 1;
2246 n_spills++;
2247
2248 return val;
2249}
2250\f
2251/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2252 data that is dead in INSN. */
2253
2254static void
2255delete_dead_insn (insn)
2256 rtx insn;
2257{
2258 rtx prev = prev_real_insn (insn);
2259 rtx prev_dest;
2260
2261 /* If the previous insn sets a register that dies in our insn, delete it
2262 too. */
2263 if (prev && GET_CODE (PATTERN (prev)) == SET
2264 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2265 && reg_mentioned_p (prev_dest, PATTERN (insn))
2266 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2267 delete_dead_insn (prev);
2268
2269 PUT_CODE (insn, NOTE);
2270 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2271 NOTE_SOURCE_FILE (insn) = 0;
2272}
2273
2274/* Modify the home of pseudo-reg I.
2275 The new home is present in reg_renumber[I].
2276
2277 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2278 or it may be -1, meaning there is none or it is not relevant.
2279 This is used so that all pseudos spilled from a given hard reg
2280 can share one stack slot. */
2281
2282static void
2283alter_reg (i, from_reg)
2284 register int i;
2285 int from_reg;
2286{
2287 /* When outputting an inline function, this can happen
2288 for a reg that isn't actually used. */
2289 if (regno_reg_rtx[i] == 0)
2290 return;
2291
2292 /* If the reg got changed to a MEM at rtl-generation time,
2293 ignore it. */
2294 if (GET_CODE (regno_reg_rtx[i]) != REG)
2295 return;
2296
2297 /* Modify the reg-rtx to contain the new hard reg
2298 number or else to contain its pseudo reg number. */
2299 REGNO (regno_reg_rtx[i])
2300 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2301
2302 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2303 allocate a stack slot for it. */
2304
2305 if (reg_renumber[i] < 0
2306 && reg_n_refs[i] > 0
2307 && reg_equiv_constant[i] == 0
2308 && reg_equiv_memory_loc[i] == 0)
2309 {
2310 register rtx x;
2311 int inherent_size = PSEUDO_REGNO_BYTES (i);
2312 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2313 int adjust = 0;
2314
2315 /* Each pseudo reg has an inherent size which comes from its own mode,
2316 and a total size which provides room for paradoxical subregs
2317 which refer to the pseudo reg in wider modes.
2318
2319 We can use a slot already allocated if it provides both
2320 enough inherent space and enough total space.
2321 Otherwise, we allocate a new slot, making sure that it has no less
2322	 inherent space, and no less total space, than the previous slot. */
2323 if (from_reg == -1)
2324 {
2325 /* No known place to spill from => no slot to reuse. */
2326 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2327#if BYTES_BIG_ENDIAN
2328 /* Cancel the big-endian correction done in assign_stack_local.
2329 Get the address of the beginning of the slot.
2330 This is so we can do a big-endian correction unconditionally
2331 below. */
2332 adjust = inherent_size - total_size;
2333#endif
2334 }
2335 /* Reuse a stack slot if possible. */
2336 else if (spill_stack_slot[from_reg] != 0
2337 && spill_stack_slot_width[from_reg] >= total_size
2338 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2339 >= inherent_size))
2340 x = spill_stack_slot[from_reg];
2341 /* Allocate a bigger slot. */
2342 else
2343 {
2344 /* Compute maximum size needed, both for inherent size
2345 and for total size. */
2346 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2347 if (spill_stack_slot[from_reg])
2348 {
2349 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2350 > inherent_size)
2351 mode = GET_MODE (spill_stack_slot[from_reg]);
2352 if (spill_stack_slot_width[from_reg] > total_size)
2353 total_size = spill_stack_slot_width[from_reg];
2354 }
2355 /* Make a slot with that size. */
2356 x = assign_stack_local (mode, total_size, -1);
2357#if BYTES_BIG_ENDIAN
2358 /* Cancel the big-endian correction done in assign_stack_local.
2359 Get the address of the beginning of the slot.
2360 This is so we can do a big-endian correction unconditionally
2361 below. */
2362 adjust = GET_MODE_SIZE (mode) - total_size;
2363#endif
2364 spill_stack_slot[from_reg] = x;
2365 spill_stack_slot_width[from_reg] = total_size;
2366 }
2367
2368#if BYTES_BIG_ENDIAN
2369 /* On a big endian machine, the "address" of the slot
2370 is the address of the low part that fits its inherent mode. */
2371 if (inherent_size < total_size)
2372 adjust += (total_size - inherent_size);
2373#endif /* BYTES_BIG_ENDIAN */
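	  /* Worked example (hypothetical big-endian target): an SImode pseudo
	     that is also referenced through a paradoxical DImode subreg
	     (inherent size 4, total size 8) reusing an existing 8-byte DImode
	     slot gets ADJUST = 4, so the code below builds an SImode MEM at
	     slot+4, the low-order word, consistent with the freshly-allocated
	     slot path above.  */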
2374
2375 /* If we have any adjustment to make, or if the stack slot is the
2376 wrong mode, make a new stack slot. */
2377 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2378 {
2379 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2380 plus_constant (XEXP (x, 0), adjust));
2381 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2382 }
2383
2384 /* Save the stack slot for later. */
2385 reg_equiv_memory_loc[i] = x;
2386 }
2387}
2388
2389/* Mark the slots in regs_ever_live for the hard regs
2390 used by pseudo-reg number REGNO. */
2391
2392void
2393mark_home_live (regno)
2394 int regno;
2395{
2396 register int i, lim;
2397 i = reg_renumber[regno];
2398 if (i < 0)
2399 return;
2400 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2401 while (i < lim)
2402 regs_ever_live[i++] = 1;
2403}
c307c237
RK
2404
2405/* Mark the registers used in SCRATCH as being live. */
2406
2407static void
2408mark_scratch_live (scratch)
2409 rtx scratch;
2410{
2411 register int i;
2412 int regno = REGNO (scratch);
2413 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2414
2415 for (i = regno; i < lim; i++)
2416 regs_ever_live[i] = 1;
2417}
32131a9c
RK
2418\f
2419/* This function handles the tracking of elimination offsets around branches.
2420
2421 X is a piece of RTL being scanned.
2422
2423 INSN is the insn that it came from, if any.
2424
2425 INITIAL_P is non-zero if we are to set the offset to be the initial
2426 offset and zero if we are setting the offset of the label to be the
2427 current offset. */
2428
2429static void
2430set_label_offsets (x, insn, initial_p)
2431 rtx x;
2432 rtx insn;
2433 int initial_p;
2434{
2435 enum rtx_code code = GET_CODE (x);
2436 rtx tem;
2437 int i;
2438 struct elim_table *p;
2439
2440 switch (code)
2441 {
2442 case LABEL_REF:
8be386d9
RS
2443 if (LABEL_REF_NONLOCAL_P (x))
2444 return;
2445
32131a9c
RK
2446 x = XEXP (x, 0);
2447
2448 /* ... fall through ... */
2449
2450 case CODE_LABEL:
2451 /* If we know nothing about this label, set the desired offsets. Note
2452 that this sets the offset at a label to be the offset before a label
2453 if we don't know anything about the label. This is not correct for
2454 the label after a BARRIER, but is the best guess we can make. If
2455 we guessed wrong, we will suppress an elimination that might have
2456 been possible had we been able to guess correctly. */
2457
2458 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2459 {
2460 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2461 offsets_at[CODE_LABEL_NUMBER (x)][i]
2462 = (initial_p ? reg_eliminate[i].initial_offset
2463 : reg_eliminate[i].offset);
2464 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2465 }
2466
2467 /* Otherwise, if this is the definition of a label and it is
d45cf215 2468 preceded by a BARRIER, set our offsets to the known offset of
32131a9c
RK
2469 that label. */
2470
2471 else if (x == insn
2472 && (tem = prev_nonnote_insn (insn)) != 0
2473 && GET_CODE (tem) == BARRIER)
2a4b5f3b
RK
2474 {
2475 num_not_at_initial_offset = 0;
2476 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2477 {
2478 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2479 = offsets_at[CODE_LABEL_NUMBER (x)][i];
1d0d98f3
RK
2480 if (reg_eliminate[i].can_eliminate
2481 && (reg_eliminate[i].offset
2482 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
2483 num_not_at_initial_offset++;
2484 }
2485 }
32131a9c
RK
2486
2487 else
2488 /* If neither of the above cases is true, compare each offset
2489 with those previously recorded and suppress any eliminations
2490 where the offsets disagree. */
a8fdc208 2491
32131a9c
RK
2492 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2493 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2494 != (initial_p ? reg_eliminate[i].initial_offset
2495 : reg_eliminate[i].offset))
2496 reg_eliminate[i].can_eliminate = 0;
2497
2498 return;
2499
2500 case JUMP_INSN:
2501 set_label_offsets (PATTERN (insn), insn, initial_p);
2502
2503 /* ... fall through ... */
2504
2505 case INSN:
2506 case CALL_INSN:
2507 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2508 and hence must have all eliminations at their initial offsets. */
2509 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2510 if (REG_NOTE_KIND (tem) == REG_LABEL)
2511 set_label_offsets (XEXP (tem, 0), insn, 1);
2512 return;
2513
2514 case ADDR_VEC:
2515 case ADDR_DIFF_VEC:
2516 /* Each of the labels in the address vector must be at their initial
2517	 offsets.  We want the first field for ADDR_VEC and the second
2518 field for ADDR_DIFF_VEC. */
2519
2520 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2521 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2522 insn, initial_p);
2523 return;
2524
2525 case SET:
2526 /* We only care about setting PC. If the source is not RETURN,
2527 IF_THEN_ELSE, or a label, disable any eliminations not at
2528 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2529 isn't one of those possibilities. For branches to a label,
2530 call ourselves recursively.
2531
2532 Note that this can disable elimination unnecessarily when we have
2533 a non-local goto since it will look like a non-constant jump to
2534 someplace in the current function. This isn't a significant
2535 problem since such jumps will normally be when all elimination
2536 pairs are back to their initial offsets. */
2537
2538 if (SET_DEST (x) != pc_rtx)
2539 return;
2540
2541 switch (GET_CODE (SET_SRC (x)))
2542 {
2543 case PC:
2544 case RETURN:
2545 return;
2546
2547 case LABEL_REF:
2548 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2549 return;
2550
2551 case IF_THEN_ELSE:
2552 tem = XEXP (SET_SRC (x), 1);
2553 if (GET_CODE (tem) == LABEL_REF)
2554 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2555 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2556 break;
2557
2558 tem = XEXP (SET_SRC (x), 2);
2559 if (GET_CODE (tem) == LABEL_REF)
2560 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2561 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2562 break;
2563 return;
2564 }
2565
2566 /* If we reach here, all eliminations must be at their initial
2567 offset because we are doing a jump to a variable address. */
2568 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2569 if (p->offset != p->initial_offset)
2570 p->can_eliminate = 0;
2571 }
2572}
2573\f
2574/* Used for communication between the next two functions to properly share
2575 the vector for an ASM_OPERANDS. */
2576
2577static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2578
a8fdc208 2579/* Scan X and replace any eliminable registers (such as fp) with a
32131a9c
RK
2580 replacement (such as sp), plus an offset.
2581
2582 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2583 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2584 MEM, we are allowed to replace a sum of a register and the constant zero
2585 with the register, which we cannot do outside a MEM. In addition, we need
2586 to record the fact that a register is referenced outside a MEM.
2587
ff32812a 2588 If INSN is an insn, it is the insn containing X. If we replace a REG
32131a9c
RK
2589 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2590 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2591	 the REG is being modified.
2592
ff32812a
RS
2593 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2594 That's used when we eliminate in expressions stored in notes.
2595 This means, do not set ref_outside_mem even if the reference
2596 is outside of MEMs.
2597
32131a9c
RK
2598 If we see a modification to a register we know about, take the
2599 appropriate action (see case SET, below).
2600
2601	 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2602 replacements done assuming all offsets are at their initial values. If
2603 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2604 encounter, return the actual location so that find_reloads will do
2605 the proper thing. */
2606
2607rtx
2608eliminate_regs (x, mem_mode, insn)
2609 rtx x;
2610 enum machine_mode mem_mode;
2611 rtx insn;
2612{
2613 enum rtx_code code = GET_CODE (x);
2614 struct elim_table *ep;
2615 int regno;
2616 rtx new;
2617 int i, j;
2618 char *fmt;
2619 int copied = 0;
2620
2621 switch (code)
2622 {
2623 case CONST_INT:
2624 case CONST_DOUBLE:
2625 case CONST:
2626 case SYMBOL_REF:
2627 case CODE_LABEL:
2628 case PC:
2629 case CC0:
2630 case ASM_INPUT:
2631 case ADDR_VEC:
2632 case ADDR_DIFF_VEC:
2633 case RETURN:
2634 return x;
2635
2636 case REG:
2637 regno = REGNO (x);
2638
2639 /* First handle the case where we encounter a bare register that
2640 is eliminable. Replace it with a PLUS. */
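	  /* For example (offset hypothetical): if fp is being eliminated in
	     favor of sp and the recorded offset is 16, a bare (reg fp) becomes
	     (plus (reg sp) (const_int 16)) via the plus_constant call below.  */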
2641 if (regno < FIRST_PSEUDO_REGISTER)
2642 {
2643 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2644 ep++)
2645 if (ep->from_rtx == x && ep->can_eliminate)
2646 {
ff32812a
RS
2647 if (! mem_mode
2648 /* Refs inside notes don't count for this purpose. */
fe089a90 2649 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
ff32812a 2650 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2651 ep->ref_outside_mem = 1;
2652 return plus_constant (ep->to_rtx, ep->previous_offset);
2653 }
2654
2655 }
2656 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2657 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2658 {
2659 /* In this case, find_reloads would attempt to either use an
2660 incorrect address (if something is not at its initial offset)
2661	     or substitute a replaced address into an insn (which loses
2662 if the offset is changed by some later action). So we simply
2663 return the replaced stack slot (assuming it is changed by
2664 elimination) and ignore the fact that this is actually a
2665 reference to the pseudo. Ensure we make a copy of the
2666 address in case it is shared. */
fb3821f7 2667 new = eliminate_regs (reg_equiv_memory_loc[regno],
e5687447 2668 mem_mode, insn);
32131a9c 2669 if (new != reg_equiv_memory_loc[regno])
208dffa5
RS
2670 {
2671 cannot_omit_stores[regno] = 1;
2672 return copy_rtx (new);
2673 }
32131a9c
RK
2674 }
2675 return x;
2676
2677 case PLUS:
2678 /* If this is the sum of an eliminable register and a constant, rework
2679 the sum. */
2680 if (GET_CODE (XEXP (x, 0)) == REG
2681 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2682 && CONSTANT_P (XEXP (x, 1)))
2683 {
2684 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2685 ep++)
2686 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2687 {
e5687447
JW
2688 if (! mem_mode
2689 /* Refs inside notes don't count for this purpose. */
2690 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2691 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2692 ep->ref_outside_mem = 1;
2693
2694 /* The only time we want to replace a PLUS with a REG (this
2695 occurs when the constant operand of the PLUS is the negative
2696 of the offset) is when we are inside a MEM. We won't want
2697 to do so at other times because that would change the
2698 structure of the insn in a way that reload can't handle.
2699 We special-case the commonest situation in
2700 eliminate_regs_in_insn, so just replace a PLUS with a
2701 PLUS here, unless inside a MEM. */
a23b64d5 2702 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
32131a9c
RK
2703 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2704 return ep->to_rtx;
2705 else
2706 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2707 plus_constant (XEXP (x, 1),
2708 ep->previous_offset));
2709 }
2710
2711 /* If the register is not eliminable, we are done since the other
2712 operand is a constant. */
2713 return x;
2714 }
2715
2716 /* If this is part of an address, we want to bring any constant to the
2717 outermost PLUS. We will do this by doing register replacement in
2718 our operands and seeing if a constant shows up in one of them.
2719
2720 We assume here this is part of an address (or a "load address" insn)
2721 since an eliminable register is not likely to appear in any other
2722 context.
2723
2724 If we have (plus (eliminable) (reg)), we want to produce
2725	 (plus (plus (replacement) (reg)) (const)).  If this was part of a
2726 normal add insn, (plus (replacement) (reg)) will be pushed as a
2727 reload. This is the desired action. */
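	 /* Concretely (offset hypothetical): with fp eliminated to sp+16,
	    (plus (reg fp) (reg 3)) becomes
	    (plus (plus (reg sp) (reg 3)) (const_int 16))
	    once form_sum below brings the constant to the outermost PLUS.  */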
2728
2729 {
e5687447
JW
2730 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2731 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
32131a9c
RK
2732
2733 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2734 {
2735 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2736 didn't get a hard register but has a reg_equiv_constant,
32131a9c
RK
2737 we must replace the constant here since it may no longer
2738 be in the position of any operand. */
2739 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2740 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2741 && reg_renumber[REGNO (new1)] < 0
2742 && reg_equiv_constant != 0
2743 && reg_equiv_constant[REGNO (new1)] != 0)
2744 new1 = reg_equiv_constant[REGNO (new1)];
2745 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2746 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2747 && reg_renumber[REGNO (new0)] < 0
2748 && reg_equiv_constant[REGNO (new0)] != 0)
2749 new0 = reg_equiv_constant[REGNO (new0)];
2750
2751 new = form_sum (new0, new1);
2752
2753 /* As above, if we are not inside a MEM we do not want to
2754 turn a PLUS into something else. We might try to do so here
2755 for an addition of 0 if we aren't optimizing. */
2756 if (! mem_mode && GET_CODE (new) != PLUS)
2757 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2758 else
2759 return new;
2760 }
2761 }
2762 return x;
2763
981c7390
RK
2764 case MULT:
2765 /* If this is the product of an eliminable register and a
2766	 constant, apply the distributive law and move the constant out
2767	 so that we have (plus (mult ..) ..).  This is needed in order
2768	 to keep load-address insns valid.  This case is pathological.
2769 We ignore the possibility of overflow here. */
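	 /* E.g. (offset hypothetical): with fp eliminated to sp+16,
	    (mult (reg fp) (const_int 4)) becomes
	    (plus (mult (reg sp) (const_int 4)) (const_int 64)).  */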
2770 if (GET_CODE (XEXP (x, 0)) == REG
2771 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2772 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2773 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2774 ep++)
2775 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2776 {
2777 if (! mem_mode
2778 /* Refs inside notes don't count for this purpose. */
2779 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2780 || GET_CODE (insn) == INSN_LIST)))
2781 ep->ref_outside_mem = 1;
2782
2783 return
2784 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2785 ep->previous_offset * INTVAL (XEXP (x, 1)));
2786 }
32131a9c
RK
2787
2788 /* ... fall through ... */
2789
32131a9c
RK
2790 case CALL:
2791 case COMPARE:
2792 case MINUS:
32131a9c
RK
2793 case DIV: case UDIV:
2794 case MOD: case UMOD:
2795 case AND: case IOR: case XOR:
45620ed4
RK
2796 case ROTATERT: case ROTATE:
2797 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
32131a9c
RK
2798 case NE: case EQ:
2799 case GE: case GT: case GEU: case GTU:
2800 case LE: case LT: case LEU: case LTU:
2801 {
e5687447 2802 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
fb3821f7 2803 rtx new1
e5687447 2804 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
32131a9c
RK
2805
2806 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2807 return gen_rtx (code, GET_MODE (x), new0, new1);
2808 }
2809 return x;
2810
981c7390
RK
2811 case EXPR_LIST:
2812 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2813 if (XEXP (x, 0))
2814 {
2815 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2816 if (new != XEXP (x, 0))
2817 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2818 }
2819
2820 /* ... fall through ... */
2821
2822 case INSN_LIST:
2823 /* Now do eliminations in the rest of the chain. If this was
2824 an EXPR_LIST, this might result in allocating more memory than is
2825 strictly needed, but it simplifies the code. */
2826 if (XEXP (x, 1))
2827 {
2828 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2829 if (new != XEXP (x, 1))
2830 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2831 }
2832 return x;
2833
32131a9c
RK
2834 case PRE_INC:
2835 case POST_INC:
2836 case PRE_DEC:
2837 case POST_DEC:
2838 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2839 if (ep->to_rtx == XEXP (x, 0))
2840 {
4c05b187
RK
2841 int size = GET_MODE_SIZE (mem_mode);
2842
2843 /* If more bytes than MEM_MODE are pushed, account for them. */
2844#ifdef PUSH_ROUNDING
2845 if (ep->to_rtx == stack_pointer_rtx)
2846 size = PUSH_ROUNDING (size);
2847#endif
32131a9c 2848 if (code == PRE_DEC || code == POST_DEC)
4c05b187 2849 ep->offset += size;
32131a9c 2850 else
4c05b187 2851 ep->offset -= size;
32131a9c
RK
2852 }
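	  /* E.g. a (pre_dec (reg sp)) inside an SImode MEM adds 4 to the
	     stack pointer's elimination offset, or PUSH_ROUNDING (4) bytes
	     if the target rounds push sizes.  */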
2853
2854 /* Fall through to generic unary operation case. */
2855 case USE:
2856 case STRICT_LOW_PART:
2857 case NEG: case NOT:
2858 case SIGN_EXTEND: case ZERO_EXTEND:
2859 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2860 case FLOAT: case FIX:
2861 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2862 case ABS:
2863 case SQRT:
2864 case FFS:
e5687447 2865 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
32131a9c
RK
2866 if (new != XEXP (x, 0))
2867 return gen_rtx (code, GET_MODE (x), new);
2868 return x;
2869
2870 case SUBREG:
2871 /* Similar to above processing, but preserve SUBREG_WORD.
2872 Convert (subreg (mem)) to (mem) if not paradoxical.
2873 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2874 pseudo didn't get a hard reg, we must replace this with the
2875 eliminated version of the memory location because push_reloads
2876 may do the replacement in certain circumstances. */
2877 if (GET_CODE (SUBREG_REG (x)) == REG
2878 && (GET_MODE_SIZE (GET_MODE (x))
2879 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2880 && reg_equiv_memory_loc != 0
2881 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2882 {
2883 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
e5687447 2884 mem_mode, insn);
32131a9c
RK
2885
2886 /* If we didn't change anything, we must retain the pseudo. */
2887 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
59e2c378 2888 new = SUBREG_REG (x);
32131a9c 2889 else
59e2c378
RK
2890 {
2891 /* Otherwise, ensure NEW isn't shared in case we have to reload
2892 it. */
2893 new = copy_rtx (new);
2894
2895 /* In this case, we must show that the pseudo is used in this
2896 insn so that delete_output_reload will do the right thing. */
2897 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2898 && GET_CODE (insn) != INSN_LIST)
2899 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
2900 insn);
2901 }
32131a9c
RK
2902 }
2903 else
e5687447 2904 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
32131a9c
RK
2905
2906 if (new != XEXP (x, 0))
2907 {
2908 if (GET_CODE (new) == MEM
2909 && (GET_MODE_SIZE (GET_MODE (x))
a3b75c07 2910 <= GET_MODE_SIZE (GET_MODE (new)))
e90d3cbb 2911#ifdef LOAD_EXTEND_OP
a3b75c07
RS
2912 /* On these machines we will be reloading what is
2913 inside the SUBREG if it originally was a pseudo and
2914 the inner and outer modes are both a word or
2915 smaller. So leave the SUBREG then. */
2916 && ! (GET_CODE (SUBREG_REG (x)) == REG
2917 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
fc4a0dca
JW
2918 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD
2919 && (GET_MODE_SIZE (GET_MODE (x))
2920 > GET_MODE_SIZE (GET_MODE (new)))
2921 && INTEGRAL_MODE_P (GET_MODE (new))
2922 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
a3b75c07
RS
2923#endif
2924 )
32131a9c
RK
2925 {
2926 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2927 enum machine_mode mode = GET_MODE (x);
2928
2929#if BYTES_BIG_ENDIAN
2930 offset += (MIN (UNITS_PER_WORD,
2931 GET_MODE_SIZE (GET_MODE (new)))
2932 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2933#endif
2934
2935 PUT_MODE (new, mode);
2936 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2937 return new;
2938 }
2939 else
2940 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2941 }
2942
2943 return x;
2944
2945 case CLOBBER:
2946 /* If clobbering a register that is the replacement register for an
d45cf215 2947 elimination we still think can be performed, note that it cannot
32131a9c
RK
2948 be performed. Otherwise, we need not be concerned about it. */
2949 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2950 if (ep->to_rtx == XEXP (x, 0))
2951 ep->can_eliminate = 0;
2952
e5687447 2953 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2045084c
JVA
2954 if (new != XEXP (x, 0))
2955 return gen_rtx (code, GET_MODE (x), new);
32131a9c
RK
2956 return x;
2957
2958 case ASM_OPERANDS:
2959 {
2960 rtx *temp_vec;
2961 /* Properly handle sharing input and constraint vectors. */
2962 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2963 {
2964 /* When we come to a new vector not seen before,
2965 scan all its elements; keep the old vector if none
2966 of them changes; otherwise, make a copy. */
2967 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2968 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2969 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2970 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
e5687447 2971 mem_mode, insn);
32131a9c
RK
2972
2973 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2974 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2975 break;
2976
2977 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2978 new_asm_operands_vec = old_asm_operands_vec;
2979 else
2980 new_asm_operands_vec
2981 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2982 }
2983
2984 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2985 if (new_asm_operands_vec == old_asm_operands_vec)
2986 return x;
2987
2988 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2989 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2990 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2991 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2992 ASM_OPERANDS_SOURCE_FILE (x),
2993 ASM_OPERANDS_SOURCE_LINE (x));
2994 new->volatil = x->volatil;
2995 return new;
2996 }
2997
2998 case SET:
2999 /* Check for setting a register that we know about. */
3000 if (GET_CODE (SET_DEST (x)) == REG)
3001 {
3002 /* See if this is setting the replacement register for an
a8fdc208 3003 elimination.
32131a9c 3004
3ec2ea3e
DE
3005 If DEST is the hard frame pointer, we do nothing because we
3006 assume that all assignments to the frame pointer are for
3007 non-local gotos and are being done at a time when they are valid
3008 and do not disturb anything else. Some machines want to
3009 eliminate a fake argument pointer (or even a fake frame pointer)
3010 with either the real frame or the stack pointer. Assignments to
3011 the hard frame pointer must not prevent this elimination. */
32131a9c
RK
3012
3013 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3014 ep++)
3015 if (ep->to_rtx == SET_DEST (x)
3ec2ea3e 3016 && SET_DEST (x) != hard_frame_pointer_rtx)
32131a9c 3017 {
6dc42e49 3018 /* If it is being incremented, adjust the offset. Otherwise,
32131a9c
RK
3019 this elimination can't be done. */
3020 rtx src = SET_SRC (x);
3021
3022 if (GET_CODE (src) == PLUS
3023 && XEXP (src, 0) == SET_DEST (x)
3024 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3025 ep->offset -= INTVAL (XEXP (src, 1));
3026 else
3027 ep->can_eliminate = 0;
3028 }
3029
 3030 /* Now check to see if we are assigning to a register that can be
3031 eliminated. If so, it must be as part of a PARALLEL, since we
3032 will not have been called if this is a single SET. So indicate
3033 that we can no longer eliminate this reg. */
3034 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3035 ep++)
3036 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3037 ep->can_eliminate = 0;
3038 }
3039
3040 /* Now avoid the loop below in this common case. */
3041 {
e5687447
JW
3042 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3043 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
32131a9c 3044
ff32812a 3045 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
32131a9c
RK
3046 write a CLOBBER insn. */
3047 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
572ca60a
RS
3048 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3049 && GET_CODE (insn) != INSN_LIST)
32131a9c
RK
3050 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3051
3052 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3053 return gen_rtx (SET, VOIDmode, new0, new1);
3054 }
3055
3056 return x;
3057
3058 case MEM:
3059 /* Our only special processing is to pass the mode of the MEM to our
3060 recursive call and copy the flags. While we are here, handle this
3061 case more efficiently. */
e5687447 3062 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
32131a9c
RK
3063 if (new != XEXP (x, 0))
3064 {
3065 new = gen_rtx (MEM, GET_MODE (x), new);
3066 new->volatil = x->volatil;
3067 new->unchanging = x->unchanging;
3068 new->in_struct = x->in_struct;
3069 return new;
3070 }
3071 else
3072 return x;
3073 }
3074
3075 /* Process each of our operands recursively. If any have changed, make a
3076 copy of the rtx. */
3077 fmt = GET_RTX_FORMAT (code);
3078 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3079 {
3080 if (*fmt == 'e')
3081 {
e5687447 3082 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
32131a9c
RK
3083 if (new != XEXP (x, i) && ! copied)
3084 {
3085 rtx new_x = rtx_alloc (code);
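 /* Copy the whole rtx: its header plus one fld slot for each operand
 of this rtx code; the changed operands are then stored back below. */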
4c9a05bc
RK
3086 bcopy ((char *) x, (char *) new_x,
3087 (sizeof (*new_x) - sizeof (new_x->fld)
3088 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
32131a9c
RK
3089 x = new_x;
3090 copied = 1;
3091 }
3092 XEXP (x, i) = new;
3093 }
3094 else if (*fmt == 'E')
3095 {
3096 int copied_vec = 0;
3097 for (j = 0; j < XVECLEN (x, i); j++)
3098 {
3099 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3100 if (new != XVECEXP (x, i, j) && ! copied_vec)
3101 {
3102 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3103 &XVECEXP (x, i, 0));
3104 if (! copied)
3105 {
3106 rtx new_x = rtx_alloc (code);
4c9a05bc
RK
3107 bcopy ((char *) x, (char *) new_x,
3108 (sizeof (*new_x) - sizeof (new_x->fld)
3109 + (sizeof (new_x->fld[0])
3110 * GET_RTX_LENGTH (code))));
32131a9c
RK
3111 x = new_x;
3112 copied = 1;
3113 }
3114 XVEC (x, i) = new_v;
3115 copied_vec = 1;
3116 }
3117 XVECEXP (x, i, j) = new;
3118 }
3119 }
3120 }
3121
3122 return x;
3123}
3124\f
3125/* Scan INSN and eliminate all eliminable registers in it.
3126
3127 If REPLACE is nonzero, do the replacement destructively. Also
 3128 delete the insn as dead if it is setting an eliminable register.
3129
3130 If REPLACE is zero, do all our allocations in reload_obstack.
3131
3132 If no eliminations were done and this insn doesn't require any elimination
3133 processing (these are not identical conditions: it might be updating sp,
3134 but not referencing fp; this needs to be seen during reload_as_needed so
3135 that the offset between fp and sp can be taken into consideration), zero
3136 is returned. Otherwise, 1 is returned. */
3137
3138static int
3139eliminate_regs_in_insn (insn, replace)
3140 rtx insn;
3141 int replace;
3142{
3143 rtx old_body = PATTERN (insn);
774672d2 3144 rtx old_set = single_set (insn);
32131a9c
RK
3145 rtx new_body;
3146 int val = 0;
3147 struct elim_table *ep;
3148
3149 if (! replace)
3150 push_obstacks (&reload_obstack, &reload_obstack);
3151
774672d2
RK
3152 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3153 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
32131a9c
RK
3154 {
3155 /* Check for setting an eliminable register. */
3156 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
774672d2 3157 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
32131a9c 3158 {
dd1eab0a
RK
3159#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3160 /* If this is setting the frame pointer register to the
3161 hardware frame pointer register and this is an elimination
3162 that will be done (tested above), this insn is really
3163 adjusting the frame pointer downward to compensate for
3164 the adjustment done before a nonlocal goto. */
3165 if (ep->from == FRAME_POINTER_REGNUM
3166 && ep->to == HARD_FRAME_POINTER_REGNUM)
3167 {
3168 rtx src = SET_SRC (old_set);
3169 int offset, ok = 0;
3170
3171 if (src == ep->to_rtx)
3172 offset = 0, ok = 1;
3173 else if (GET_CODE (src) == PLUS
3174 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3175 offset = INTVAL (XEXP (src, 0)), ok = 1;
3176
3177 if (ok)
3178 {
3179 if (replace)
3180 {
3181 rtx src
3182 = plus_constant (ep->to_rtx, offset - ep->offset);
3183
3184 /* First see if this insn remains valid when we
3185 make the change. If not, keep the INSN_CODE
 3186 the same and let reload fix it up. */
3187 validate_change (insn, &SET_SRC (old_set), src, 1);
3188 validate_change (insn, &SET_DEST (old_set),
3189 ep->to_rtx, 1);
3190 if (! apply_change_group ())
3191 {
3192 SET_SRC (old_set) = src;
3193 SET_DEST (old_set) = ep->to_rtx;
3194 }
3195 }
3196
3197 val = 1;
3198 goto done;
3199 }
3200 }
3201#endif
3202
32131a9c
RK
3203 /* In this case this insn isn't serving a useful purpose. We
3204 will delete it in reload_as_needed once we know that this
3205 elimination is, in fact, being done.
3206
 3207 If REPLACE isn't set, we can't delete this insn, but needn't
3208 process it since it won't be used unless something changes. */
3209 if (replace)
3210 delete_dead_insn (insn);
3211 val = 1;
3212 goto done;
3213 }
3214
3215 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3216 in the insn is the negative of the offset in FROM. Substitute
3217 (set (reg) (reg to)) for the insn and change its code.
3218
 3219 We have to do this here, rather than in eliminate_regs, so that we can
3220 change the insn code. */
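 /* For illustration only: if the frame pointer is being eliminated in
 favor of the stack pointer and the accumulated offset of that
 elimination is 16, then (set (reg R) (plus (reg fp) (const_int -16)))
 is replaced outright by (set (reg R) (reg sp)). */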
3221
774672d2
RK
3222 if (GET_CODE (SET_SRC (old_set)) == PLUS
3223 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3224 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
32131a9c
RK
3225 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3226 ep++)
774672d2 3227 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
922d9d40 3228 && ep->can_eliminate)
32131a9c 3229 {
922d9d40
RK
3230 /* We must stop at the first elimination that will be used.
3231 If this one would replace the PLUS with a REG, do it
3232 now. Otherwise, quit the loop and let eliminate_regs
3233 do its normal replacement. */
774672d2 3234 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
922d9d40 3235 {
774672d2
RK
3236 /* We assume here that we don't need a PARALLEL of
3237 any CLOBBERs for this assignment. There's not
3238 much we can do if we do need it. */
922d9d40 3239 PATTERN (insn) = gen_rtx (SET, VOIDmode,
774672d2 3240 SET_DEST (old_set), ep->to_rtx);
922d9d40
RK
3241 INSN_CODE (insn) = -1;
3242 val = 1;
3243 goto done;
3244 }
3245
3246 break;
32131a9c
RK
3247 }
3248 }
3249
3250 old_asm_operands_vec = 0;
3251
3252 /* Replace the body of this insn with a substituted form. If we changed
05b4c365 3253 something, return non-zero.
32131a9c
RK
3254
3255 If we are replacing a body that was a (set X (plus Y Z)), try to
3256 re-recognize the insn. We do this in case we had a simple addition
3257 but now can do this as a load-address. This saves an insn in this
3258 common case. */
3259
fb3821f7 3260 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
32131a9c
RK
3261 if (new_body != old_body)
3262 {
7c791b13
RK
3263 /* If we aren't replacing things permanently and we changed something,
3264 make another copy to ensure that all the RTL is new. Otherwise
 3265 things can go wrong if find_reloads swaps commutative operands
3266 and one is inside RTL that has been copied while the other is not. */
3267
4d411872
RS
3268 /* Don't copy an asm_operands because (1) there's no need and (2)
3269 copy_rtx can't do it properly when there are multiple outputs. */
b84f9d9c 3270 if (! replace && asm_noperands (old_body) < 0)
7c791b13
RK
3271 new_body = copy_rtx (new_body);
3272
774672d2
RK
3273 /* If we had a move insn but now we don't, rerecognize it. This will
3274 cause spurious re-recognition if the old move had a PARALLEL since
3275 the new one still will, but we can't call single_set without
3276 having put NEW_BODY into the insn and the re-recognition won't
3277 hurt in this rare case. */
3278 if (old_set != 0
3279 && ((GET_CODE (SET_SRC (old_set)) == REG
3280 && (GET_CODE (new_body) != SET
3281 || GET_CODE (SET_SRC (new_body)) != REG))
3282 /* If this was a load from or store to memory, compare
3283 the MEM in recog_operand to the one in the insn. If they
3284 are not equal, then rerecognize the insn. */
3285 || (old_set != 0
3286 && ((GET_CODE (SET_SRC (old_set)) == MEM
3287 && SET_SRC (old_set) != recog_operand[1])
3288 || (GET_CODE (SET_DEST (old_set)) == MEM
3289 && SET_DEST (old_set) != recog_operand[0])))
3290 /* If this was an add insn before, rerecognize. */
3291 || GET_CODE (SET_SRC (old_set)) == PLUS))
4a5d0fb5
RS
3292 {
3293 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
0ba846c7
RS
3294 /* If recognition fails, store the new body anyway.
3295 It's normal to have recognition failures here
3296 due to bizarre memory addresses; reloading will fix them. */
3297 PATTERN (insn) = new_body;
4a5d0fb5 3298 }
0ba846c7 3299 else
32131a9c
RK
3300 PATTERN (insn) = new_body;
3301
32131a9c
RK
3302 val = 1;
3303 }
a8fdc208 3304
32131a9c
RK
3305 /* Loop through all elimination pairs. See if any have changed and
3306 recalculate the number not at initial offset.
3307
a8efe40d
RK
3308 Compute the maximum offset (minimum offset if the stack does not
3309 grow downward) for each elimination pair.
3310
32131a9c
RK
 3311 We also detect cases where register elimination cannot be done,
3312 namely, if a register would be both changed and referenced outside a MEM
3313 in the resulting insn since such an insn is often undefined and, even if
3314 not, we cannot know what meaning will be given to it. Note that it is
3315 valid to have a register used in an address in an insn that changes it
3316 (presumably with a pre- or post-increment or decrement).
3317
3318 If anything changes, return nonzero. */
3319
3320 num_not_at_initial_offset = 0;
3321 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3322 {
3323 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3324 ep->can_eliminate = 0;
3325
3326 ep->ref_outside_mem = 0;
3327
3328 if (ep->previous_offset != ep->offset)
3329 val = 1;
3330
3331 ep->previous_offset = ep->offset;
3332 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3333 num_not_at_initial_offset++;
a8efe40d
RK
3334
3335#ifdef STACK_GROWS_DOWNWARD
3336 ep->max_offset = MAX (ep->max_offset, ep->offset);
3337#else
3338 ep->max_offset = MIN (ep->max_offset, ep->offset);
3339#endif
32131a9c
RK
3340 }
3341
3342 done:
05b4c365
RK
 3343 /* If we changed something, perform elimination in REG_NOTES. This is
3344 needed even when REPLACE is zero because a REG_DEAD note might refer
3345 to a register that we eliminate and could cause a different number
3346 of spill registers to be needed in the final reload pass than in
3347 the pre-passes. */
20748cab 3348 if (val && REG_NOTES (insn) != 0)
ff32812a 3349 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
05b4c365 3350
32131a9c
RK
3351 if (! replace)
3352 pop_obstacks ();
3353
3354 return val;
3355}
3356
3357/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3358 replacement we currently believe is valid, mark it as not eliminable if X
3359 modifies DEST in any way other than by adding a constant integer to it.
3360
3361 If DEST is the frame pointer, we do nothing because we assume that
3ec2ea3e
DE
3362 all assignments to the hard frame pointer are nonlocal gotos and are being
3363 done at a time when they are valid and do not disturb anything else.
32131a9c 3364 Some machines want to eliminate a fake argument pointer with either the
3ec2ea3e
DE
3365 frame or stack pointer. Assignments to the hard frame pointer must not
3366 prevent this elimination.
32131a9c
RK
3367
3368 Called via note_stores from reload before starting its passes to scan
3369 the insns of the function. */
3370
3371static void
3372mark_not_eliminable (dest, x)
3373 rtx dest;
3374 rtx x;
3375{
3376 register int i;
3377
3378 /* A SUBREG of a hard register here is just changing its mode. We should
3379 not see a SUBREG of an eliminable hard register, but check just in
3380 case. */
3381 if (GET_CODE (dest) == SUBREG)
3382 dest = SUBREG_REG (dest);
3383
3ec2ea3e 3384 if (dest == hard_frame_pointer_rtx)
32131a9c
RK
3385 return;
3386
3387 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3388 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3389 && (GET_CODE (x) != SET
3390 || GET_CODE (SET_SRC (x)) != PLUS
3391 || XEXP (SET_SRC (x), 0) != dest
3392 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3393 {
3394 reg_eliminate[i].can_eliminate_previous
3395 = reg_eliminate[i].can_eliminate = 0;
3396 num_eliminable--;
3397 }
3398}
3399\f
3400/* Kick all pseudos out of hard register REGNO.
3401 If GLOBAL is nonzero, try to find someplace else to put them.
3402 If DUMPFILE is nonzero, log actions taken on that file.
3403
3404 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
 3405 because we found we can't eliminate some register. In that case, no pseudos
3406 are allowed to be in the register, even if they are only in a block that
3407 doesn't require spill registers, unlike the case when we are spilling this
3408 hard reg to produce another spill register.
3409
3410 Return nonzero if any pseudos needed to be kicked out. */
3411
3412static int
3413spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3414 register int regno;
3415 int global;
3416 FILE *dumpfile;
3417 int cant_eliminate;
3418{
c307c237 3419 enum reg_class class = REGNO_REG_CLASS (regno);
32131a9c
RK
3420 int something_changed = 0;
3421 register int i;
3422
3423 SET_HARD_REG_BIT (forbidden_regs, regno);
3424
9ff3516a
RK
3425 if (cant_eliminate)
3426 regs_ever_live[regno] = 1;
3427
32131a9c
RK
3428 /* Spill every pseudo reg that was allocated to this reg
3429 or to something that overlaps this reg. */
3430
3431 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3432 if (reg_renumber[i] >= 0
3433 && reg_renumber[i] <= regno
a8fdc208 3434 && (reg_renumber[i]
32131a9c
RK
3435 + HARD_REGNO_NREGS (reg_renumber[i],
3436 PSEUDO_REGNO_MODE (i))
3437 > regno))
3438 {
32131a9c
RK
3439 /* If this register belongs solely to a basic block which needed no
3440 spilling of any class that this register is contained in,
3441 leave it be, unless we are spilling this register because
3442 it was a hard register that can't be eliminated. */
3443
3444 if (! cant_eliminate
3445 && basic_block_needs[0]
3446 && reg_basic_block[i] >= 0
3447 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3448 {
3449 enum reg_class *p;
3450
3451 for (p = reg_class_superclasses[(int) class];
3452 *p != LIM_REG_CLASSES; p++)
3453 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3454 break;
a8fdc208 3455
32131a9c
RK
3456 if (*p == LIM_REG_CLASSES)
3457 continue;
3458 }
3459
3460 /* Mark it as no longer having a hard register home. */
3461 reg_renumber[i] = -1;
3462 /* We will need to scan everything again. */
3463 something_changed = 1;
3464 if (global)
3465 retry_global_alloc (i, forbidden_regs);
3466
3467 alter_reg (i, regno);
3468 if (dumpfile)
3469 {
3470 if (reg_renumber[i] == -1)
3471 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3472 else
3473 fprintf (dumpfile, " Register %d now in %d.\n\n",
3474 i, reg_renumber[i]);
3475 }
3476 }
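 /* Likewise free any SCRATCHes that were allocated to this hard reg,
 turning them back into (scratch) so another register can be chosen,
 unless their block needed no spilling of this class or a superclass. */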
c307c237
RK
3477 for (i = 0; i < scratch_list_length; i++)
3478 {
3479 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3480 {
3481 if (! cant_eliminate && basic_block_needs[0]
3482 && ! basic_block_needs[(int) class][scratch_block[i]])
3483 {
3484 enum reg_class *p;
3485
3486 for (p = reg_class_superclasses[(int) class];
3487 *p != LIM_REG_CLASSES; p++)
3488 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3489 break;
3490
3491 if (*p == LIM_REG_CLASSES)
3492 continue;
3493 }
3494 PUT_CODE (scratch_list[i], SCRATCH);
3495 scratch_list[i] = 0;
3496 something_changed = 1;
3497 continue;
3498 }
3499 }
32131a9c
RK
3500
3501 return something_changed;
3502}
3503\f
56f58d3a
RK
3504/* Find all paradoxical subregs within X and update reg_max_ref_width.
3505 Also mark any hard registers used to store user variables as
3506 forbidden from being used for spill registers. */
32131a9c
RK
3507
3508static void
3509scan_paradoxical_subregs (x)
3510 register rtx x;
3511{
3512 register int i;
3513 register char *fmt;
3514 register enum rtx_code code = GET_CODE (x);
3515
3516 switch (code)
3517 {
56f58d3a
RK
3518 case REG:
3519#ifdef SMALL_REGISTER_CLASSES
3520 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3521 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3522#endif
3523 return;
3524
32131a9c
RK
3525 case CONST_INT:
3526 case CONST:
3527 case SYMBOL_REF:
3528 case LABEL_REF:
3529 case CONST_DOUBLE:
3530 case CC0:
3531 case PC:
32131a9c
RK
3532 case USE:
3533 case CLOBBER:
3534 return;
3535
3536 case SUBREG:
3537 if (GET_CODE (SUBREG_REG (x)) == REG
3538 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3539 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3540 = GET_MODE_SIZE (GET_MODE (x));
3541 return;
3542 }
3543
3544 fmt = GET_RTX_FORMAT (code);
3545 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3546 {
3547 if (fmt[i] == 'e')
3548 scan_paradoxical_subregs (XEXP (x, i));
3549 else if (fmt[i] == 'E')
3550 {
3551 register int j;
3552 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3553 scan_paradoxical_subregs (XVECEXP (x, i, j));
3554 }
3555 }
3556}
3557\f
32131a9c
RK
3558static int
3559hard_reg_use_compare (p1, p2)
3560 struct hard_reg_n_uses *p1, *p2;
3561{
3562 int tem = p1->uses - p2->uses;
3563 if (tem != 0) return tem;
3564 /* If regs are equally good, sort by regno,
3565 so that the results of qsort leave nothing to chance. */
3566 return p1->regno - p2->regno;
3567}
3568
3569/* Choose the order to consider regs for use as reload registers
3570 based on how much trouble would be caused by spilling one.
3571 Store them in order of decreasing preference in potential_reload_regs. */
3572
3573static void
3574order_regs_for_reload ()
3575{
3576 register int i;
3577 register int o = 0;
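 /* LARGE will accumulate the total number of pseudo-register references;
 fixed and explicitly used hard regs are later given use counts above
 this total so that they sort to the end of potential_reload_regs. */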
3578 int large = 0;
3579
3580 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3581
3582 CLEAR_HARD_REG_SET (bad_spill_regs);
3583
3584 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3585 potential_reload_regs[i] = -1;
3586
3587 /* Count number of uses of each hard reg by pseudo regs allocated to it
3588 and then order them by decreasing use. */
3589
3590 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3591 {
3592 hard_reg_n_uses[i].uses = 0;
3593 hard_reg_n_uses[i].regno = i;
3594 }
3595
3596 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3597 {
3598 int regno = reg_renumber[i];
3599 if (regno >= 0)
3600 {
3601 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3602 while (regno < lim)
3603 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3604 }
3605 large += reg_n_refs[i];
3606 }
3607
3608 /* Now fixed registers (which cannot safely be used for reloading)
3609 get a very high use count so they will be considered least desirable.
3610 Registers used explicitly in the rtl code are almost as bad. */
3611
3612 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3613 {
3614 if (fixed_regs[i])
3615 {
3616 hard_reg_n_uses[i].uses += 2 * large + 2;
3617 SET_HARD_REG_BIT (bad_spill_regs, i);
3618 }
3619 else if (regs_explicitly_used[i])
3620 {
3621 hard_reg_n_uses[i].uses += large + 1;
546b63fb 3622#ifndef SMALL_REGISTER_CLASSES
32131a9c
RK
3623 /* ??? We are doing this here because of the potential that
3624 bad code may be generated if a register explicitly used in
3625 an insn was used as a spill register for that insn. But
 3626 not using these as spill registers may lose on some machines.
3627 We'll have to see how this works out. */
3628 SET_HARD_REG_BIT (bad_spill_regs, i);
546b63fb 3629#endif
32131a9c
RK
3630 }
3631 }
3ec2ea3e
DE
3632 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3633 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
32131a9c
RK
3634
3635#ifdef ELIMINABLE_REGS
3636 /* If registers other than the frame pointer are eliminable, mark them as
3637 poor choices. */
3638 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3639 {
3640 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3641 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3642 }
3643#endif
3644
3645 /* Prefer registers not so far used, for use in temporary loading.
3646 Among them, if REG_ALLOC_ORDER is defined, use that order.
3647 Otherwise, prefer registers not preserved by calls. */
3648
3649#ifdef REG_ALLOC_ORDER
3650 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3651 {
3652 int regno = reg_alloc_order[i];
3653
3654 if (hard_reg_n_uses[regno].uses == 0)
3655 potential_reload_regs[o++] = regno;
3656 }
3657#else
3658 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3659 {
3660 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3661 potential_reload_regs[o++] = i;
3662 }
3663 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3664 {
3665 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3666 potential_reload_regs[o++] = i;
3667 }
3668#endif
3669
3670 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3671 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3672
3673 /* Now add the regs that are already used,
3674 preferring those used less often. The fixed and otherwise forbidden
3675 registers will be at the end of this list. */
3676
3677 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3678 if (hard_reg_n_uses[i].uses != 0)
3679 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3680}
3681\f
a5339699 3682/* Used in reload_as_needed to sort the spilled regs. */
2f23a46d 3683
a5339699
RK
3684static int
3685compare_spill_regs (r1, r2)
3686 short *r1, *r2;
3687{
2f23a46d 3688 return *r1 - *r2;
a5339699
RK
3689}
3690
32131a9c
RK
3691/* Reload pseudo-registers into hard regs around each insn as needed.
3692 Additional register load insns are output before the insn that needs it
3693 and perhaps store insns after insns that modify the reloaded pseudo reg.
3694
3695 reg_last_reload_reg and reg_reloaded_contents keep track of
d08ea79f 3696 which registers are already available in reload registers.
32131a9c
RK
3697 We update these for the reloads that we perform,
3698 as the insns are scanned. */
3699
3700static void
3701reload_as_needed (first, live_known)
3702 rtx first;
3703 int live_known;
3704{
3705 register rtx insn;
3706 register int i;
3707 int this_block = 0;
3708 rtx x;
3709 rtx after_call = 0;
3710
4c9a05bc
RK
3711 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3712 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
32131a9c 3713 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
4c9a05bc 3714 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
32131a9c
RK
3715 reg_has_output_reload = (char *) alloca (max_regno);
3716 for (i = 0; i < n_spills; i++)
3717 {
3718 reg_reloaded_contents[i] = -1;
3719 reg_reloaded_insn[i] = 0;
3720 }
3721
3722 /* Reset all offsets on eliminable registers to their initial values. */
3723#ifdef ELIMINABLE_REGS
3724 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3725 {
3726 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
510dd77e 3727 reg_eliminate[i].initial_offset);
32131a9c
RK
3728 reg_eliminate[i].previous_offset
3729 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3730 }
3731#else
3732 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3733 reg_eliminate[0].previous_offset
3734 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3735#endif
3736
3737 num_not_at_initial_offset = 0;
3738
a5339699
RK
 3739 /* Order the spilled regs, so that allocate_reload_reg can guarantee to
3740 pack registers with group needs. */
3741 if (n_spills > 1)
5f40cc2d
RK
3742 {
3743 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3744 for (i = 0; i < n_spills; i++)
3745 spill_reg_order[spill_regs[i]] = i;
3746 }
a5339699 3747
32131a9c
RK
3748 for (insn = first; insn;)
3749 {
3750 register rtx next = NEXT_INSN (insn);
3751
3752 /* Notice when we move to a new basic block. */
aa2c50d6 3753 if (live_known && this_block + 1 < n_basic_blocks
32131a9c
RK
3754 && insn == basic_block_head[this_block+1])
3755 ++this_block;
3756
3757 /* If we pass a label, copy the offsets from the label information
3758 into the current offsets of each elimination. */
3759 if (GET_CODE (insn) == CODE_LABEL)
2a4b5f3b
RK
3760 {
3761 num_not_at_initial_offset = 0;
3762 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3763 {
3764 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3765 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
1d0d98f3
RK
3766 if (reg_eliminate[i].can_eliminate
3767 && (reg_eliminate[i].offset
3768 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
3769 num_not_at_initial_offset++;
3770 }
3771 }
32131a9c
RK
3772
3773 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3774 {
3775 rtx avoid_return_reg = 0;
3776
3777#ifdef SMALL_REGISTER_CLASSES
3778 /* Set avoid_return_reg if this is an insn
3779 that might use the value of a function call. */
3780 if (GET_CODE (insn) == CALL_INSN)
3781 {
3782 if (GET_CODE (PATTERN (insn)) == SET)
3783 after_call = SET_DEST (PATTERN (insn));
3784 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3785 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3786 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3787 else
3788 after_call = 0;
3789 }
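 /* Otherwise, if a preceding insn was a call, see whether this insn
 uses the call's return value; if so, remember that register so that
 choose_reload_regs can steer this insn's reloads away from it. */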
3790 else if (after_call != 0
3791 && !(GET_CODE (PATTERN (insn)) == SET
3792 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3793 {
2b979c57 3794 if (reg_referenced_p (after_call, PATTERN (insn)))
32131a9c
RK
3795 avoid_return_reg = after_call;
3796 after_call = 0;
3797 }
3798#endif /* SMALL_REGISTER_CLASSES */
3799
2758481d
RS
 3800 /* If this is a USE or CLOBBER of a MEM, ensure that any
3801 references to eliminable registers have been removed. */
3802
3803 if ((GET_CODE (PATTERN (insn)) == USE
3804 || GET_CODE (PATTERN (insn)) == CLOBBER)
3805 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3806 XEXP (XEXP (PATTERN (insn), 0), 0)
3807 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
fb3821f7 3808 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
2758481d 3809
32131a9c
RK
3810 /* If we need to do register elimination processing, do so.
3811 This might delete the insn, in which case we are done. */
3812 if (num_eliminable && GET_MODE (insn) == QImode)
3813 {
3814 eliminate_regs_in_insn (insn, 1);
3815 if (GET_CODE (insn) == NOTE)
3816 {
3817 insn = next;
3818 continue;
3819 }
3820 }
3821
3822 if (GET_MODE (insn) == VOIDmode)
3823 n_reloads = 0;
3824 /* First find the pseudo regs that must be reloaded for this insn.
3825 This info is returned in the tables reload_... (see reload.h).
3826 Also modify the body of INSN by substituting RELOAD
3827 rtx's for those pseudo regs. */
3828 else
3829 {
3830 bzero (reg_has_output_reload, max_regno);
3831 CLEAR_HARD_REG_SET (reg_is_output_reload);
3832
3833 find_reloads (insn, 1, spill_indirect_levels, live_known,
3834 spill_reg_order);
3835 }
3836
3837 if (n_reloads > 0)
3838 {
3c3eeea6
RK
3839 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3840 rtx p;
32131a9c
RK
3841 int class;
3842
3843 /* If this block has not had spilling done for a
546b63fb
RK
 3844 particular class and we have any non-optionals that need a
3845 spill reg in that class, abort. */
32131a9c
RK
3846
3847 for (class = 0; class < N_REG_CLASSES; class++)
3848 if (basic_block_needs[class] != 0
3849 && basic_block_needs[class][this_block] == 0)
3850 for (i = 0; i < n_reloads; i++)
546b63fb
RK
3851 if (class == (int) reload_reg_class[i]
3852 && reload_reg_rtx[i] == 0
3853 && ! reload_optional[i]
3854 && (reload_in[i] != 0 || reload_out[i] != 0
3855 || reload_secondary_p[i] != 0))
a89b2cc4 3856 fatal_insn ("Non-optional registers need a spill register", insn);
32131a9c
RK
3857
3858 /* Now compute which reload regs to reload them into. Perhaps
3859 reusing reload regs from previous insns, or else output
3860 load insns to reload them. Maybe output store insns too.
3861 Record the choices of reload reg in reload_reg_rtx. */
3862 choose_reload_regs (insn, avoid_return_reg);
3863
546b63fb
RK
3864#ifdef SMALL_REGISTER_CLASSES
3865 /* Merge any reloads that we didn't combine for fear of
3866 increasing the number of spill registers needed but now
3867 discover can be safely merged. */
3868 merge_assigned_reloads (insn);
3869#endif
3870
32131a9c
RK
3871 /* Generate the insns to reload operands into or out of
3872 their reload regs. */
3873 emit_reload_insns (insn);
3874
3875 /* Substitute the chosen reload regs from reload_reg_rtx
3876 into the insn's body (or perhaps into the bodies of other
3877 load and store insn that we just made for reloading
3878 and that we moved the structure into). */
3879 subst_reloads ();
3c3eeea6
RK
3880
3881 /* If this was an ASM, make sure that all the reload insns
3882 we have generated are valid. If not, give an error
3883 and delete them. */
3884
3885 if (asm_noperands (PATTERN (insn)) >= 0)
3886 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3887 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3888 && (recog_memoized (p) < 0
3889 || (insn_extract (p),
3890 ! constrain_operands (INSN_CODE (p), 1))))
3891 {
3892 error_for_asm (insn,
3893 "`asm' operand requires impossible reload");
3894 PUT_CODE (p, NOTE);
3895 NOTE_SOURCE_FILE (p) = 0;
3896 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3897 }
32131a9c
RK
3898 }
3899 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3900 is no longer validly lying around to save a future reload.
3901 Note that this does not detect pseudos that were reloaded
3902 for this insn in order to be stored in
3903 (obeying register constraints). That is correct; such reload
3904 registers ARE still valid. */
3905 note_stores (PATTERN (insn), forget_old_reloads_1);
3906
3907 /* There may have been CLOBBER insns placed after INSN. So scan
3908 between INSN and NEXT and use them to forget old reloads. */
3909 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3910 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3911 note_stores (PATTERN (x), forget_old_reloads_1);
3912
3913#ifdef AUTO_INC_DEC
3914 /* Likewise for regs altered by auto-increment in this insn.
3915 But note that the reg-notes are not changed by reloading:
3916 they still contain the pseudo-regs, not the spill regs. */
3917 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3918 if (REG_NOTE_KIND (x) == REG_INC)
3919 {
3920 /* See if this pseudo reg was reloaded in this insn.
3921 If so, its last-reload info is still valid
3922 because it is based on this insn's reload. */
3923 for (i = 0; i < n_reloads; i++)
3924 if (reload_out[i] == XEXP (x, 0))
3925 break;
3926
08fb99fa 3927 if (i == n_reloads)
9a881562 3928 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
32131a9c
RK
3929 }
3930#endif
3931 }
3932 /* A reload reg's contents are unknown after a label. */
3933 if (GET_CODE (insn) == CODE_LABEL)
3934 for (i = 0; i < n_spills; i++)
3935 {
3936 reg_reloaded_contents[i] = -1;
3937 reg_reloaded_insn[i] = 0;
3938 }
3939
3940 /* Don't assume a reload reg is still good after a call insn
3941 if it is a call-used reg. */
546b63fb 3942 else if (GET_CODE (insn) == CALL_INSN)
32131a9c
RK
3943 for (i = 0; i < n_spills; i++)
3944 if (call_used_regs[spill_regs[i]])
3945 {
3946 reg_reloaded_contents[i] = -1;
3947 reg_reloaded_insn[i] = 0;
3948 }
3949
3950 /* In case registers overlap, allow certain insns to invalidate
3951 particular hard registers. */
3952
3953#ifdef INSN_CLOBBERS_REGNO_P
3954 for (i = 0 ; i < n_spills ; i++)
3955 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3956 {
3957 reg_reloaded_contents[i] = -1;
3958 reg_reloaded_insn[i] = 0;
3959 }
3960#endif
3961
3962 insn = next;
3963
3964#ifdef USE_C_ALLOCA
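 /* With the C implementation of alloca, alloca (0) reclaims the
 storage obtained by alloca calls in functions that have returned. */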
3965 alloca (0);
3966#endif
3967 }
3968}
3969
3970/* Discard all record of any value reloaded from X,
3971 or reloaded in X from someplace else;
3972 unless X is an output reload reg of the current insn.
3973
3974 X may be a hard reg (the reload reg)
3975 or it may be a pseudo reg that was reloaded from. */
3976
3977static void
9a881562 3978forget_old_reloads_1 (x, ignored)
32131a9c 3979 rtx x;
9a881562 3980 rtx ignored;
32131a9c
RK
3981{
3982 register int regno;
3983 int nr;
0a2e51a9
RS
3984 int offset = 0;
3985
3986 /* note_stores does give us subregs of hard regs. */
3987 while (GET_CODE (x) == SUBREG)
3988 {
3989 offset += SUBREG_WORD (x);
3990 x = SUBREG_REG (x);
3991 }
32131a9c
RK
3992
3993 if (GET_CODE (x) != REG)
3994 return;
3995
0a2e51a9 3996 regno = REGNO (x) + offset;
32131a9c
RK
3997
3998 if (regno >= FIRST_PSEUDO_REGISTER)
3999 nr = 1;
4000 else
4001 {
4002 int i;
4003 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4004 /* Storing into a spilled-reg invalidates its contents.
4005 This can happen if a block-local pseudo is allocated to that reg
4006 and it wasn't spilled because this block's total need is 0.
4007 Then some insn might have an optional reload and use this reg. */
4008 for (i = 0; i < nr; i++)
4009 if (spill_reg_order[regno + i] >= 0
4010 /* But don't do this if the reg actually serves as an output
4011 reload reg in the current instruction. */
4012 && (n_reloads == 0
4013 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4014 {
4015 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4016 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4017 }
4018 }
4019
4020 /* Since value of X has changed,
4021 forget any value previously copied from it. */
4022
4023 while (nr-- > 0)
4024 /* But don't forget a copy if this is the output reload
4025 that establishes the copy's validity. */
4026 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4027 reg_last_reload_reg[regno + nr] = 0;
4028}
4029\f
4030/* For each reload, the mode of the reload register. */
4031static enum machine_mode reload_mode[MAX_RELOADS];
4032
4033/* For each reload, the largest number of registers it will require. */
4034static int reload_nregs[MAX_RELOADS];
4035
4036/* Comparison function for qsort to decide which of two reloads
4037 should be handled first. *P1 and *P2 are the reload numbers. */
4038
4039static int
4040reload_reg_class_lower (p1, p2)
4041 short *p1, *p2;
4042{
4043 register int r1 = *p1, r2 = *p2;
4044 register int t;
a8fdc208 4045
32131a9c
RK
4046 /* Consider required reloads before optional ones. */
4047 t = reload_optional[r1] - reload_optional[r2];
4048 if (t != 0)
4049 return t;
4050
4051 /* Count all solitary classes before non-solitary ones. */
4052 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4053 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4054 if (t != 0)
4055 return t;
4056
4057 /* Aside from solitaires, consider all multi-reg groups first. */
4058 t = reload_nregs[r2] - reload_nregs[r1];
4059 if (t != 0)
4060 return t;
4061
4062 /* Consider reloads in order of increasing reg-class number. */
4063 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4064 if (t != 0)
4065 return t;
4066
4067 /* If reloads are equally urgent, sort by reload number,
4068 so that the results of qsort leave nothing to chance. */
4069 return r1 - r2;
4070}
4071\f
4072/* The following HARD_REG_SETs indicate when each hard register is
4073 used for a reload of various parts of the current insn. */
4074
4075/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4076static HARD_REG_SET reload_reg_used;
546b63fb
RK
4077/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4078static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4079/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4080static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4081/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4082static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4083/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4084static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c
RK
4085/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4086static HARD_REG_SET reload_reg_used_in_op_addr;
893bc853
RK
4087/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4088static HARD_REG_SET reload_reg_used_in_op_addr_reload;
546b63fb
RK
4089/* If reg is in use for a RELOAD_FOR_INSN reload. */
4090static HARD_REG_SET reload_reg_used_in_insn;
4091/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4092static HARD_REG_SET reload_reg_used_in_other_addr;
32131a9c
RK
4093
4094/* If reg is in use as a reload reg for any sort of reload. */
4095static HARD_REG_SET reload_reg_used_at_all;
4096
be7ae2a4
RK
4097/* If reg is in use as an inherited reload. We just mark the first register
4098 in the group. */
4099static HARD_REG_SET reload_reg_used_for_inherit;
4100
546b63fb
RK
4101/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4102 TYPE. MODE is used to indicate how many consecutive regs are
4103 actually used. */
32131a9c
RK
4104
4105static void
546b63fb 4106mark_reload_reg_in_use (regno, opnum, type, mode)
32131a9c 4107 int regno;
546b63fb
RK
4108 int opnum;
4109 enum reload_type type;
32131a9c
RK
4110 enum machine_mode mode;
4111{
4112 int nregs = HARD_REGNO_NREGS (regno, mode);
4113 int i;
4114
4115 for (i = regno; i < nregs + regno; i++)
4116 {
546b63fb 4117 switch (type)
32131a9c
RK
4118 {
4119 case RELOAD_OTHER:
4120 SET_HARD_REG_BIT (reload_reg_used, i);
4121 break;
4122
546b63fb
RK
4123 case RELOAD_FOR_INPUT_ADDRESS:
4124 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
32131a9c
RK
4125 break;
4126
546b63fb
RK
4127 case RELOAD_FOR_OUTPUT_ADDRESS:
4128 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
32131a9c
RK
4129 break;
4130
4131 case RELOAD_FOR_OPERAND_ADDRESS:
4132 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4133 break;
4134
893bc853
RK
4135 case RELOAD_FOR_OPADDR_ADDR:
4136 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4137 break;
4138
546b63fb
RK
4139 case RELOAD_FOR_OTHER_ADDRESS:
4140 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4141 break;
4142
32131a9c 4143 case RELOAD_FOR_INPUT:
546b63fb 4144 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
32131a9c
RK
4145 break;
4146
4147 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4148 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4149 break;
4150
4151 case RELOAD_FOR_INSN:
4152 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
32131a9c
RK
4153 break;
4154 }
4155
4156 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4157 }
4158}
4159
be7ae2a4
RK
4160/* Similarly, but show REGNO is no longer in use for a reload. */
4161
4162static void
4163clear_reload_reg_in_use (regno, opnum, type, mode)
4164 int regno;
4165 int opnum;
4166 enum reload_type type;
4167 enum machine_mode mode;
4168{
4169 int nregs = HARD_REGNO_NREGS (regno, mode);
4170 int i;
4171
4172 for (i = regno; i < nregs + regno; i++)
4173 {
4174 switch (type)
4175 {
4176 case RELOAD_OTHER:
4177 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4178 break;
4179
4180 case RELOAD_FOR_INPUT_ADDRESS:
4181 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4182 break;
4183
4184 case RELOAD_FOR_OUTPUT_ADDRESS:
4185 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4186 break;
4187
4188 case RELOAD_FOR_OPERAND_ADDRESS:
4189 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4190 break;
4191
893bc853
RK
4192 case RELOAD_FOR_OPADDR_ADDR:
4193 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4194 break;
4195
be7ae2a4
RK
4196 case RELOAD_FOR_OTHER_ADDRESS:
4197 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4198 break;
4199
4200 case RELOAD_FOR_INPUT:
4201 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4202 break;
4203
4204 case RELOAD_FOR_OUTPUT:
4205 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4206 break;
4207
4208 case RELOAD_FOR_INSN:
4209 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4210 break;
4211 }
4212 }
4213}
4214
32131a9c 4215/* 1 if reg REGNO is free as a reload reg for a reload of the sort
546b63fb 4216 specified by OPNUM and TYPE. */
32131a9c
RK
4217
4218static int
546b63fb 4219reload_reg_free_p (regno, opnum, type)
32131a9c 4220 int regno;
546b63fb
RK
4221 int opnum;
4222 enum reload_type type;
32131a9c 4223{
546b63fb
RK
4224 int i;
4225
4226 /* In use for a RELOAD_OTHER means it's not available for anything except
4227 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4228 to be used only for inputs. */
4229
4230 if (type != RELOAD_FOR_OTHER_ADDRESS
4231 && TEST_HARD_REG_BIT (reload_reg_used, regno))
32131a9c 4232 return 0;
546b63fb
RK
4233
4234 switch (type)
32131a9c
RK
4235 {
4236 case RELOAD_OTHER:
224f1d71
RK
4237 /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
4238 we can't use it for RELOAD_OTHER. */
4239 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4240 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4241 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4242 return 0;
4243
4244 for (i = 0; i < reload_n_operands; i++)
4245 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4246 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4247 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4248 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4249 return 0;
4250
4251 return 1;
32131a9c 4252
32131a9c 4253 case RELOAD_FOR_INPUT:
546b63fb
RK
4254 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4255 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4256 return 0;
4257
893bc853
RK
4258 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4259 return 0;
4260
546b63fb
RK
4261 /* If it is used for some other input, can't use it. */
4262 for (i = 0; i < reload_n_operands; i++)
4263 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4264 return 0;
4265
4266 /* If it is used in a later operand's address, can't use it. */
4267 for (i = opnum + 1; i < reload_n_operands; i++)
4268 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4269 return 0;
4270
4271 return 1;
4272
4273 case RELOAD_FOR_INPUT_ADDRESS:
4274 /* Can't use a register if it is used for an input address for this
4275 operand or used as an input in an earlier one. */
4276 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4277 return 0;
4278
4279 for (i = 0; i < opnum; i++)
4280 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4281 return 0;
4282
4283 return 1;
4284
4285 case RELOAD_FOR_OUTPUT_ADDRESS:
4286 /* Can't use a register if it is used for an output address for this
4287 operand or used as an output in this or a later operand. */
4288 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4289 return 0;
4290
4291 for (i = opnum; i < reload_n_operands; i++)
4292 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4293 return 0;
4294
4295 return 1;
4296
32131a9c 4297 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4298 for (i = 0; i < reload_n_operands; i++)
4299 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4300 return 0;
4301
4302 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4303 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4304
893bc853
RK
4305 case RELOAD_FOR_OPADDR_ADDR:
4306 for (i = 0; i < reload_n_operands; i++)
4307 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4308 return 0;
4309
4310 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4311
32131a9c 4312 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4313 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4314 outputs, or an operand address for this or an earlier output. */
4315 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4316 return 0;
4317
4318 for (i = 0; i < reload_n_operands; i++)
4319 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4320 return 0;
4321
4322 for (i = 0; i <= opnum; i++)
4323 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4324 return 0;
4325
4326 return 1;
4327
4328 case RELOAD_FOR_INSN:
4329 for (i = 0; i < reload_n_operands; i++)
4330 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4331 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4332 return 0;
4333
4334 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4335 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4336
4337 case RELOAD_FOR_OTHER_ADDRESS:
4338 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4339 }
4340 abort ();
4341}
4342
4343/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4344 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4345 is not in use for a reload in any prior part of the insn.
4346
4347 We can assume that the reload reg was already tested for availability
4348 at the time it is needed, and we should not check this again,
4349 in case the reg has already been marked in use. */
4350
4351static int
546b63fb 4352reload_reg_free_before_p (regno, opnum, type)
32131a9c 4353 int regno;
546b63fb
RK
4354 int opnum;
4355 enum reload_type type;
32131a9c 4356{
546b63fb
RK
4357 int i;
4358
4359 switch (type)
32131a9c 4360 {
546b63fb
RK
4361 case RELOAD_FOR_OTHER_ADDRESS:
4362 /* These always come first. */
32131a9c
RK
4363 return 1;
4364
546b63fb
RK
4365 case RELOAD_OTHER:
4366 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4367
32131a9c 4368 /* If this use is for part of the insn,
546b63fb
RK
4369 check the reg is not in use for any prior part. It is tempting
 4370 to try to do this by falling through from objects that occur
4371 later in the insn to ones that occur earlier, but that will not
4372 correctly take into account the fact that here we MUST ignore
4373 things that would prevent the register from being allocated in
4374 the first place, since we know that it was allocated. */
4375
4376 case RELOAD_FOR_OUTPUT_ADDRESS:
4377 /* Earlier reloads are for earlier outputs or their addresses,
4378 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4379 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
 4380 RELOAD_OTHER). */
4381 for (i = 0; i < opnum; i++)
4382 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4383 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4384 return 0;
4385
4386 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
32131a9c 4387 return 0;
546b63fb
RK
4388
4389 for (i = 0; i < reload_n_operands; i++)
4390 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4391 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4392 return 0;
4393
4394 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4395 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4396 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4397
32131a9c 4398 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4399 /* This can't be used in the output address for this operand and
4400 anything that can't be used for it, except that we've already
4401 tested for RELOAD_FOR_INSN objects. */
4402
4403 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
32131a9c 4404 return 0;
546b63fb
RK
4405
4406 for (i = 0; i < opnum; i++)
4407 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4408 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4409 return 0;
4410
4411 for (i = 0; i < reload_n_operands; i++)
4412 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4413 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4414 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4415 return 0;
4416
4417 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4418
32131a9c 4419 case RELOAD_FOR_OPERAND_ADDRESS:
893bc853 4420 case RELOAD_FOR_OPADDR_ADDR:
546b63fb
RK
4421 case RELOAD_FOR_INSN:
4422 /* These can't conflict with inputs, or each other, so all we have to
4423 test is input addresses and the addresses of OTHER items. */
4424
4425 for (i = 0; i < reload_n_operands; i++)
4426 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4427 return 0;
4428
4429 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4430
32131a9c 4431 case RELOAD_FOR_INPUT:
546b63fb
RK
4432 /* The only things earlier are the address for this and
4433 earlier inputs, other inputs (which we know we don't conflict
4434 with), and addresses of RELOAD_OTHER objects. */
4435
4436 for (i = 0; i <= opnum; i++)
4437 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4438 return 0;
4439
4440 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4441
4442 case RELOAD_FOR_INPUT_ADDRESS:
4443 /* Similarly, all we have to check is for use in earlier inputs'
4444 addresses. */
4445 for (i = 0; i < opnum; i++)
4446 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4447 return 0;
4448
4449 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4450 }
4451 abort ();
4452}
4453
4454/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4455 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4456 is still available in REGNO at the end of the insn.
4457
4458 We can assume that the reload reg was already tested for availability
4459 at the time it is needed, and we should not check this again,
4460 in case the reg has already been marked in use. */
4461
4462static int
546b63fb 4463reload_reg_reaches_end_p (regno, opnum, type)
32131a9c 4464 int regno;
546b63fb
RK
4465 int opnum;
4466 enum reload_type type;
32131a9c 4467{
546b63fb
RK
4468 int i;
4469
4470 switch (type)
32131a9c
RK
4471 {
4472 case RELOAD_OTHER:
4473 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4474 its value must reach the end. */
4475 return 1;
4476
4477 /* If this use is for part of the insn,
546b63fb
RK
 4478 its value reaches the end if no subsequent part uses the same register.
4479 Just like the above function, don't try to do this with lots
4480 of fallthroughs. */
4481
4482 case RELOAD_FOR_OTHER_ADDRESS:
4483 /* Here we check for everything else, since these don't conflict
4484 with anything else and everything comes later. */
4485
4486 for (i = 0; i < reload_n_operands; i++)
4487 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4488 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4489 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4490 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4491 return 0;
4492
4493 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4494 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4495 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4496
4497 case RELOAD_FOR_INPUT_ADDRESS:
4498 /* Similar, except that we check only for this and subsequent inputs
4499 and the address of only subsequent inputs and we do not need
4500 to check for RELOAD_OTHER objects since they are known not to
4501 conflict. */
4502
4503 for (i = opnum; i < reload_n_operands; i++)
4504 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4505 return 0;
4506
4507 for (i = opnum + 1; i < reload_n_operands; i++)
4508 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4509 return 0;
4510
4511 for (i = 0; i < reload_n_operands; i++)
4512 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4513 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4514 return 0;
4515
893bc853
RK
4516 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4517 return 0;
4518
546b63fb
RK
4519 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4520 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4521
32131a9c 4522 case RELOAD_FOR_INPUT:
546b63fb
RK
4523 /* Similar to input address, except we start at the next operand for
4524 both input and input address and we do not check for
4525 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4526 would conflict. */
4527
4528 for (i = opnum + 1; i < reload_n_operands; i++)
4529 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4530 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4531 return 0;
4532
4533 /* ... fall through ... */
4534
32131a9c 4535 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4536 /* Check outputs and their addresses. */
4537
4538 for (i = 0; i < reload_n_operands; i++)
4539 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4540 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4541 return 0;
4542
4543 return 1;
4544
893bc853
RK
4545 case RELOAD_FOR_OPADDR_ADDR:
4546 for (i = 0; i < reload_n_operands; i++)
4547 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4548 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4549 return 0;
4550
4551 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4552 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4553
546b63fb 4554 case RELOAD_FOR_INSN:
893bc853 4555 /* These conflict with other outputs with RELOAD_OTHER. So
546b63fb
RK
4556 we need only check for output addresses. */
4557
4558 opnum = -1;
4559
4560 /* ... fall through ... */
4561
32131a9c 4562 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4563 case RELOAD_FOR_OUTPUT_ADDRESS:
4564 /* We already know these can't conflict with a later output. So the
4565 only thing to check are later output addresses. */
4566 for (i = opnum + 1; i < reload_n_operands; i++)
4567 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4568 return 0;
4569
32131a9c
RK
4570 return 1;
4571 }
546b63fb 4572
32131a9c
RK
4573 abort ();
4574}
4575\f
351aa1c1
RK
4576/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4577 Return 0 otherwise.
4578
4579 This function uses the same algorithm as reload_reg_free_p above. */
4580
4581static int
4582reloads_conflict (r1, r2)
4583 int r1, r2;
4584{
4585 enum reload_type r1_type = reload_when_needed[r1];
4586 enum reload_type r2_type = reload_when_needed[r2];
4587 int r1_opnum = reload_opnum[r1];
4588 int r2_opnum = reload_opnum[r2];
4589
adab4fc5 4590 /* RELOAD_OTHER conflicts with everything except RELOAD_FOR_OTHER_ADDRESS. */
351aa1c1 4591
adab4fc5 4592 if (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS)
351aa1c1
RK
4593 return 1;
4594
4595 /* Otherwise, check conflicts differently for each type. */
4596
4597 switch (r1_type)
4598 {
4599 case RELOAD_FOR_INPUT:
4600 return (r2_type == RELOAD_FOR_INSN
4601 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
893bc853 4602 || r2_type == RELOAD_FOR_OPADDR_ADDR
351aa1c1
RK
4603 || r2_type == RELOAD_FOR_INPUT
4604 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4605
4606 case RELOAD_FOR_INPUT_ADDRESS:
4607 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4608 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4609
4610 case RELOAD_FOR_OUTPUT_ADDRESS:
4611 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4612 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4613
4614 case RELOAD_FOR_OPERAND_ADDRESS:
4615 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4616 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4617
893bc853
RK
4618 case RELOAD_FOR_OPADDR_ADDR:
4619 return (r2_type == RELOAD_FOR_INPUT
4620 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4621
351aa1c1
RK
4622 case RELOAD_FOR_OUTPUT:
4623 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
0aaf4a58 4624 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
351aa1c1
RK
4625 && r2_opnum >= r1_opnum));
4626
4627 case RELOAD_FOR_INSN:
4628 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4629 || r2_type == RELOAD_FOR_INSN
4630 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4631
4632 case RELOAD_FOR_OTHER_ADDRESS:
4633 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4634
adab4fc5
RK
4635 case RELOAD_OTHER:
4636 return r2_type != RELOAD_FOR_OTHER_ADDRESS;
4637
351aa1c1
RK
4638 default:
4639 abort ();
4640 }
4641}
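/* Illustrative example (added commentary, not part of the original sources):
   if R1 is a RELOAD_FOR_INPUT reload for operand 1 and R2 is a
   RELOAD_FOR_INPUT_ADDRESS reload for operand 2, reloads_conflict returns 1,
   because operand 2's address is still needed after operand 1's value has
   been loaded.  If instead R2 is the input-address reload for operand 0, it
   returns 0: that address has already been consumed by the time operand 1's
   input reload happens, so the two reloads may share a register.  */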
4642\f
32131a9c
RK
4643/* Vector of reload-numbers showing the order in which the reloads should
4644 be processed. */
4645short reload_order[MAX_RELOADS];
4646
4647/* Indexed by reload number, 1 if incoming value
4648 inherited from previous insns. */
4649char reload_inherited[MAX_RELOADS];
4650
4651/* For an inherited reload, this is the insn the reload was inherited from,
4652 if we know it. Otherwise, this is 0. */
4653rtx reload_inheritance_insn[MAX_RELOADS];
4654
4655/* If non-zero, this is a place to get the value of the reload,
4656 rather than using reload_in. */
4657rtx reload_override_in[MAX_RELOADS];
4658
4659/* For each reload, the index in spill_regs of the spill register used,
4660 or -1 if we did not need one of the spill registers for this reload. */
4661int reload_spill_index[MAX_RELOADS];
4662
4663/* Index of last register assigned as a spill register. We allocate in
4664   a round-robin fashion.  */
4665
1d2310f3 4666static int last_spill_reg = 0;
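/* A minimal illustration (added note): the allocation loop below advances the
   candidate index with

       i = (i + 1) % n_spills;

   starting from LAST_SPILL_REG, so successive insns begin their search at
   different spill registers and inherited reloads get a chance to survive
   from one insn to the next.  */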
32131a9c
RK
4667
4668/* Find a spill register to use as a reload register for reload R.
4669 LAST_RELOAD is non-zero if this is the last reload for the insn being
4670 processed.
4671
4672 Set reload_reg_rtx[R] to the register allocated.
4673
4674 If NOERROR is nonzero, we return 1 if successful,
4675 or 0 if we couldn't find a spill reg and we didn't change anything. */
4676
4677static int
4678allocate_reload_reg (r, insn, last_reload, noerror)
4679 int r;
4680 rtx insn;
4681 int last_reload;
4682 int noerror;
4683{
4684 int i;
4685 int pass;
4686 int count;
4687 rtx new;
4688 int regno;
4689
4690 /* If we put this reload ahead, thinking it is a group,
4691 then insist on finding a group. Otherwise we can grab a
a8fdc208 4692 reg that some other reload needs.
32131a9c
RK
4693 (That can happen when we have a 68000 DATA_OR_FP_REG
4694 which is a group of data regs or one fp reg.)
4695 We need not be so restrictive if there are no more reloads
4696 for this insn.
4697
4698 ??? Really it would be nicer to have smarter handling
4699 for that kind of reg class, where a problem like this is normal.
4700 Perhaps those classes should be avoided for reloading
4701 by use of more alternatives. */
4702
4703 int force_group = reload_nregs[r] > 1 && ! last_reload;
4704
4705 /* If we want a single register and haven't yet found one,
4706 take any reg in the right class and not in use.
4707 If we want a consecutive group, here is where we look for it.
4708
4709 We use two passes so we can first look for reload regs to
4710 reuse, which are already in use for other reloads in this insn,
4711 and only then use additional registers.
4712 I think that maximizing reuse is needed to make sure we don't
4713 run out of reload regs. Suppose we have three reloads, and
4714 reloads A and B can share regs. These need two regs.
4715 Suppose A and B are given different regs.
4716 That leaves none for C. */
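  /* Concretely (an illustrative case added here): with n_spills == 2 and
     three reloads A, B and C, where A and B load the same value, pass 0
     reuses A's register for B, leaving the second spill register free for C;
     had A and B been given separate registers, C would be left with
     nothing.  */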
4717 for (pass = 0; pass < 2; pass++)
4718 {
4719 /* I is the index in spill_regs.
4720 We advance it round-robin between insns to use all spill regs
4721 equally, so that inherited reloads have a chance
a5339699
RK
4722 of leapfrogging each other. Don't do this, however, when we have
4723 group needs and failure would be fatal; if we only have a relatively
4724 small number of spill registers, and more than one of them has
4725 group needs, then by starting in the middle, we may end up
4726 allocating the first one in such a way that we are not left with
4727 sufficient groups to handle the rest. */
4728
4729 if (noerror || ! force_group)
4730 i = last_spill_reg;
4731 else
4732 i = -1;
4733
4734 for (count = 0; count < n_spills; count++)
32131a9c
RK
4735 {
4736 int class = (int) reload_reg_class[r];
4737
4738 i = (i + 1) % n_spills;
4739
546b63fb
RK
4740 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4741 reload_when_needed[r])
32131a9c
RK
4742 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4743 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
be7ae2a4
RK
4744 /* Look first for regs to share, then for unshared. But
4745 don't share regs used for inherited reloads; they are
4746 the ones we want to preserve. */
4747 && (pass
4748 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4749 spill_regs[i])
4750 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4751 spill_regs[i]))))
32131a9c
RK
4752 {
4753 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4754 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4755 (on 68000) got us two FP regs. If NR is 1,
4756 we would reject both of them. */
4757 if (force_group)
4758 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4759 /* If we need only one reg, we have already won. */
4760 if (nr == 1)
4761 {
4762 /* But reject a single reg if we demand a group. */
4763 if (force_group)
4764 continue;
4765 break;
4766 }
4767 /* Otherwise check that as many consecutive regs as we need
4768 are available here.
4769		   Also, don't use, for a group, registers that are
4770		   needed for nongroups.  */
4771 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4772 while (nr > 1)
4773 {
4774 regno = spill_regs[i] + nr - 1;
4775 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4776 && spill_reg_order[regno] >= 0
546b63fb
RK
4777 && reload_reg_free_p (regno, reload_opnum[r],
4778 reload_when_needed[r])
32131a9c
RK
4779 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4780 regno)))
4781 break;
4782 nr--;
4783 }
4784 if (nr == 1)
4785 break;
4786 }
4787 }
4788
4789 /* If we found something on pass 1, omit pass 2. */
4790 if (count < n_spills)
4791 break;
4792 }
4793
4794 /* We should have found a spill register by now. */
4795 if (count == n_spills)
4796 {
4797 if (noerror)
4798 return 0;
139fc12e 4799 goto failure;
32131a9c
RK
4800 }
4801
be7ae2a4
RK
4802 /* I is the index in SPILL_REG_RTX of the reload register we are to
4803 allocate. Get an rtx for it and find its register number. */
32131a9c
RK
4804
4805 new = spill_reg_rtx[i];
4806
4807 if (new == 0 || GET_MODE (new) != reload_mode[r])
be7ae2a4
RK
4808 spill_reg_rtx[i] = new
4809 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4810
32131a9c
RK
4811 regno = true_regnum (new);
4812
4813 /* Detect when the reload reg can't hold the reload mode.
4814     This used to be one `if', but the Sequent compiler can't handle that.  */
4815 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4816 {
4817 enum machine_mode test_mode = VOIDmode;
4818 if (reload_in[r])
4819 test_mode = GET_MODE (reload_in[r]);
4820 /* If reload_in[r] has VOIDmode, it means we will load it
4821 in whatever mode the reload reg has: to wit, reload_mode[r].
4822 We have already tested that for validity. */
4823 /* Aside from that, we need to test that the expressions
4824 to reload from or into have modes which are valid for this
4825 reload register. Otherwise the reload insns would be invalid. */
4826 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4827 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4828 if (! (reload_out[r] != 0
4829 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
be7ae2a4
RK
4830 {
4831 /* The reg is OK. */
4832 last_spill_reg = i;
4833
4834 /* Mark as in use for this insn the reload regs we use
4835 for this. */
4836 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4837 reload_when_needed[r], reload_mode[r]);
4838
4839 reload_reg_rtx[r] = new;
4840 reload_spill_index[r] = i;
4841 return 1;
4842 }
32131a9c
RK
4843 }
4844
4845 /* The reg is not OK. */
4846 if (noerror)
4847 return 0;
4848
139fc12e 4849 failure:
32131a9c
RK
4850 if (asm_noperands (PATTERN (insn)) < 0)
4851 /* It's the compiler's fault. */
a89b2cc4 4852 fatal_insn ("Could not find a spill register", insn);
32131a9c
RK
4853
4854 /* It's the user's fault; the operand's mode and constraint
4855 don't match. Disable this reload so we don't crash in final. */
4856 error_for_asm (insn,
4857 "`asm' operand constraint incompatible with operand size");
4858 reload_in[r] = 0;
4859 reload_out[r] = 0;
4860 reload_reg_rtx[r] = 0;
4861 reload_optional[r] = 1;
4862 reload_secondary_p[r] = 1;
4863
4864 return 1;
4865}
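/* An illustrative sketch (not compiled) of how allocate_reload_reg is driven
   from choose_reload_regs below; NOERROR is passed as the inheritance flag,
   so a failure on the inheriting pass is quiet and the whole insn is simply
   redone without inheritance.  */
#if 0
  for (j = 0; j < n_reloads; j++)
    {
      int r = reload_order[j];

      if (reload_reg_rtx[r] != 0 || reload_optional[r])
	continue;

      if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
	break;			/* restore saved state and retry */
    }
#endif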
4866\f
4867/* Assign hard reg targets for the pseudo-registers we must reload
4868 into hard regs for this insn.
4869 Also output the instructions to copy them in and out of the hard regs.
4870
4871 For machines with register classes, we are responsible for
4872 finding a reload reg in the proper class. */
4873
4874static void
4875choose_reload_regs (insn, avoid_return_reg)
4876 rtx insn;
32131a9c
RK
4877 rtx avoid_return_reg;
4878{
4879 register int i, j;
4880 int max_group_size = 1;
4881 enum reg_class group_class = NO_REGS;
4882 int inheritance;
4883
4884 rtx save_reload_reg_rtx[MAX_RELOADS];
4885 char save_reload_inherited[MAX_RELOADS];
4886 rtx save_reload_inheritance_insn[MAX_RELOADS];
4887 rtx save_reload_override_in[MAX_RELOADS];
4888 int save_reload_spill_index[MAX_RELOADS];
4889 HARD_REG_SET save_reload_reg_used;
546b63fb
RK
4890 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4891 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4892 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4893 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c 4894 HARD_REG_SET save_reload_reg_used_in_op_addr;
893bc853 4895 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
546b63fb
RK
4896 HARD_REG_SET save_reload_reg_used_in_insn;
4897 HARD_REG_SET save_reload_reg_used_in_other_addr;
32131a9c
RK
4898 HARD_REG_SET save_reload_reg_used_at_all;
4899
4900 bzero (reload_inherited, MAX_RELOADS);
4c9a05bc
RK
4901 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4902 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
32131a9c
RK
4903
4904 CLEAR_HARD_REG_SET (reload_reg_used);
4905 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
32131a9c 4906 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
893bc853 4907 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
546b63fb
RK
4908 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4909 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
32131a9c 4910
546b63fb
RK
4911 for (i = 0; i < reload_n_operands; i++)
4912 {
4913 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4914 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4915 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4916 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4917 }
32131a9c
RK
4918
4919#ifdef SMALL_REGISTER_CLASSES
4920 /* Don't bother with avoiding the return reg
4921 if we have no mandatory reload that could use it. */
4922 if (avoid_return_reg)
4923 {
4924 int do_avoid = 0;
4925 int regno = REGNO (avoid_return_reg);
4926 int nregs
4927 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4928 int r;
4929
4930 for (r = regno; r < regno + nregs; r++)
4931 if (spill_reg_order[r] >= 0)
4932 for (j = 0; j < n_reloads; j++)
4933 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4934 && (reload_in[j] != 0 || reload_out[j] != 0
4935 || reload_secondary_p[j])
4936 &&
4937 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4938 do_avoid = 1;
4939 if (!do_avoid)
4940 avoid_return_reg = 0;
4941 }
4942#endif /* SMALL_REGISTER_CLASSES */
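  /* Illustrative scenario (added note; an assumption about how
     AVOID_RETURN_REG is set up by the caller): on a machine with few
     registers, the insn that copies a function's return value out of the
     return register must not have that register picked as a reload register,
     or the still-unsaved return value would be clobbered.  The check above
     only keeps the restriction when some mandatory reload could actually
     take that register.  */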
4943
4944#if 0 /* Not needed, now that we can always retry without inheritance. */
4945 /* See if we have more mandatory reloads than spill regs.
4946 If so, then we cannot risk optimizations that could prevent
a8fdc208 4947 reloads from sharing one spill register.
32131a9c
RK
4948
4949 Since we will try finding a better register than reload_reg_rtx
4950 unless it is equal to reload_in or reload_out, count such reloads. */
4951
4952 {
4953 int tem = 0;
4954#ifdef SMALL_REGISTER_CLASSES
4955 int tem = (avoid_return_reg != 0);
a8fdc208 4956#endif
32131a9c
RK
4957 for (j = 0; j < n_reloads; j++)
4958 if (! reload_optional[j]
4959 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4960 && (reload_reg_rtx[j] == 0
4961 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4962 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4963 tem++;
4964 if (tem > n_spills)
4965 must_reuse = 1;
4966 }
4967#endif
4968
4969#ifdef SMALL_REGISTER_CLASSES
4970 /* Don't use the subroutine call return reg for a reload
4971 if we are supposed to avoid it. */
4972 if (avoid_return_reg)
4973 {
4974 int regno = REGNO (avoid_return_reg);
4975 int nregs
4976 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4977 int r;
4978
4979 for (r = regno; r < regno + nregs; r++)
4980 if (spill_reg_order[r] >= 0)
4981 SET_HARD_REG_BIT (reload_reg_used, r);
4982 }
4983#endif /* SMALL_REGISTER_CLASSES */
4984
4985 /* In order to be certain of getting the registers we need,
4986 we must sort the reloads into order of increasing register class.
4987 Then our grabbing of reload registers will parallel the process
a8fdc208 4988 that provided the reload registers.
32131a9c
RK
4989
4990 Also note whether any of the reloads wants a consecutive group of regs.
4991 If so, record the maximum size of the group desired and what
4992 register class contains all the groups needed by this insn. */
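  /* For instance (an illustrative assumption about the sort order): a
     required reload whose class contains only one hard register sorts ahead
     of one that accepts any general register, so the reload with no
     flexibility gets first pick of the spill registers.  */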
4993
4994 for (j = 0; j < n_reloads; j++)
4995 {
4996 reload_order[j] = j;
4997 reload_spill_index[j] = -1;
4998
4999 reload_mode[j]
546b63fb
RK
5000 = (reload_inmode[j] == VOIDmode
5001 || (GET_MODE_SIZE (reload_outmode[j])
5002 > GET_MODE_SIZE (reload_inmode[j])))
5003 ? reload_outmode[j] : reload_inmode[j];
32131a9c
RK
5004
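      /* Example (added note): for an in-out reload with an SImode input and
	 a DImode output, reload_mode[j] is DImode, so reload_nregs[j] below
	 is computed for the wider of the two modes.  */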
5005 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5006
5007 if (reload_nregs[j] > 1)
5008 {
5009 max_group_size = MAX (reload_nregs[j], max_group_size);
5010 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5011 }
5012
5013 /* If we have already decided to use a certain register,
5014 don't use it in another way. */
5015 if (reload_reg_rtx[j])
546b63fb 5016 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
32131a9c
RK
5017 reload_when_needed[j], reload_mode[j]);
5018 }
5019
5020 if (n_reloads > 1)
5021 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5022
4c9a05bc
RK
5023 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5024 sizeof reload_reg_rtx);
32131a9c 5025 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4c9a05bc
RK
5026 bcopy ((char *) reload_inheritance_insn,
5027 (char *) save_reload_inheritance_insn,
32131a9c 5028 sizeof reload_inheritance_insn);
4c9a05bc 5029 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
32131a9c 5030 sizeof reload_override_in);
4c9a05bc 5031 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
32131a9c
RK
5032 sizeof reload_spill_index);
5033 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5034 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
32131a9c
RK
5035 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5036 reload_reg_used_in_op_addr);
893bc853
RK
5037
5038 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5039 reload_reg_used_in_op_addr_reload);
5040
546b63fb
RK
5041 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5042 reload_reg_used_in_insn);
5043 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5044 reload_reg_used_in_other_addr);
5045
5046 for (i = 0; i < reload_n_operands; i++)
5047 {
5048 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5049 reload_reg_used_in_output[i]);
5050 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5051 reload_reg_used_in_input[i]);
5052 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5053 reload_reg_used_in_input_addr[i]);
5054 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5055 reload_reg_used_in_output_addr[i]);
5056 }
32131a9c 5057
58b1581b
RS
5058 /* If -O, try first with inheritance, then turning it off.
5059 If not -O, don't do inheritance.
5060 Using inheritance when not optimizing leads to paradoxes
5061 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5062 because one side of the comparison might be inherited. */
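  /* Added note: with -O the loop below first tries inheritance
     (inheritance == 1) and, only if allocation fails, restores the saved
     state and retries with inheritance == 0; without -O only the
     non-inheriting iteration runs.  */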
32131a9c 5063
58b1581b 5064 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
32131a9c
RK
5065 {
5066 /* Process the reloads in order of preference just found.
5067 Beyond this point, subregs can be found in reload_reg_rtx.
5068
5069 This used to look for an existing reloaded home for all
5070 of the reloads, and only then perform any new reloads.
5071 But that could lose if the reloads were done out of reg-class order
5072 because a later reload with a looser constraint might have an old
5073 home in a register needed by an earlier reload with a tighter constraint.
5074
5075 To solve this, we make two passes over the reloads, in the order
5076 described above. In the first pass we try to inherit a reload
5077 from a previous insn. If there is a later reload that needs a
5078 class that is a proper subset of the class being processed, we must
5079 also allocate a spill register during the first pass.
5080
5081 Then make a second pass over the reloads to allocate any reloads
5082 that haven't been given registers yet. */
5083
be7ae2a4
RK
5084 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5085
32131a9c
RK
5086 for (j = 0; j < n_reloads; j++)
5087 {
5088 register int r = reload_order[j];
5089
5090 /* Ignore reloads that got marked inoperative. */
5091 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5092 continue;
5093
5094	  /* If find_reloads chose to use reload_in or reload_out as a reload
5095	     register, we don't need to choose one.  Otherwise, try even if it found
5096	     one since we might save an insn if we find the value lying around.  */
5097 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5098 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5099 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5100 continue;
5101
5102#if 0 /* No longer needed for correct operation.
5103 It might give better code, or might not; worth an experiment? */
5104 /* If this is an optional reload, we can't inherit from earlier insns
5105 until we are sure that any non-optional reloads have been allocated.
5106 The following code takes advantage of the fact that optional reloads
5107 are at the end of reload_order. */
5108 if (reload_optional[r] != 0)
5109 for (i = 0; i < j; i++)
5110 if ((reload_out[reload_order[i]] != 0
5111 || reload_in[reload_order[i]] != 0
5112 || reload_secondary_p[reload_order[i]])
5113 && ! reload_optional[reload_order[i]]
5114 && reload_reg_rtx[reload_order[i]] == 0)
5115 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5116#endif
5117
5118 /* First see if this pseudo is already available as reloaded
5119 for a previous insn. We cannot try to inherit for reloads
5120 that are smaller than the maximum number of registers needed
5121 for groups unless the register we would allocate cannot be used
5122 for the groups.
5123
5124 We could check here to see if this is a secondary reload for
5125 an object that is already in a register of the desired class.
5126 This would avoid the need for the secondary reload register.
5127 But this is complex because we can't easily determine what
5128 objects might want to be loaded via this reload. So let a register
5129 be allocated here. In `emit_reload_insns' we suppress one of the
5130 loads in the case described above. */
5131
5132 if (inheritance)
5133 {
5134 register int regno = -1;
db660765 5135 enum machine_mode mode;
32131a9c
RK
5136
5137 if (reload_in[r] == 0)
5138 ;
5139 else if (GET_CODE (reload_in[r]) == REG)
db660765
TW
5140 {
5141 regno = REGNO (reload_in[r]);
5142 mode = GET_MODE (reload_in[r]);
5143 }
32131a9c 5144 else if (GET_CODE (reload_in_reg[r]) == REG)
db660765
TW
5145 {
5146 regno = REGNO (reload_in_reg[r]);
5147 mode = GET_MODE (reload_in_reg[r]);
5148 }
32131a9c
RK
5149#if 0
5150 /* This won't work, since REGNO can be a pseudo reg number.
5151 Also, it takes much more hair to keep track of all the things
5152 that can invalidate an inherited reload of part of a pseudoreg. */
5153 else if (GET_CODE (reload_in[r]) == SUBREG
5154 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5155 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5156#endif
5157
5158 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5159 {
5160 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5161
5162 if (reg_reloaded_contents[i] == regno
db660765
TW
5163 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5164 >= GET_MODE_SIZE (mode))
32131a9c
RK
5165 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5166 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5167 spill_regs[i])
5168 && (reload_nregs[r] == max_group_size
5169 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5170 spill_regs[i]))
546b63fb
RK
5171 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5172 reload_when_needed[r])
32131a9c 5173 && reload_reg_free_before_p (spill_regs[i],
546b63fb 5174 reload_opnum[r],
32131a9c
RK
5175 reload_when_needed[r]))
5176 {
5177 /* If a group is needed, verify that all the subsequent
5178 registers still have their values intact. */
5179 int nr
5180 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5181 int k;
5182
5183 for (k = 1; k < nr; k++)
5184 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5185 != regno)
5186 break;
5187
5188 if (k == nr)
5189 {
c74fa651
RS
5190 int i1;
5191
5192 /* We found a register that contains the
5193 value we need. If this register is the
5194 same as an `earlyclobber' operand of the
5195 current insn, just mark it as a place to
5196 reload from since we can't use it as the
5197 reload register itself. */
5198
5199 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5200 if (reg_overlap_mentioned_for_reload_p
5201 (reg_last_reload_reg[regno],
5202 reload_earlyclobbers[i1]))
5203 break;
5204
8908158d
RS
5205 if (i1 != n_earlyclobbers
5206 /* Don't really use the inherited spill reg
5207 if we need it wider than we've got it. */
5208 || (GET_MODE_SIZE (reload_mode[r])
5209 > GET_MODE_SIZE (mode)))
c74fa651
RS
5210 reload_override_in[r] = reg_last_reload_reg[regno];
5211 else
5212 {
54c40e68 5213 int k;
c74fa651
RS
5214 /* We can use this as a reload reg. */
5215 /* Mark the register as in use for this part of
5216 the insn. */
5217 mark_reload_reg_in_use (spill_regs[i],
5218 reload_opnum[r],
5219 reload_when_needed[r],
5220 reload_mode[r]);
5221 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5222 reload_inherited[r] = 1;
5223 reload_inheritance_insn[r]
5224 = reg_reloaded_insn[i];
5225 reload_spill_index[r] = i;
54c40e68
RS
5226 for (k = 0; k < nr; k++)
5227 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5228 spill_regs[i + k]);
c74fa651 5229 }
32131a9c
RK
5230 }
5231 }
5232 }
5233 }
5234
5235 /* Here's another way to see if the value is already lying around. */
5236 if (inheritance
5237 && reload_in[r] != 0
5238 && ! reload_inherited[r]
5239 && reload_out[r] == 0
5240 && (CONSTANT_P (reload_in[r])
5241 || GET_CODE (reload_in[r]) == PLUS
5242 || GET_CODE (reload_in[r]) == REG
5243 || GET_CODE (reload_in[r]) == MEM)
5244 && (reload_nregs[r] == max_group_size
5245 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5246 {
5247 register rtx equiv
5248 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
fb3821f7 5249 -1, NULL_PTR, 0, reload_mode[r]);
32131a9c
RK
5250 int regno;
5251
5252 if (equiv != 0)
5253 {
5254 if (GET_CODE (equiv) == REG)
5255 regno = REGNO (equiv);
5256 else if (GET_CODE (equiv) == SUBREG)
5257 {
f8a9e02b
RK
5258 /* This must be a SUBREG of a hard register.
5259 Make a new REG since this might be used in an
5260 address and not all machines support SUBREGs
5261 there. */
5262 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5263 equiv = gen_rtx (REG, reload_mode[r], regno);
32131a9c
RK
5264 }
5265 else
5266 abort ();
5267 }
5268
5269 /* If we found a spill reg, reject it unless it is free
5270 and of the desired class. */
5271 if (equiv != 0
5272 && ((spill_reg_order[regno] >= 0
546b63fb 5273 && ! reload_reg_free_before_p (regno, reload_opnum[r],
32131a9c
RK
5274 reload_when_needed[r]))
5275 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5276 regno)))
5277 equiv = 0;
5278
5279 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5280 equiv = 0;
5281
5282 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5283 equiv = 0;
5284
5285 /* We found a register that contains the value we need.
5286 If this register is the same as an `earlyclobber' operand
5287 of the current insn, just mark it as a place to reload from
5288 since we can't use it as the reload register itself. */
5289
5290 if (equiv != 0)
5291 for (i = 0; i < n_earlyclobbers; i++)
bfa30b22
RK
5292 if (reg_overlap_mentioned_for_reload_p (equiv,
5293 reload_earlyclobbers[i]))
32131a9c
RK
5294 {
5295 reload_override_in[r] = equiv;
5296 equiv = 0;
5297 break;
5298 }
5299
5300 /* JRV: If the equiv register we have found is explicitly
5301 clobbered in the current insn, mark but don't use, as above. */
5302
5303 if (equiv != 0 && regno_clobbered_p (regno, insn))
5304 {
5305 reload_override_in[r] = equiv;
5306 equiv = 0;
5307 }
5308
5309 /* If we found an equivalent reg, say no code need be generated
5310 to load it, and use it as our reload reg. */
3ec2ea3e 5311 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
32131a9c
RK
5312 {
5313 reload_reg_rtx[r] = equiv;
5314 reload_inherited[r] = 1;
5315 /* If it is a spill reg,
5316 mark the spill reg as in use for this insn. */
5317 i = spill_reg_order[regno];
5318 if (i >= 0)
be7ae2a4 5319 {
54c40e68
RS
5320 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5321 int k;
be7ae2a4
RK
5322 mark_reload_reg_in_use (regno, reload_opnum[r],
5323 reload_when_needed[r],
5324 reload_mode[r]);
54c40e68
RS
5325 for (k = 0; k < nr; k++)
5326 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
be7ae2a4 5327 }
32131a9c
RK
5328 }
5329 }
5330
5331 /* If we found a register to use already, or if this is an optional
5332 reload, we are done. */
5333 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5334 continue;
5335
5336#if 0 /* No longer needed for correct operation. Might or might not
5337 give better code on the average. Want to experiment? */
5338
5339 /* See if there is a later reload that has a class different from our
5340 class that intersects our class or that requires less register
5341 than our reload. If so, we must allocate a register to this
5342 reload now, since that reload might inherit a previous reload
5343 and take the only available register in our class. Don't do this
5344 for optional reloads since they will force all previous reloads
5345 to be allocated. Also don't do this for reloads that have been
5346 turned off. */
5347
5348 for (i = j + 1; i < n_reloads; i++)
5349 {
5350 int s = reload_order[i];
5351
d45cf215
RS
5352 if ((reload_in[s] == 0 && reload_out[s] == 0
5353 && ! reload_secondary_p[s])
32131a9c
RK
5354 || reload_optional[s])
5355 continue;
5356
5357 if ((reload_reg_class[s] != reload_reg_class[r]
5358 && reg_classes_intersect_p (reload_reg_class[r],
5359 reload_reg_class[s]))
5360 || reload_nregs[s] < reload_nregs[r])
5361 break;
5362 }
5363
5364 if (i == n_reloads)
5365 continue;
5366
5367 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5368#endif
5369 }
5370
5371 /* Now allocate reload registers for anything non-optional that
5372 didn't get one yet. */
5373 for (j = 0; j < n_reloads; j++)
5374 {
5375 register int r = reload_order[j];
5376
5377 /* Ignore reloads that got marked inoperative. */
5378 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5379 continue;
5380
5381 /* Skip reloads that already have a register allocated or are
5382 optional. */
5383 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5384 continue;
5385
5386 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5387 break;
5388 }
5389
5390 /* If that loop got all the way, we have won. */
5391 if (j == n_reloads)
5392 break;
5393
5394 fail:
5395 /* Loop around and try without any inheritance. */
5396 /* First undo everything done by the failed attempt
5397 to allocate with inheritance. */
4c9a05bc
RK
5398 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5399 sizeof reload_reg_rtx);
5400 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5401 sizeof reload_inherited);
5402 bcopy ((char *) save_reload_inheritance_insn,
5403 (char *) reload_inheritance_insn,
32131a9c 5404 sizeof reload_inheritance_insn);
4c9a05bc 5405 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
32131a9c 5406 sizeof reload_override_in);
4c9a05bc 5407 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
32131a9c
RK
5408 sizeof reload_spill_index);
5409 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5410 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
32131a9c
RK
5411 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5412 save_reload_reg_used_in_op_addr);
893bc853
RK
5413 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5414 save_reload_reg_used_in_op_addr_reload);
546b63fb
RK
5415 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5416 save_reload_reg_used_in_insn);
5417 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5418 save_reload_reg_used_in_other_addr);
5419
5420 for (i = 0; i < reload_n_operands; i++)
5421 {
5422 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5423 save_reload_reg_used_in_input[i]);
5424 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5425 save_reload_reg_used_in_output[i]);
5426 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5427 save_reload_reg_used_in_input_addr[i]);
5428 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5429 save_reload_reg_used_in_output_addr[i]);
5430 }
32131a9c
RK
5431 }
5432
5433 /* If we thought we could inherit a reload, because it seemed that
5434 nothing else wanted the same reload register earlier in the insn,
5435 verify that assumption, now that all reloads have been assigned. */
5436
5437 for (j = 0; j < n_reloads; j++)
5438 {
5439 register int r = reload_order[j];
5440
5441 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5442 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
546b63fb 5443 reload_opnum[r],
32131a9c
RK
5444 reload_when_needed[r]))
5445 reload_inherited[r] = 0;
5446
5447 /* If we found a better place to reload from,
5448 validate it in the same fashion, if it is a reload reg. */
5449 if (reload_override_in[r]
5450 && (GET_CODE (reload_override_in[r]) == REG
5451 || GET_CODE (reload_override_in[r]) == SUBREG))
5452 {
5453 int regno = true_regnum (reload_override_in[r]);
5454 if (spill_reg_order[regno] >= 0
546b63fb
RK
5455 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5456 reload_when_needed[r]))
32131a9c
RK
5457 reload_override_in[r] = 0;
5458 }
5459 }
5460
5461 /* Now that reload_override_in is known valid,
5462 actually override reload_in. */
5463 for (j = 0; j < n_reloads; j++)
5464 if (reload_override_in[j])
5465 reload_in[j] = reload_override_in[j];
5466
5467 /* If this reload won't be done because it has been cancelled or is
5468 optional and not inherited, clear reload_reg_rtx so other
5469 routines (such as subst_reloads) don't get confused. */
5470 for (j = 0; j < n_reloads; j++)
be7ae2a4
RK
5471 if (reload_reg_rtx[j] != 0
5472 && ((reload_optional[j] && ! reload_inherited[j])
5473 || (reload_in[j] == 0 && reload_out[j] == 0
5474 && ! reload_secondary_p[j])))
5475 {
5476 int regno = true_regnum (reload_reg_rtx[j]);
5477
5478 if (spill_reg_order[regno] >= 0)
5479 clear_reload_reg_in_use (regno, reload_opnum[j],
5480 reload_when_needed[j], reload_mode[j]);
5481 reload_reg_rtx[j] = 0;
5482 }
32131a9c
RK
5483
5484 /* Record which pseudos and which spill regs have output reloads. */
5485 for (j = 0; j < n_reloads; j++)
5486 {
5487 register int r = reload_order[j];
5488
5489 i = reload_spill_index[r];
5490
5491 /* I is nonneg if this reload used one of the spill regs.
5492 If reload_reg_rtx[r] is 0, this is an optional reload
5493 that we opted to ignore. */
5494 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5495 && reload_reg_rtx[r] != 0)
5496 {
5497 register int nregno = REGNO (reload_out[r]);
372e033b
RS
5498 int nr = 1;
5499
5500 if (nregno < FIRST_PSEUDO_REGISTER)
5501 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
32131a9c
RK
5502
5503 while (--nr >= 0)
372e033b
RS
5504 reg_has_output_reload[nregno + nr] = 1;
5505
5506 if (i >= 0)
32131a9c 5507 {
372e033b
RS
5508 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5509 while (--nr >= 0)
32131a9c
RK
5510 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5511 }
5512
5513 if (reload_when_needed[r] != RELOAD_OTHER
546b63fb
RK
5514 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5515 && reload_when_needed[r] != RELOAD_FOR_INSN)
32131a9c
RK
5516 abort ();
5517 }
5518 }
5519}
5520\f
546b63fb
RK
5521/* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5522 reloads of the same item for fear that we might not have enough reload
5523 registers. However, normally they will get the same reload register
5524 and hence actually need not be loaded twice.
5525
5526 Here we check for the most common case of this phenomenon: when we have
5527   a number of reloads for the same object, each of which was allocated
5528 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5529 reload, and is not modified in the insn itself. If we find such,
5530 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5531 This will not increase the number of spill registers needed and will
5532 prevent redundant code. */
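/* As an illustration (hypothetical case, added commentary): an insn that uses
   the same pseudo both as an operand and inside another operand's address may
   get two input-type reloads of that pseudo.  If both were assigned the same
   reload register and the insn itself never modifies that register, the
   reloads are merged, the survivor becomes RELOAD_OTHER, and the value is
   loaded just once.  */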
5533
5534#ifdef SMALL_REGISTER_CLASSES
5535
5536static void
5537merge_assigned_reloads (insn)
5538 rtx insn;
5539{
5540 int i, j;
5541
5542 /* Scan all the reloads looking for ones that only load values and
5543 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5544 assigned and not modified by INSN. */
5545
5546 for (i = 0; i < n_reloads; i++)
5547 {
5548 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5549 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5550 || reg_set_p (reload_reg_rtx[i], insn))
5551 continue;
5552
5553 /* Look at all other reloads. Ensure that the only use of this
5554 reload_reg_rtx is in a reload that just loads the same value
5555 as we do. Note that any secondary reloads must be of the identical
5556 class since the values, modes, and result registers are the
5557 same, so we need not do anything with any secondary reloads. */
5558
5559 for (j = 0; j < n_reloads; j++)
5560 {
5561 if (i == j || reload_reg_rtx[j] == 0
5562 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5563 reload_reg_rtx[i]))
5564 continue;
5565
5566	  /* If the reload regs aren't exactly the same (e.g., different modes)
5567 or if the values are different, we can't merge anything with this
5568 reload register. */
5569
5570 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5571 || reload_out[j] != 0 || reload_in[j] == 0
5572 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5573 break;
5574 }
5575
5576 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5577 we, in fact, found any matching reloads. */
5578
5579 if (j == n_reloads)
5580 {
5581 for (j = 0; j < n_reloads; j++)
5582 if (i != j && reload_reg_rtx[j] != 0
5583 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5584 {
5585 reload_when_needed[i] = RELOAD_OTHER;
5586 reload_in[j] = 0;
5587 transfer_replacements (i, j);
5588 }
5589
5590 /* If this is now RELOAD_OTHER, look for any reloads that load
5591 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5592 if they were for inputs, RELOAD_OTHER for outputs. Note that
5593 this test is equivalent to looking for reloads for this operand
5594 number. */
5595
5596 if (reload_when_needed[i] == RELOAD_OTHER)
5597 for (j = 0; j < n_reloads; j++)
5598 if (reload_in[j] != 0
5599		    && reload_when_needed[j] != RELOAD_OTHER
5600 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5601 reload_in[i]))
5602 reload_when_needed[j]
5603		  = reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5604 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5605 }
5606 }
5607}
5608#endif /* SMALL_REGISTER_CLASSES */
5609\f
32131a9c
RK
5610/* Output insns to reload values in and out of the chosen reload regs. */
5611
5612static void
5613emit_reload_insns (insn)
5614 rtx insn;
5615{
5616 register int j;
546b63fb
RK
5617 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5618 rtx other_input_address_reload_insns = 0;
5619 rtx other_input_reload_insns = 0;
5620 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5621 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5622 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5623 rtx operand_reload_insns = 0;
893bc853 5624 rtx other_operand_reload_insns = 0;
32131a9c 5625 rtx following_insn = NEXT_INSN (insn);
a8efe40d 5626 rtx before_insn = insn;
32131a9c
RK
5627 int special;
5628 /* Values to be put in spill_reg_store are put here first. */
5629 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5630
546b63fb
RK
5631 for (j = 0; j < reload_n_operands; j++)
5632 input_reload_insns[j] = input_address_reload_insns[j]
5633 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5634
32131a9c
RK
5635 /* Now output the instructions to copy the data into and out of the
5636 reload registers. Do these in the order that the reloads were reported,
5637 since reloads of base and index registers precede reloads of operands
5638 and the operands may need the base and index registers reloaded. */
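  /* Example (illustrative): for a memory operand whose spilled base register
     must be reloaded, the RELOAD_FOR_INPUT_ADDRESS reload that brings the
     base back into a register is accumulated in input_address_reload_insns[]
     and ends up ahead of the RELOAD_FOR_INPUT reload that loads the operand
     itself.  */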
5639
5640 for (j = 0; j < n_reloads; j++)
5641 {
5642 register rtx old;
5643 rtx oldequiv_reg = 0;
73b2ad9e
RK
5644
5645 if (reload_spill_index[j] >= 0)
5646 new_spill_reg_store[reload_spill_index[j]] = 0;
32131a9c
RK
5647
5648 old = reload_in[j];
5649 if (old != 0 && ! reload_inherited[j]
5650 && ! rtx_equal_p (reload_reg_rtx[j], old)
5651 && reload_reg_rtx[j] != 0)
5652 {
5653 register rtx reloadreg = reload_reg_rtx[j];
5654 rtx oldequiv = 0;
5655 enum machine_mode mode;
546b63fb 5656 rtx *where;
32131a9c
RK
5657
5658 /* Determine the mode to reload in.
5659 This is very tricky because we have three to choose from.
5660 There is the mode the insn operand wants (reload_inmode[J]).
5661 There is the mode of the reload register RELOADREG.
5662 There is the intrinsic mode of the operand, which we could find
5663 by stripping some SUBREGs.
5664 It turns out that RELOADREG's mode is irrelevant:
5665 we can change that arbitrarily.
5666
5667 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5668 then the reload reg may not support QImode moves, so use SImode.
5669 If foo is in memory due to spilling a pseudo reg, this is safe,
5670 because the QImode value is in the least significant part of a
5671 slot big enough for a SImode. If foo is some other sort of
5672 memory reference, then it is impossible to reload this case,
5673 so previous passes had better make sure this never happens.
5674
5675 Then consider a one-word union which has SImode and one of its
5676 members is a float, being fetched as (SUBREG:SF union:SI).
5677 We must fetch that as SFmode because we could be loading into
5678 a float-only register. In this case OLD's mode is correct.
5679
5680 Consider an immediate integer: it has VOIDmode. Here we need
5681 to get a mode from something else.
5682
5683 In some cases, there is a fourth mode, the operand's
5684 containing mode. If the insn specifies a containing mode for
5685 this operand, it overrides all others.
5686
5687 I am not sure whether the algorithm here is always right,
5688 but it does the right things in those cases. */
5689
5690 mode = GET_MODE (old);
5691 if (mode == VOIDmode)
5692 mode = reload_inmode[j];
32131a9c
RK
5693
5694#ifdef SECONDARY_INPUT_RELOAD_CLASS
5695 /* If we need a secondary register for this operation, see if
5696 the value is already in a register in that class. Don't
5697 do this if the secondary register will be used as a scratch
5698 register. */
5699
b80bba27
RK
5700 if (reload_secondary_in_reload[j] >= 0
5701 && reload_secondary_in_icode[j] == CODE_FOR_nothing
58b1581b 5702 && optimize)
32131a9c
RK
5703 oldequiv
5704 = find_equiv_reg (old, insn,
b80bba27 5705 reload_reg_class[reload_secondary_in_reload[j]],
fb3821f7 5706 -1, NULL_PTR, 0, mode);
32131a9c
RK
5707#endif
5708
5709 /* If reloading from memory, see if there is a register
5710 that already holds the same value. If so, reload from there.
5711 We can pass 0 as the reload_reg_p argument because
5712 any other reload has either already been emitted,
5713 in which case find_equiv_reg will see the reload-insn,
5714 or has yet to be emitted, in which case it doesn't matter
5715 because we will use this equiv reg right away. */
5716
58b1581b 5717 if (oldequiv == 0 && optimize
32131a9c
RK
5718 && (GET_CODE (old) == MEM
5719 || (GET_CODE (old) == REG
5720 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5721 && reg_renumber[REGNO (old)] < 0)))
546b63fb 5722 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
fb3821f7 5723 -1, NULL_PTR, 0, mode);
32131a9c
RK
5724
5725 if (oldequiv)
5726 {
5727 int regno = true_regnum (oldequiv);
5728
5729 /* If OLDEQUIV is a spill register, don't use it for this
5730 if any other reload needs it at an earlier stage of this insn
a8fdc208 5731 or at this stage. */
32131a9c 5732 if (spill_reg_order[regno] >= 0
546b63fb
RK
5733 && (! reload_reg_free_p (regno, reload_opnum[j],
5734 reload_when_needed[j])
5735 || ! reload_reg_free_before_p (regno, reload_opnum[j],
32131a9c
RK
5736 reload_when_needed[j])))
5737 oldequiv = 0;
5738
5739 /* If OLDEQUIV is not a spill register,
5740 don't use it if any other reload wants it. */
5741 if (spill_reg_order[regno] < 0)
5742 {
5743 int k;
5744 for (k = 0; k < n_reloads; k++)
5745 if (reload_reg_rtx[k] != 0 && k != j
bfa30b22
RK
5746 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5747 oldequiv))
32131a9c
RK
5748 {
5749 oldequiv = 0;
5750 break;
5751 }
5752 }
546b63fb
RK
5753
5754 /* If it is no cheaper to copy from OLDEQUIV into the
5755 reload register than it would be to move from memory,
5756 don't use it. Likewise, if we need a secondary register
5757 or memory. */
5758
5759 if (oldequiv != 0
5760 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5761 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5762 reload_reg_class[j])
5763 >= MEMORY_MOVE_COST (mode)))
5764#ifdef SECONDARY_INPUT_RELOAD_CLASS
5765 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5766 mode, oldequiv)
5767 != NO_REGS)
5768#endif
5769#ifdef SECONDARY_MEMORY_NEEDED
5770 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5771 REGNO_REG_CLASS (regno),
5772 mode)
5773#endif
5774 ))
5775 oldequiv = 0;
32131a9c
RK
5776 }
5777
5778 if (oldequiv == 0)
5779 oldequiv = old;
5780 else if (GET_CODE (oldequiv) == REG)
5781 oldequiv_reg = oldequiv;
5782 else if (GET_CODE (oldequiv) == SUBREG)
5783 oldequiv_reg = SUBREG_REG (oldequiv);
5784
76182796
RK
5785 /* If we are reloading from a register that was recently stored in
5786 with an output-reload, see if we can prove there was
5787 actually no need to store the old value in it. */
5788
5789 if (optimize && GET_CODE (oldequiv) == REG
5790 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5791 && spill_reg_order[REGNO (oldequiv)] >= 0
c95c0732 5792 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
8aea655f 5793 && find_reg_note (insn, REG_DEAD, reload_in[j])
76182796 5794 /* This is unsafe if operand occurs more than once in current
b87b7ecd 5795 insn. Perhaps some occurrences weren't reloaded. */
c95c0732 5796 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
76182796
RK
5797 delete_output_reload
5798 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5799
32131a9c 5800 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
3abe6f90
RK
5801 then load RELOADREG from OLDEQUIV. Note that we cannot use
5802 gen_lowpart_common since it can do the wrong thing when
5803 RELOADREG has a multi-word mode. Note that RELOADREG
5804 must always be a REG here. */
32131a9c
RK
5805
5806 if (GET_MODE (reloadreg) != mode)
3abe6f90 5807 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
32131a9c
RK
5808 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5809 oldequiv = SUBREG_REG (oldequiv);
5810 if (GET_MODE (oldequiv) != VOIDmode
5811 && mode != GET_MODE (oldequiv))
5812 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5813
546b63fb 5814 /* Switch to the right place to emit the reload insns. */
32131a9c
RK
5815 switch (reload_when_needed[j])
5816 {
32131a9c 5817 case RELOAD_OTHER:
546b63fb
RK
5818 where = &other_input_reload_insns;
5819 break;
5820 case RELOAD_FOR_INPUT:
5821 where = &input_reload_insns[reload_opnum[j]];
32131a9c 5822 break;
546b63fb
RK
5823 case RELOAD_FOR_INPUT_ADDRESS:
5824 where = &input_address_reload_insns[reload_opnum[j]];
32131a9c 5825 break;
546b63fb
RK
5826 case RELOAD_FOR_OUTPUT_ADDRESS:
5827 where = &output_address_reload_insns[reload_opnum[j]];
32131a9c
RK
5828 break;
5829 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
5830 where = &operand_reload_insns;
5831 break;
893bc853
RK
5832 case RELOAD_FOR_OPADDR_ADDR:
5833 where = &other_operand_reload_insns;
5834 break;
546b63fb
RK
5835 case RELOAD_FOR_OTHER_ADDRESS:
5836 where = &other_input_address_reload_insns;
5837 break;
5838 default:
5839 abort ();
32131a9c
RK
5840 }
5841
546b63fb 5842 push_to_sequence (*where);
32131a9c
RK
5843 special = 0;
5844
5845 /* Auto-increment addresses must be reloaded in a special way. */
5846 if (GET_CODE (oldequiv) == POST_INC
5847 || GET_CODE (oldequiv) == POST_DEC
5848 || GET_CODE (oldequiv) == PRE_INC
5849 || GET_CODE (oldequiv) == PRE_DEC)
5850 {
5851		  /* We are not going to bother supporting the case where an
5852		     incremented register can't be copied directly from
5853 OLDEQUIV since this seems highly unlikely. */
b80bba27 5854 if (reload_secondary_in_reload[j] >= 0)
32131a9c
RK
5855 abort ();
5856 /* Prevent normal processing of this reload. */
5857 special = 1;
5858 /* Output a special code sequence for this case. */
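		  /* (Added note, an assumption about inc_for_reload's
		     behavior: for something like (post_inc (reg X)) it both
		     gets the proper value into RELOADREG and emits the
		     increment of X, a side effect that a plain move insn
		     could not express.)  */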
546b63fb 5859 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
32131a9c
RK
5860 }
5861
5862 /* If we are reloading a pseudo-register that was set by the previous
5863 insn, see if we can get rid of that pseudo-register entirely
5864 by redirecting the previous insn into our reload register. */
5865
5866 else if (optimize && GET_CODE (old) == REG
5867 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5868 && dead_or_set_p (insn, old)
5869 /* This is unsafe if some other reload
5870 uses the same reg first. */
546b63fb
RK
5871 && reload_reg_free_before_p (REGNO (reloadreg),
5872 reload_opnum[j],
5873 reload_when_needed[j]))
32131a9c
RK
5874 {
5875 rtx temp = PREV_INSN (insn);
5876 while (temp && GET_CODE (temp) == NOTE)
5877 temp = PREV_INSN (temp);
5878 if (temp
5879 && GET_CODE (temp) == INSN
5880 && GET_CODE (PATTERN (temp)) == SET
5881 && SET_DEST (PATTERN (temp)) == old
5882 /* Make sure we can access insn_operand_constraint. */
5883 && asm_noperands (PATTERN (temp)) < 0
5884 /* This is unsafe if prev insn rejects our reload reg. */
5885 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5886 reloadreg)
5887 /* This is unsafe if operand occurs more than once in current
5888 insn. Perhaps some occurrences aren't reloaded. */
5889 && count_occurrences (PATTERN (insn), old) == 1
5890 /* Don't risk splitting a matching pair of operands. */
5891 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5892 {
5893 /* Store into the reload register instead of the pseudo. */
5894 SET_DEST (PATTERN (temp)) = reloadreg;
5895 /* If these are the only uses of the pseudo reg,
5896 pretend for GDB it lives in the reload reg we used. */
5897 if (reg_n_deaths[REGNO (old)] == 1
5898 && reg_n_sets[REGNO (old)] == 1)
5899 {
5900 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5901 alter_reg (REGNO (old), -1);
5902 }
5903 special = 1;
5904 }
5905 }
5906
546b63fb
RK
5907 /* We can't do that, so output an insn to load RELOADREG. */
5908
32131a9c
RK
5909 if (! special)
5910 {
5911#ifdef SECONDARY_INPUT_RELOAD_CLASS
5912 rtx second_reload_reg = 0;
5913 enum insn_code icode;
5914
5915 /* If we have a secondary reload, pick up the secondary register
d445b551
RK
5916 and icode, if any. If OLDEQUIV and OLD are different or
5917 if this is an in-out reload, recompute whether or not we
5918 still need a secondary register and what the icode should
5919 be. If we still need a secondary register and the class or
5920 icode is different, go back to reloading from OLD if using
5921 OLDEQUIV means that we got the wrong type of register. We
5922 cannot have different class or icode due to an in-out reload
5923 because we don't make such reloads when both the input and
5924 output need secondary reload registers. */
32131a9c 5925
b80bba27 5926 if (reload_secondary_in_reload[j] >= 0)
32131a9c 5927 {
b80bba27 5928 int secondary_reload = reload_secondary_in_reload[j];
1554c2c6
RK
5929 rtx real_oldequiv = oldequiv;
5930 rtx real_old = old;
5931
5932 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5933 and similarly for OLD.
b80bba27 5934 See comments in get_secondary_reload in reload.c. */
1554c2c6
RK
5935 if (GET_CODE (oldequiv) == REG
5936 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5937 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5938 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5939
5940 if (GET_CODE (old) == REG
5941 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5942 && reg_equiv_mem[REGNO (old)] != 0)
5943 real_old = reg_equiv_mem[REGNO (old)];
5944
32131a9c 5945 second_reload_reg = reload_reg_rtx[secondary_reload];
b80bba27 5946 icode = reload_secondary_in_icode[j];
32131a9c 5947
d445b551
RK
5948 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5949 || (reload_in[j] != 0 && reload_out[j] != 0))
32131a9c
RK
5950 {
5951 enum reg_class new_class
5952 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
1554c2c6 5953 mode, real_oldequiv);
32131a9c
RK
5954
5955 if (new_class == NO_REGS)
5956 second_reload_reg = 0;
5957 else
5958 {
5959 enum insn_code new_icode;
5960 enum machine_mode new_mode;
5961
5962 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5963 REGNO (second_reload_reg)))
1554c2c6 5964 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
5965 else
5966 {
5967 new_icode = reload_in_optab[(int) mode];
5968 if (new_icode != CODE_FOR_nothing
5969 && ((insn_operand_predicate[(int) new_icode][0]
a8fdc208 5970 && ! ((*insn_operand_predicate[(int) new_icode][0])
32131a9c 5971 (reloadreg, mode)))
a8fdc208
RS
5972 || (insn_operand_predicate[(int) new_icode][1]
5973 && ! ((*insn_operand_predicate[(int) new_icode][1])
1554c2c6 5974 (real_oldequiv, mode)))))
32131a9c
RK
5975 new_icode = CODE_FOR_nothing;
5976
5977 if (new_icode == CODE_FOR_nothing)
5978 new_mode = mode;
5979 else
196ddf8a 5980 new_mode = insn_operand_mode[(int) new_icode][2];
32131a9c
RK
5981
5982 if (GET_MODE (second_reload_reg) != new_mode)
5983 {
5984 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5985 new_mode))
1554c2c6 5986 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
5987 else
5988 second_reload_reg
3aaa90c7
MM
5989 = gen_rtx (REG, new_mode,
5990 REGNO (second_reload_reg));
32131a9c
RK
5991 }
5992 }
5993 }
5994 }
5995
5996 /* If we still need a secondary reload register, check
5997 to see if it is being used as a scratch or intermediate
1554c2c6
RK
5998 register and generate code appropriately. If we need
5999 a scratch register, use REAL_OLDEQUIV since the form of
6000 the insn may depend on the actual address if it is
6001 a MEM. */
32131a9c
RK
6002
6003 if (second_reload_reg)
6004 {
6005 if (icode != CODE_FOR_nothing)
6006 {
cf19d2a9
JL
6007 rtx pat;
6008#ifdef SECONDARY_MEMORY_NEEDED
6009 /* If we need a memory location to do the move, do
6010 it that way. */
6011 if (GET_CODE (real_oldequiv) == REG
6012 && REGNO (real_oldequiv) < FIRST_PSEUDO_REGISTER
6013 && SECONDARY_MEMORY_NEEDED
6014 (REGNO_REG_CLASS (REGNO (real_oldequiv)),
6015 REGNO_REG_CLASS (REGNO (second_reload_reg)),
6016 GET_MODE (second_reload_reg)))
6017 {
6018 /* Get the memory to use and rewrite both
6019 registers to its mode. */
6020 rtx loc
6021 = get_secondary_mem (real_oldequiv,
6022 GET_MODE (second_reload_reg),
6023 reload_opnum[j],
6024 reload_when_needed[j]);
6025 rtx tmp_reloadreg;
6026
6027 if (GET_MODE (loc)
6028 != GET_MODE (second_reload_reg))
6029 second_reload_reg
6030 = gen_rtx (REG,
6031 GET_MODE (loc),
6032 REGNO (second_reload_reg));
6033
6034 if (GET_MODE (loc) != GET_MODE (real_oldequiv))
6035 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6036 REGNO (real_oldequiv));
6037 else
6038 tmp_reloadreg = real_oldequiv;
6039
6040 emit_move_insn (loc, tmp_reloadreg);
6041 emit_move_insn (second_reload_reg, loc);
6042 pat = gen_move_insn (reloadreg, second_reload_reg);
6043
6044 }
6045 else
6046#endif
6047 pat = GEN_FCN (icode) (reloadreg,
6048 real_oldequiv,
6049 second_reload_reg);
6050 emit_insn (pat);
32131a9c
RK
6051 special = 1;
6052 }
6053 else
6054 {
6055 /* See if we need a scratch register to load the
6056 intermediate register (a tertiary reload). */
6057 enum insn_code tertiary_icode
b80bba27 6058 = reload_secondary_in_icode[secondary_reload];
32131a9c
RK
6059
6060 if (tertiary_icode != CODE_FOR_nothing)
6061 {
6062 rtx third_reload_reg
b80bba27 6063 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
32131a9c 6064
546b63fb
RK
6065 emit_insn ((GEN_FCN (tertiary_icode)
6066 (second_reload_reg, real_oldequiv,
6067 third_reload_reg)));
32131a9c
RK
6068 }
6069 else
546b63fb
RK
6070 gen_input_reload (second_reload_reg, oldequiv,
6071 reload_opnum[j],
6072 reload_when_needed[j]);
6073
6074 oldequiv = second_reload_reg;
32131a9c
RK
6075 }
6076 }
6077 }
6078#endif
6079
2d182c6f 6080 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
546b63fb
RK
6081 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
6082 reload_when_needed[j]);
32131a9c
RK
6083
6084#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6085 /* We may have to make a REG_DEAD note for the secondary reload
6086 register in the insns we just made. Find the last insn that
6087 mentioned the register. */
6088 if (! special && second_reload_reg
6089 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6090 {
6091 rtx prev;
6092
546b63fb 6093 for (prev = get_last_insn (); prev;
32131a9c
RK
6094 prev = PREV_INSN (prev))
6095	    if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
bfa30b22
RK
6096 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6097 PATTERN (prev)))
32131a9c
RK
6098 {
6099 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6100 second_reload_reg,
6101 REG_NOTES (prev));
6102 break;
6103 }
6104 }
6105#endif
6106 }
6107
546b63fb
RK
6108 /* End this sequence. */
6109 *where = get_insns ();
6110 end_sequence ();
32131a9c
RK
6111 }
6112
6113 /* Add a note saying the input reload reg
6114 dies in this insn, if anyone cares. */
6115#ifdef PRESERVE_DEATH_INFO_REGNO_P
6116 if (old != 0
6117 && reload_reg_rtx[j] != old
6118 && reload_reg_rtx[j] != 0
6119 && reload_out[j] == 0
6120 && ! reload_inherited[j]
6121 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6122 {
6123 register rtx reloadreg = reload_reg_rtx[j];
6124
a8fdc208 6125#if 0
32131a9c
RK
6126 /* We can't abort here because we need to support this for sched.c.
6127 It's not terrible to miss a REG_DEAD note, but we should try
6128 to figure out how to do this correctly. */
6129 /* The code below is incorrect for address-only reloads. */
6130 if (reload_when_needed[j] != RELOAD_OTHER
6131 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6132 abort ();
6133#endif
6134
6135 /* Add a death note to this insn, for an input reload. */
6136
6137 if ((reload_when_needed[j] == RELOAD_OTHER
6138 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6139 && ! dead_or_set_p (insn, reloadreg))
6140 REG_NOTES (insn)
6141 = gen_rtx (EXPR_LIST, REG_DEAD,
6142 reloadreg, REG_NOTES (insn));
6143 }
6144
6145 /* When we inherit a reload, the last marked death of the reload reg
6146 may no longer really be a death. */
6147 if (reload_reg_rtx[j] != 0
6148 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6149 && reload_inherited[j])
6150 {
6151 /* Handle inheriting an output reload.
6152 Remove the death note from the output reload insn. */
6153 if (reload_spill_index[j] >= 0
6154 && GET_CODE (reload_in[j]) == REG
6155 && spill_reg_store[reload_spill_index[j]] != 0
6156 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6157 REG_DEAD, REGNO (reload_reg_rtx[j])))
6158 remove_death (REGNO (reload_reg_rtx[j]),
6159 spill_reg_store[reload_spill_index[j]]);
6160 /* Likewise for input reloads that were inherited. */
6161 else if (reload_spill_index[j] >= 0
6162 && GET_CODE (reload_in[j]) == REG
6163 && spill_reg_store[reload_spill_index[j]] == 0
6164 && reload_inheritance_insn[j] != 0
a8fdc208 6165 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
32131a9c
RK
6166 REGNO (reload_reg_rtx[j])))
6167 remove_death (REGNO (reload_reg_rtx[j]),
6168 reload_inheritance_insn[j]);
6169 else
6170 {
6171 rtx prev;
6172
6173 /* We got this register from find_equiv_reg.
6174 Search back for its last death note and get rid of it.
6175 But don't search back too far.
6176 Don't go past a place where this reg is set,
6177 since a death note before that remains valid. */
6178 for (prev = PREV_INSN (insn);
6179 prev && GET_CODE (prev) != CODE_LABEL;
6180 prev = PREV_INSN (prev))
6181 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6182 && dead_or_set_p (prev, reload_reg_rtx[j]))
6183 {
6184 if (find_regno_note (prev, REG_DEAD,
6185 REGNO (reload_reg_rtx[j])))
6186 remove_death (REGNO (reload_reg_rtx[j]), prev);
6187 break;
6188 }
6189 }
6190 }
6191
6192 /* We might have used find_equiv_reg above to choose an alternate
6193 place from which to reload. If so, and it died, we need to remove
6194 that death and move it to one of the insns we just made. */
6195
6196 if (oldequiv_reg != 0
6197 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6198 {
6199 rtx prev, prev1;
6200
6201 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6202 prev = PREV_INSN (prev))
6203 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6204 && dead_or_set_p (prev, oldequiv_reg))
6205 {
6206 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6207 {
6208 for (prev1 = this_reload_insn;
6209 prev1; prev1 = PREV_INSN (prev1))
 6210		    if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
bfa30b22
RK
6211 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6212 PATTERN (prev1)))
32131a9c
RK
6213 {
6214 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6215 oldequiv_reg,
6216 REG_NOTES (prev1));
6217 break;
6218 }
6219 remove_death (REGNO (oldequiv_reg), prev);
6220 }
6221 break;
6222 }
6223 }
6224#endif
6225
6226 /* If we are reloading a register that was recently stored in with an
6227 output-reload, see if we can prove there was
6228 actually no need to store the old value in it. */
6229
6230 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
546b63fb 6231 && reload_in[j] != 0
32131a9c
RK
6232 && GET_CODE (reload_in[j]) == REG
6233#if 0
6234 /* There doesn't seem to be any reason to restrict this to pseudos
6235 and doing so loses in the case where we are copying from a
6236 register of the wrong class. */
6237 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6238#endif
6239 && spill_reg_store[reload_spill_index[j]] != 0
546b63fb
RK
6240 /* This is unsafe if some other reload uses the same reg first. */
6241 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6242 reload_opnum[j], reload_when_needed[j])
32131a9c
RK
6243 && dead_or_set_p (insn, reload_in[j])
6244 /* This is unsafe if operand occurs more than once in current
6245 insn. Perhaps some occurrences weren't reloaded. */
6246 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6247 delete_output_reload (insn, j,
6248 spill_reg_store[reload_spill_index[j]]);
6249
6250 /* Input-reloading is done. Now do output-reloading,
6251 storing the value from the reload-register after the main insn
6252 if reload_out[j] is nonzero.
6253
6254 ??? At some point we need to support handling output reloads of
6255 JUMP_INSNs or insns that set cc0. */
6256 old = reload_out[j];
6257 if (old != 0
6258 && reload_reg_rtx[j] != old
6259 && reload_reg_rtx[j] != 0)
6260 {
6261 register rtx reloadreg = reload_reg_rtx[j];
6262 register rtx second_reloadreg = 0;
32131a9c
RK
6263 rtx note, p;
6264 enum machine_mode mode;
6265 int special = 0;
6266
6267 /* An output operand that dies right away does need a reload,
6268 but need not be copied from it. Show the new location in the
6269 REG_UNUSED note. */
6270 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6271 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6272 {
6273 XEXP (note, 0) = reload_reg_rtx[j];
6274 continue;
6275 }
6276 else if (GET_CODE (old) == SCRATCH)
6277 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6278 but we don't want to make an output reload. */
6279 continue;
6280
6281#if 0
6282 /* Strip off of OLD any size-increasing SUBREGs such as
6283 (SUBREG:SI foo:QI 0). */
6284
6285 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6286 && (GET_MODE_SIZE (GET_MODE (old))
6287 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6288 old = SUBREG_REG (old);
6289#endif
6290
 6291	  /* If this is a JUMP_INSN, we can't support output reloads yet.  */
6292 if (GET_CODE (insn) == JUMP_INSN)
6293 abort ();
6294
546b63fb
RK
6295 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6296
32131a9c
RK
6297 /* Determine the mode to reload in.
6298 See comments above (for input reloading). */
6299
6300 mode = GET_MODE (old);
6301 if (mode == VOIDmode)
79a365a7
RS
6302 {
6303 /* VOIDmode should never happen for an output. */
6304 if (asm_noperands (PATTERN (insn)) < 0)
6305 /* It's the compiler's fault. */
a89b2cc4 6306 fatal_insn ("VOIDmode on an output", insn);
79a365a7
RS
6307 error_for_asm (insn, "output operand is constant in `asm'");
6308 /* Prevent crash--use something we know is valid. */
6309 mode = word_mode;
6310 old = gen_rtx (REG, mode, REGNO (reloadreg));
6311 }
32131a9c 6312
32131a9c 6313 if (GET_MODE (reloadreg) != mode)
3abe6f90 6314 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
32131a9c
RK
6315
6316#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6317
6318 /* If we need two reload regs, set RELOADREG to the intermediate
6319 one, since it will be stored into OUT. We might need a secondary
6320 register only for an input reload, so check again here. */
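	  /* In rough outline (a sketch, not extra semantics): when a
	     secondary-output icode exists, one insn generated from it
	     stores into OLD directly and the secondary register serves
	     only as a scratch, so SPECIAL suppresses the ordinary final
	     store.  Otherwise the value is moved second_reloadreg ->
	     intermediate register (possibly through a tertiary register
	     or a memory location), and the "store OLD from RELOADREG"
	     insn emitted further below completes the reload.  */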
6321
b80bba27 6322 if (reload_secondary_out_reload[j] >= 0)
32131a9c 6323 {
1554c2c6 6324 rtx real_old = old;
32131a9c 6325
1554c2c6
RK
6326 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6327 && reg_equiv_mem[REGNO (old)] != 0)
6328 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 6329
1554c2c6
RK
 6330	      if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6331 mode, real_old)
6332 != NO_REGS))
6333 {
6334 second_reloadreg = reloadreg;
b80bba27 6335 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
32131a9c 6336
1554c2c6
RK
6337 /* See if RELOADREG is to be used as a scratch register
6338 or as an intermediate register. */
b80bba27 6339 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
32131a9c 6340 {
b80bba27 6341 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
546b63fb 6342 (real_old, second_reloadreg, reloadreg)));
1554c2c6 6343 special = 1;
32131a9c
RK
6344 }
6345 else
1554c2c6
RK
6346 {
6347 /* See if we need both a scratch and intermediate reload
6348 register. */
b80bba27 6349 int secondary_reload = reload_secondary_out_reload[j];
1554c2c6 6350 enum insn_code tertiary_icode
b80bba27 6351 = reload_secondary_out_icode[secondary_reload];
1554c2c6 6352 rtx pat;
32131a9c 6353
1554c2c6
RK
6354 if (GET_MODE (reloadreg) != mode)
6355 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6356
6357 if (tertiary_icode != CODE_FOR_nothing)
6358 {
6359 rtx third_reloadreg
b80bba27 6360 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
1554c2c6
RK
6361 pat = (GEN_FCN (tertiary_icode)
6362 (reloadreg, second_reloadreg, third_reloadreg));
6363 }
9ad5f9f6
JW
6364#ifdef SECONDARY_MEMORY_NEEDED
6365 /* If we need a memory location to do the move, do it that way. */
6366 else if (GET_CODE (reloadreg) == REG
6367 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
6368 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
6369 REGNO_REG_CLASS (REGNO (second_reloadreg)),
6370 GET_MODE (second_reloadreg)))
6371 {
6372 /* Get the memory to use and rewrite both registers
6373 to its mode. */
546b63fb
RK
6374 rtx loc
6375 = get_secondary_mem (reloadreg,
6376 GET_MODE (second_reloadreg),
6377 reload_opnum[j],
6378 reload_when_needed[j]);
9ad5f9f6
JW
6379 rtx tmp_reloadreg;
6380
6381 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6382 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6383 REGNO (second_reloadreg));
6384
6385 if (GET_MODE (loc) != GET_MODE (reloadreg))
6386 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6387 REGNO (reloadreg));
6388 else
6389 tmp_reloadreg = reloadreg;
6390
546b63fb 6391 emit_move_insn (loc, second_reloadreg);
9ad5f9f6
JW
6392 pat = gen_move_insn (tmp_reloadreg, loc);
6393 }
6394#endif
1554c2c6
RK
6395 else
6396 pat = gen_move_insn (reloadreg, second_reloadreg);
6397
546b63fb 6398 emit_insn (pat);
1554c2c6 6399 }
32131a9c
RK
6400 }
6401 }
6402#endif
6403
6404 /* Output the last reload insn. */
6405 if (! special)
0dadecf6
RK
6406 {
6407#ifdef SECONDARY_MEMORY_NEEDED
6408 /* If we need a memory location to do the move, do it that way. */
6409 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6410 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6411 REGNO_REG_CLASS (REGNO (reloadreg)),
6412 GET_MODE (reloadreg)))
6413 {
6414 /* Get the memory to use and rewrite both registers to
6415 its mode. */
546b63fb
RK
6416 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6417 reload_opnum[j],
6418 reload_when_needed[j]);
0dadecf6
RK
6419
6420 if (GET_MODE (loc) != GET_MODE (reloadreg))
6421 reloadreg = gen_rtx (REG, GET_MODE (loc),
6422 REGNO (reloadreg));
6423
6424 if (GET_MODE (loc) != GET_MODE (old))
6425 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6426
546b63fb
RK
6427 emit_insn (gen_move_insn (loc, reloadreg));
6428 emit_insn (gen_move_insn (old, loc));
0dadecf6
RK
6429 }
6430 else
6431#endif
546b63fb 6432 emit_insn (gen_move_insn (old, reloadreg));
0dadecf6 6433 }
32131a9c
RK
6434
6435#ifdef PRESERVE_DEATH_INFO_REGNO_P
6436 /* If final will look at death notes for this reg,
6437 put one on the last output-reload insn to use it. Similarly
6438 for any secondary register. */
6439 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
546b63fb 6440 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6441 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
6442 && reg_overlap_mentioned_for_reload_p (reloadreg,
6443 PATTERN (p)))
32131a9c
RK
6444 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6445 reloadreg, REG_NOTES (p));
6446
6447#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6448 if (! special
6449 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
546b63fb 6450 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6451 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
6452 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6453 PATTERN (p)))
32131a9c
RK
6454 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6455 second_reloadreg, REG_NOTES (p));
6456#endif
6457#endif
6458 /* Look at all insns we emitted, just to be safe. */
546b63fb 6459 for (p = get_insns (); p; p = NEXT_INSN (p))
32131a9c
RK
6460 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6461 {
6462 /* If this output reload doesn't come from a spill reg,
6463 clear any memory of reloaded copies of the pseudo reg.
6464 If this output reload comes from a spill reg,
6465 reg_has_output_reload will make this do nothing. */
6466 note_stores (PATTERN (p), forget_old_reloads_1);
6467
73b2ad9e
RK
6468 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6469 && reload_spill_index[j] >= 0)
6470 new_spill_reg_store[reload_spill_index[j]] = p;
32131a9c
RK
6471 }
6472
546b63fb
RK
6473 output_reload_insns[reload_opnum[j]] = get_insns ();
6474 end_sequence ();
32131a9c 6475 }
32131a9c
RK
6476 }
6477
546b63fb
RK
6478 /* Now write all the insns we made for reloads in the order expected by
6479 the allocation functions. Prior to the insn being reloaded, we write
6480 the following reloads:
6481
6482 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6483
6484 RELOAD_OTHER reloads.
6485
6486 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6487 the RELOAD_FOR_INPUT reload for the operand.
6488
893bc853
RK
6489 RELOAD_FOR_OPADDR_ADDRS reloads.
6490
546b63fb
RK
6491 RELOAD_FOR_OPERAND_ADDRESS reloads.
6492
6493 After the insn being reloaded, we write the following:
6494
6495 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6496 the RELOAD_FOR_OUTPUT reload for that operand. */
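   /* Schematically, for an insn with one input operand 0 and one output
      operand 1, the emitted stream around INSN looks like this (reload
      kinds that are not present simply contribute nothing):

	 RELOAD_FOR_OTHER_ADDRESS reloads
	 RELOAD_OTHER reloads
	 RELOAD_FOR_INPUT_ADDRESS reloads for operand 0
	 RELOAD_FOR_INPUT reload for operand 0
	 RELOAD_FOR_OPADDR_ADDRS reloads
	 RELOAD_FOR_OPERAND_ADDRESS reloads
	 INSN
	 RELOAD_FOR_OUTPUT_ADDRESS reloads for operand 1
	 RELOAD_FOR_OUTPUT reload for operand 1  */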
6497
6498 emit_insns_before (other_input_address_reload_insns, before_insn);
6499 emit_insns_before (other_input_reload_insns, before_insn);
6500
6501 for (j = 0; j < reload_n_operands; j++)
6502 {
6503 emit_insns_before (input_address_reload_insns[j], before_insn);
6504 emit_insns_before (input_reload_insns[j], before_insn);
6505 }
6506
893bc853 6507 emit_insns_before (other_operand_reload_insns, before_insn);
546b63fb
RK
6508 emit_insns_before (operand_reload_insns, before_insn);
6509
6510 for (j = 0; j < reload_n_operands; j++)
6511 {
6512 emit_insns_before (output_address_reload_insns[j], following_insn);
6513 emit_insns_before (output_reload_insns[j], following_insn);
6514 }
6515
32131a9c
RK
6516 /* Move death notes from INSN
6517 to output-operand-address and output reload insns. */
6518#ifdef PRESERVE_DEATH_INFO_REGNO_P
6519 {
6520 rtx insn1;
6521 /* Loop over those insns, last ones first. */
6522 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6523 insn1 = PREV_INSN (insn1))
6524 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6525 {
6526 rtx source = SET_SRC (PATTERN (insn1));
6527 rtx dest = SET_DEST (PATTERN (insn1));
6528
6529 /* The note we will examine next. */
6530 rtx reg_notes = REG_NOTES (insn);
6531 /* The place that pointed to this note. */
6532 rtx *prev_reg_note = &REG_NOTES (insn);
6533
6534 /* If the note is for something used in the source of this
6535 reload insn, or in the output address, move the note. */
6536 while (reg_notes)
6537 {
6538 rtx next_reg_notes = XEXP (reg_notes, 1);
6539 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6540 && GET_CODE (XEXP (reg_notes, 0)) == REG
6541 && ((GET_CODE (dest) != REG
bfa30b22
RK
6542 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6543 dest))
6544 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6545 source)))
32131a9c
RK
6546 {
6547 *prev_reg_note = next_reg_notes;
6548 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6549 REG_NOTES (insn1) = reg_notes;
6550 }
6551 else
6552 prev_reg_note = &XEXP (reg_notes, 1);
6553
6554 reg_notes = next_reg_notes;
6555 }
6556 }
6557 }
6558#endif
6559
6560 /* For all the spill regs newly reloaded in this instruction,
6561 record what they were reloaded from, so subsequent instructions
d445b551
RK
6562 can inherit the reloads.
6563
6564 Update spill_reg_store for the reloads of this insn.
e9e79d69 6565 Copy the elements that were updated in the loop above. */
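 /* For instance (register numbers here are hypothetical): if pseudo 104
    was just reloaded into spill reg 3 for this insn, reg_last_reload_reg[104]
    becomes (reg 3) and the reg_reloaded_contents entry for spill reg 3
    records 104, so a later insn that needs pseudo 104 may inherit the copy
    already sitting in hard reg 3 instead of reloading it again.  */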
32131a9c
RK
6566
6567 for (j = 0; j < n_reloads; j++)
6568 {
6569 register int r = reload_order[j];
6570 register int i = reload_spill_index[r];
6571
6572 /* I is nonneg if this reload used one of the spill regs.
6573 If reload_reg_rtx[r] is 0, this is an optional reload
546b63fb
RK
6574 that we opted to ignore.
6575
6576 Also ignore reloads that don't reach the end of the insn,
6577 since we will eventually see the one that does. */
d445b551 6578
546b63fb
RK
6579 if (i >= 0 && reload_reg_rtx[r] != 0
6580 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6581 reload_when_needed[r]))
32131a9c
RK
6582 {
6583 /* First, clear out memory of what used to be in this spill reg.
6584 If consecutive registers are used, clear them all. */
6585 int nr
6586 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6587 int k;
6588
6589 for (k = 0; k < nr; k++)
6590 {
6591 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6592 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6593 }
6594
6595 /* Maybe the spill reg contains a copy of reload_out. */
6596 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6597 {
6598 register int nregno = REGNO (reload_out[r]);
d08ea79f
RK
6599 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6600 : HARD_REGNO_NREGS (nregno,
6601 GET_MODE (reload_reg_rtx[r])));
d445b551
RK
6602
6603 spill_reg_store[i] = new_spill_reg_store[i];
32131a9c 6604 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 6605
d08ea79f
RK
6606 /* If NREGNO is a hard register, it may occupy more than
6607 one register. If it does, say what is in the
6608 rest of the registers assuming that both registers
6609 agree on how many words the object takes. If not,
6610 invalidate the subsequent registers. */
6611
6612 if (nregno < FIRST_PSEUDO_REGISTER)
6613 for (k = 1; k < nnr; k++)
6614 reg_last_reload_reg[nregno + k]
74eb5c52
DE
6615 = (nr == nnr ? gen_rtx (REG,
6616 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
d08ea79f
RK
6617 REGNO (reload_reg_rtx[r]) + k)
6618 : 0);
6619
6620 /* Now do the inverse operation. */
32131a9c
RK
6621 for (k = 0; k < nr; k++)
6622 {
6623 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
d08ea79f
RK
6624 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6625 : nregno + k);
32131a9c
RK
6626 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6627 }
6628 }
d445b551 6629
2c9ce2ef
RK
6630 /* Maybe the spill reg contains a copy of reload_in. Only do
6631 something if there will not be an output reload for
6632 the register being reloaded. */
32131a9c
RK
6633 else if (reload_out[r] == 0
6634 && reload_in[r] != 0
2c9ce2ef
RK
6635 && ((GET_CODE (reload_in[r]) == REG
6636 && ! reg_has_output_reload[REGNO (reload_in[r])]
6637 || (GET_CODE (reload_in_reg[r]) == REG
6638 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
32131a9c
RK
6639 {
6640 register int nregno;
d08ea79f
RK
6641 int nnr;
6642
32131a9c
RK
6643 if (GET_CODE (reload_in[r]) == REG)
6644 nregno = REGNO (reload_in[r]);
6645 else
6646 nregno = REGNO (reload_in_reg[r]);
6647
d08ea79f
RK
6648 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6649 : HARD_REGNO_NREGS (nregno,
6650 GET_MODE (reload_reg_rtx[r])));
6651
546b63fb 6652 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 6653
d08ea79f
RK
6654 if (nregno < FIRST_PSEUDO_REGISTER)
6655 for (k = 1; k < nnr; k++)
6656 reg_last_reload_reg[nregno + k]
74eb5c52
DE
6657 = (nr == nnr ? gen_rtx (REG,
6658 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
d08ea79f
RK
6659 REGNO (reload_reg_rtx[r]) + k)
6660 : 0);
6661
546b63fb
RK
6662 /* Unless we inherited this reload, show we haven't
6663 recently done a store. */
6664 if (! reload_inherited[r])
6665 spill_reg_store[i] = 0;
d445b551 6666
546b63fb
RK
6667 for (k = 0; k < nr; k++)
6668 {
6669 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
d08ea79f
RK
6670 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6671 : nregno + k);
546b63fb
RK
6672 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6673 = insn;
32131a9c
RK
6674 }
6675 }
6676 }
6677
6678 /* The following if-statement was #if 0'd in 1.34 (or before...).
6679 It's reenabled in 1.35 because supposedly nothing else
6680 deals with this problem. */
6681
6682 /* If a register gets output-reloaded from a non-spill register,
6683 that invalidates any previous reloaded copy of it.
6684 But forget_old_reloads_1 won't get to see it, because
6685 it thinks only about the original insn. So invalidate it here. */
6686 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6687 {
6688 register int nregno = REGNO (reload_out[r]);
36281332
RK
6689 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6690
6691 while (num_regs-- > 0)
6692 reg_last_reload_reg[nregno + num_regs] = 0;
32131a9c
RK
6693 }
6694 }
6695}
6696\f
546b63fb
RK
6697/* Emit code to perform an input reload of IN to RELOADREG. IN is from
6698 operand OPNUM with reload type TYPE.
6699
3c3eeea6 6700 Returns first insn emitted. */
32131a9c
RK
6701
6702rtx
546b63fb 6703gen_input_reload (reloadreg, in, opnum, type)
32131a9c
RK
6704 rtx reloadreg;
6705 rtx in;
546b63fb
RK
6706 int opnum;
6707 enum reload_type type;
32131a9c 6708{
546b63fb 6709 rtx last = get_last_insn ();
32131a9c 6710
a8fdc208 6711 /* How to do this reload can get quite tricky. Normally, we are being
32131a9c
RK
6712 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6713 register that didn't get a hard register. In that case we can just
6714 call emit_move_insn.
6715
a7fd196c
JW
6716 We can also be asked to reload a PLUS that adds a register or a MEM to
6717 another register, constant or MEM. This can occur during frame pointer
6718 elimination and while reloading addresses. This case is handled by
6719 trying to emit a single insn to perform the add. If it is not valid,
6720 we use a two insn sequence.
32131a9c
RK
6721
6722 Finally, we could be called to handle an 'o' constraint by putting
6723 an address into a register. In that case, we first try to do this
6724 with a named pattern of "reload_load_address". If no such pattern
6725 exists, we just emit a SET insn and hope for the best (it will normally
6726 be valid on machines that use 'o').
6727
6728 This entire process is made complex because reload will never
6729 process the insns we generate here and so we must ensure that
6730 they will fit their constraints and also by the fact that parts of
6731 IN might be being reloaded separately and replaced with spill registers.
6732 Because of this, we are, in some sense, just guessing the right approach
6733 here. The one listed above seems to work.
6734
6735 ??? At some point, this whole thing needs to be rethought. */
6736
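  /* A sketch of the PLUS case handled first below (register numbers are
     hypothetical): to reload

	(plus:SI (reg:SI 6) (const_int 16))

     into (reg:SI 2), we first emit

	(set (reg:SI 2) (plus:SI (reg:SI 6) (const_int 16)))

     and keep it if recog_memoized and constrain_operands accept it;
     otherwise that insn is deleted and we fall back to moving one operand
     into the reload register and adding the other with gen_add2_insn.  */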
6737 if (GET_CODE (in) == PLUS
a7fd196c
JW
6738 && (GET_CODE (XEXP (in, 0)) == REG
6739 || GET_CODE (XEXP (in, 0)) == MEM)
6740 && (GET_CODE (XEXP (in, 1)) == REG
6741 || CONSTANT_P (XEXP (in, 1))
6742 || GET_CODE (XEXP (in, 1)) == MEM))
32131a9c 6743 {
a7fd196c
JW
6744 /* We need to compute the sum of a register or a MEM and another
6745 register, constant, or MEM, and put it into the reload
3002e160
JW
6746 register. The best possible way of doing this is if the machine
6747 has a three-operand ADD insn that accepts the required operands.
32131a9c
RK
6748
6749 The simplest approach is to try to generate such an insn and see if it
6750 is recognized and matches its constraints. If so, it can be used.
6751
6752 It might be better not to actually emit the insn unless it is valid,
0009eff2 6753 but we need to pass the insn as an operand to `recog' and
b36d7dd7 6754 `insn_extract' and it is simpler to emit and then delete the insn if
0009eff2 6755 not valid than to dummy things up. */
a8fdc208 6756
af929c62 6757 rtx op0, op1, tem, insn;
32131a9c 6758 int code;
a8fdc208 6759
af929c62
RK
6760 op0 = find_replacement (&XEXP (in, 0));
6761 op1 = find_replacement (&XEXP (in, 1));
6762
32131a9c
RK
6763 /* Since constraint checking is strict, commutativity won't be
6764 checked, so we need to do that here to avoid spurious failure
6765 if the add instruction is two-address and the second operand
6766 of the add is the same as the reload reg, which is frequently
6767 the case. If the insn would be A = B + A, rearrange it so
6768 it will be A = A + B as constrain_operands expects. */
a8fdc208 6769
32131a9c
RK
6770 if (GET_CODE (XEXP (in, 1)) == REG
6771 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
af929c62
RK
6772 tem = op0, op0 = op1, op1 = tem;
6773
6774 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6775 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
32131a9c 6776
546b63fb 6777 insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
32131a9c
RK
6778 code = recog_memoized (insn);
6779
6780 if (code >= 0)
6781 {
6782 insn_extract (insn);
 6783	  /* We want constrain_operands to treat this insn strictly in
6784 its validity determination, i.e., the way it would after reload
6785 has completed. */
6786 if (constrain_operands (code, 1))
6787 return insn;
6788 }
6789
546b63fb 6790 delete_insns_since (last);
32131a9c
RK
6791
 6792      /* If that failed, we must use a conservative two-insn sequence:
 6793	 use move to copy the constant, MEM, or pseudo register to the reload
af929c62
RK
6794 register since "move" will be able to handle an arbitrary operand,
6795 unlike add which can't, in general. Then add the registers.
32131a9c
RK
6796
6797 If there is another way to do this for a specific machine, a
6798 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6799 we emit below. */
6800
af929c62
RK
6801 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6802 || (GET_CODE (op1) == REG
6803 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6804 tem = op0, op0 = op1, op1 = tem;
32131a9c 6805
546b63fb 6806 emit_insn (gen_move_insn (reloadreg, op0));
39b56c2a
RK
6807
6808 /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
6809 This fixes a problem on the 32K where the stack pointer cannot
6810 be used as an operand of an add insn. */
6811
6812 if (rtx_equal_p (op0, op1))
6813 op1 = reloadreg;
6814
c77c9766
RK
6815 insn = emit_insn (gen_add2_insn (reloadreg, op1));
6816
6817 /* If that failed, copy the address register to the reload register.
6818 Then add the constant to the reload register. */
6819
6820 code = recog_memoized (insn);
6821
6822 if (code >= 0)
6823 {
6824 insn_extract (insn);
 6825	  /* We want constrain_operands to treat this insn strictly in
6826 its validity determination, i.e., the way it would after reload
6827 has completed. */
6828 if (constrain_operands (code, 1))
6829 return insn;
6830 }
6831
6832 delete_insns_since (last);
6833
6834 emit_insn (gen_move_insn (reloadreg, op1));
6835 emit_insn (gen_add2_insn (reloadreg, op0));
32131a9c
RK
6836 }
6837
0dadecf6
RK
6838#ifdef SECONDARY_MEMORY_NEEDED
6839 /* If we need a memory location to do the move, do it that way. */
6840 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6841 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6842 REGNO_REG_CLASS (REGNO (reloadreg)),
6843 GET_MODE (reloadreg)))
6844 {
6845 /* Get the memory to use and rewrite both registers to its mode. */
546b63fb 6846 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);
0dadecf6
RK
6847
6848 if (GET_MODE (loc) != GET_MODE (reloadreg))
6849 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
6850
6851 if (GET_MODE (loc) != GET_MODE (in))
6852 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6853
546b63fb
RK
6854 emit_insn (gen_move_insn (loc, in));
6855 emit_insn (gen_move_insn (reloadreg, loc));
0dadecf6
RK
6856 }
6857#endif
6858
32131a9c
RK
6859 /* If IN is a simple operand, use gen_move_insn. */
6860 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
546b63fb 6861 emit_insn (gen_move_insn (reloadreg, in));
32131a9c
RK
6862
6863#ifdef HAVE_reload_load_address
6864 else if (HAVE_reload_load_address)
546b63fb 6865 emit_insn (gen_reload_load_address (reloadreg, in));
32131a9c
RK
6866#endif
6867
 6868  /* Otherwise, just write (set RELOADREG IN) and hope for the best.  */
6869 else
546b63fb 6870 emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
32131a9c
RK
6871
6872 /* Return the first insn emitted.
546b63fb 6873 We can not just return get_last_insn, because there may have
32131a9c
RK
6874 been multiple instructions emitted. Also note that gen_move_insn may
6875 emit more than one insn itself, so we can not assume that there is one
6876 insn emitted per emit_insn_before call. */
6877
546b63fb 6878 return last ? NEXT_INSN (last) : get_insns ();
32131a9c
RK
6879}
6880\f
6881/* Delete a previously made output-reload
6882 whose result we now believe is not needed.
6883 First we double-check.
6884
6885 INSN is the insn now being processed.
6886 OUTPUT_RELOAD_INSN is the insn of the output reload.
6887 J is the reload-number for this insn. */
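/* For example (register numbers are hypothetical): if an earlier insn
   output-reloaded pseudo 110 by storing a reload reg into it, and pseudo
   110 is not referenced between that store and INSN, with no label or jump
   intervening, then the value reaching INSN lives entirely in the reload
   reg and the old store is a candidate for deletion.  */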
6888
6889static void
6890delete_output_reload (insn, j, output_reload_insn)
6891 rtx insn;
6892 int j;
6893 rtx output_reload_insn;
6894{
6895 register rtx i1;
6896
6897 /* Get the raw pseudo-register referred to. */
6898
6899 rtx reg = reload_in[j];
6900 while (GET_CODE (reg) == SUBREG)
6901 reg = SUBREG_REG (reg);
6902
6903 /* If the pseudo-reg we are reloading is no longer referenced
6904 anywhere between the store into it and here,
6905 and no jumps or labels intervene, then the value can get
6906 here through the reload reg alone.
6907 Otherwise, give up--return. */
6908 for (i1 = NEXT_INSN (output_reload_insn);
6909 i1 != insn; i1 = NEXT_INSN (i1))
6910 {
6911 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6912 return;
6913 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6914 && reg_mentioned_p (reg, PATTERN (i1)))
6915 return;
6916 }
6917
208dffa5
RS
6918 if (cannot_omit_stores[REGNO (reg)])
6919 return;
6920
32131a9c
RK
6921 /* If this insn will store in the pseudo again,
6922 the previous store can be removed. */
6923 if (reload_out[j] == reload_in[j])
6924 delete_insn (output_reload_insn);
6925
6926 /* See if the pseudo reg has been completely replaced
6927 with reload regs. If so, delete the store insn
6928 and forget we had a stack slot for the pseudo. */
6929 else if (reg_n_deaths[REGNO (reg)] == 1
6930 && reg_basic_block[REGNO (reg)] >= 0
6931 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6932 {
6933 rtx i2;
6934
6935 /* We know that it was used only between here
6936 and the beginning of the current basic block.
6937 (We also know that the last use before INSN was
6938 the output reload we are thinking of deleting, but never mind that.)
6939 Search that range; see if any ref remains. */
6940 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6941 {
d445b551
RK
6942 rtx set = single_set (i2);
6943
32131a9c
RK
6944 /* Uses which just store in the pseudo don't count,
6945 since if they are the only uses, they are dead. */
d445b551 6946 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
6947 continue;
6948 if (GET_CODE (i2) == CODE_LABEL
6949 || GET_CODE (i2) == JUMP_INSN)
6950 break;
6951 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6952 && reg_mentioned_p (reg, PATTERN (i2)))
6953 /* Some other ref remains;
6954 we can't do anything. */
6955 return;
6956 }
6957
6958 /* Delete the now-dead stores into this pseudo. */
6959 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6960 {
d445b551
RK
6961 rtx set = single_set (i2);
6962
6963 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
6964 delete_insn (i2);
6965 if (GET_CODE (i2) == CODE_LABEL
6966 || GET_CODE (i2) == JUMP_INSN)
6967 break;
6968 }
6969
6970 /* For the debugging info,
6971 say the pseudo lives in this reload reg. */
6972 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6973 alter_reg (REGNO (reg), -1);
6974 }
6975}
32131a9c 6976\f
a8fdc208 6977/* Output reload-insns to reload VALUE into RELOADREG.
858a47b1 6978 VALUE is an autoincrement or autodecrement RTX whose operand
32131a9c
RK
6979 is a register or memory location;
6980 so reloading involves incrementing that location.
6981
6982 INC_AMOUNT is the number to increment or decrement by (always positive).
546b63fb 6983 This cannot be deduced from VALUE. */
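/* A sketch of the fallback sequences emitted below when INCLOC cannot be
   incremented in place: for a pre-increment we emit

	reloadreg <- incloc;  reloadreg <- reloadreg + INC;  incloc <- reloadreg

   and for a post-increment

	reloadreg <- incloc;  reloadreg <- reloadreg + INC;
	incloc <- reloadreg;  reloadreg <- reloadreg - INC

   so RELOADREG ends up holding the address the reloaded insn actually uses
   while INCLOC holds the updated value.  */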
32131a9c 6984
546b63fb
RK
6985static void
6986inc_for_reload (reloadreg, value, inc_amount)
32131a9c
RK
6987 rtx reloadreg;
6988 rtx value;
6989 int inc_amount;
32131a9c
RK
6990{
6991 /* REG or MEM to be copied and incremented. */
6992 rtx incloc = XEXP (value, 0);
6993 /* Nonzero if increment after copying. */
6994 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
546b63fb 6995 rtx last;
0009eff2
RK
6996 rtx inc;
6997 rtx add_insn;
6998 int code;
32131a9c
RK
6999
7000 /* No hard register is equivalent to this register after
7001 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7002 we could inc/dec that register as well (maybe even using it for
7003 the source), but I'm not sure it's worth worrying about. */
7004 if (GET_CODE (incloc) == REG)
7005 reg_last_reload_reg[REGNO (incloc)] = 0;
7006
7007 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7008 inc_amount = - inc_amount;
7009
fb3821f7 7010 inc = GEN_INT (inc_amount);
0009eff2
RK
7011
7012 /* If this is post-increment, first copy the location to the reload reg. */
7013 if (post)
546b63fb 7014 emit_insn (gen_move_insn (reloadreg, incloc));
0009eff2
RK
7015
7016 /* See if we can directly increment INCLOC. Use a method similar to that
7017 in gen_input_reload. */
7018
546b63fb
RK
7019 last = get_last_insn ();
7020 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
7021 gen_rtx (PLUS, GET_MODE (incloc),
7022 incloc, inc)));
0009eff2
RK
7023
7024 code = recog_memoized (add_insn);
7025 if (code >= 0)
32131a9c 7026 {
0009eff2
RK
7027 insn_extract (add_insn);
7028 if (constrain_operands (code, 1))
32131a9c 7029 {
0009eff2
RK
7030 /* If this is a pre-increment and we have incremented the value
7031 where it lives, copy the incremented value to RELOADREG to
7032 be used as an address. */
7033
7034 if (! post)
546b63fb
RK
7035 emit_insn (gen_move_insn (reloadreg, incloc));
7036
7037 return;
32131a9c
RK
7038 }
7039 }
0009eff2 7040
546b63fb 7041 delete_insns_since (last);
0009eff2
RK
7042
 7043  /* If we couldn't do the increment directly, we must increment in RELOADREG.
7044 The way we do this depends on whether this is pre- or post-increment.
7045 For pre-increment, copy INCLOC to the reload register, increment it
7046 there, then save back. */
7047
7048 if (! post)
7049 {
546b63fb
RK
7050 emit_insn (gen_move_insn (reloadreg, incloc));
7051 emit_insn (gen_add2_insn (reloadreg, inc));
7052 emit_insn (gen_move_insn (incloc, reloadreg));
0009eff2 7053 }
32131a9c
RK
7054 else
7055 {
0009eff2
RK
7056 /* Postincrement.
7057 Because this might be a jump insn or a compare, and because RELOADREG
7058 may not be available after the insn in an input reload, we must do
7059 the incrementation before the insn being reloaded for.
7060
7061 We have already copied INCLOC to RELOADREG. Increment the copy in
7062 RELOADREG, save that back, then decrement RELOADREG so it has
7063 the original value. */
7064
546b63fb
RK
7065 emit_insn (gen_add2_insn (reloadreg, inc));
7066 emit_insn (gen_move_insn (incloc, reloadreg));
7067 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
32131a9c 7068 }
0009eff2 7069
546b63fb 7070 return;
32131a9c
RK
7071}
7072\f
7073/* Return 1 if we are certain that the constraint-string STRING allows
7074 the hard register REG. Return 0 if we can't be sure of this. */
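/* For example, for a hard register known to be in GENERAL_REGS, strings
   such as "r" or "g,r" yield 1, while "r,m" yields 0: the memory
   alternative is not known to allow the register, so we can't be sure.  */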
7075
7076static int
7077constraint_accepts_reg_p (string, reg)
7078 char *string;
7079 rtx reg;
7080{
7081 int value = 0;
7082 int regno = true_regnum (reg);
7083 int c;
7084
7085 /* Initialize for first alternative. */
7086 value = 0;
7087 /* Check that each alternative contains `g' or `r'. */
7088 while (1)
7089 switch (c = *string++)
7090 {
7091 case 0:
7092 /* If an alternative lacks `g' or `r', we lose. */
7093 return value;
7094 case ',':
7095 /* If an alternative lacks `g' or `r', we lose. */
7096 if (value == 0)
7097 return 0;
7098 /* Initialize for next alternative. */
7099 value = 0;
7100 break;
7101 case 'g':
7102 case 'r':
7103 /* Any general reg wins for this alternative. */
7104 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7105 value = 1;
7106 break;
7107 default:
7108 /* Any reg in specified class wins for this alternative. */
7109 {
0009eff2 7110 enum reg_class class = REG_CLASS_FROM_LETTER (c);
32131a9c 7111
0009eff2 7112 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
32131a9c
RK
7113 value = 1;
7114 }
7115 }
7116}
7117\f
d445b551
RK
7118/* Return the number of places FIND appears within X, but don't count
7119 an occurrence if some SET_DEST is FIND. */
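/* E.g., in (set (reg A) (plus (reg A) (reg B))), with A and B hypothetical
   registers, (reg A) is counted once: the use inside the PLUS counts, but
   the occurrence as the SET_DEST does not.  */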
32131a9c
RK
7120
7121static int
7122count_occurrences (x, find)
7123 register rtx x, find;
7124{
7125 register int i, j;
7126 register enum rtx_code code;
7127 register char *format_ptr;
7128 int count;
7129
7130 if (x == find)
7131 return 1;
7132 if (x == 0)
7133 return 0;
7134
7135 code = GET_CODE (x);
7136
7137 switch (code)
7138 {
7139 case REG:
7140 case QUEUED:
7141 case CONST_INT:
7142 case CONST_DOUBLE:
7143 case SYMBOL_REF:
7144 case CODE_LABEL:
7145 case PC:
7146 case CC0:
7147 return 0;
d445b551
RK
7148
7149 case SET:
7150 if (SET_DEST (x) == find)
7151 return count_occurrences (SET_SRC (x), find);
7152 break;
32131a9c
RK
7153 }
7154
7155 format_ptr = GET_RTX_FORMAT (code);
7156 count = 0;
7157
7158 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7159 {
7160 switch (*format_ptr++)
7161 {
7162 case 'e':
7163 count += count_occurrences (XEXP (x, i), find);
7164 break;
7165
7166 case 'E':
7167 if (XVEC (x, i) != NULL)
7168 {
7169 for (j = 0; j < XVECLEN (x, i); j++)
7170 count += count_occurrences (XVECEXP (x, i, j), find);
7171 }
7172 break;
7173 }
7174 }
7175 return count;
7176}