]> gcc.gnu.org Git - gcc.git/blame - gcc/reload1.c
(reload, reload_as_needed): Check for use of return register with
[gcc.git] / gcc / reload1.c
CommitLineData
32131a9c 1/* Reload pseudo regs into hard regs for insns that require hard regs.
8c15858f 2 Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.
32131a9c
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
ff2da9fc 21#include <stdio.h>
32131a9c
RK
22#include "config.h"
23#include "rtl.h"
24#include "obstack.h"
25#include "insn-config.h"
26#include "insn-flags.h"
27#include "insn-codes.h"
28#include "flags.h"
29#include "expr.h"
30#include "regs.h"
31#include "hard-reg-set.h"
32#include "reload.h"
33#include "recog.h"
34#include "basic-block.h"
35#include "output.h"
32131a9c
RK
36
37/* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
546b63fb
RK
69
70
71#ifndef REGISTER_MOVE_COST
72#define REGISTER_MOVE_COST(x, y) 2
73#endif
74
75#ifndef MEMORY_MOVE_COST
76#define MEMORY_MOVE_COST(x) 4
77#endif
32131a9c
RK
78\f
79/* During reload_as_needed, element N contains a REG rtx for the hard reg
d08ea79f 80 into which reg N has been reloaded (perhaps for a previous insn). */
32131a9c
RK
81static rtx *reg_last_reload_reg;
82
83/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85static char *reg_has_output_reload;
86
87/* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89static HARD_REG_SET reg_is_output_reload;
90
91/* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95rtx *reg_equiv_constant;
96
97/* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
4803a34a 101rtx *reg_equiv_memory_loc;
32131a9c
RK
102
103/* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105 (because its displacement is too big for the machine.) */
106rtx *reg_equiv_address;
107
108/* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
110rtx *reg_equiv_mem;
111
112/* Widest width in which each pseudo reg is referred to (via subreg). */
113static int *reg_max_ref_width;
114
115/* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117static rtx *reg_equiv_init;
118
119/* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
125
126/* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
131
132/* Number of spill-regs so far; number of valid elements of spill_regs. */
133static int n_spills;
134
135/* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet. This rtx is reused, provided it has
138 the proper mode. */
139static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
140
141/* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
145
146/* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150static short spill_reg_order[FIRST_PSEUDO_REGISTER];
151
152/* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155HARD_REG_SET forbidden_regs;
156
157/* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
546b63fb
RK
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
32131a9c
RK
161
162 (spill_reg_order prevents these registers from being used to start a
163 group.) */
164static HARD_REG_SET bad_spill_regs;
165
166/* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169static short spill_regs[FIRST_PSEUDO_REGISTER];
170
171/* Describes order of preference for putting regs into spill_regs.
172 Contains the numbers of all the hard regs, in order most preferred first.
173 This order is different for each function.
174 It is set up by order_regs_for_reload.
175 Empty elements at the end contain -1. */
176static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
177
178/* 1 for a hard register that appears explicitly in the rtl
179 (for example, function value registers, special registers
180 used by insns, structure value pointer registers). */
181static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
182
183/* Indicates if a register was counted against the need for
184 groups. 0 means it can count against max_nongroup instead. */
185static HARD_REG_SET counted_for_groups;
186
187/* Indicates if a register was counted against the need for
188 non-groups. 0 means it can become part of a new group.
189 During choose_reload_regs, 1 here means don't use this reg
190 as part of a group, even if it seems to be otherwise ok. */
191static HARD_REG_SET counted_for_nongroups;
192
208dffa5
RS
193/* Indexed by pseudo reg number N,
194 says may not delete stores into the real (memory) home of pseudo N.
195 This is set if we already substituted a memory equivalent in some uses,
196 which happens when we have to eliminate the fp from it. */
197static char *cannot_omit_stores;
198
32131a9c
RK
199/* Nonzero if indirect addressing is supported on the machine; this means
200 that spilling (REG n) does not require reloading it into a register in
201 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
202 value indicates the level of indirect addressing supported, e.g., two
203 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
204 a hard register. */
205
206static char spill_indirect_levels;
207
208/* Nonzero if indirect addressing is supported when the innermost MEM is
209 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
210 which these are valid is the same as spill_indirect_levels, above. */
211
212char indirect_symref_ok;
213
214/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
215
216char double_reg_address_ok;
217
218/* Record the stack slot for each spilled hard register. */
219
220static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
221
222/* Width allocated so far for that stack slot. */
223
224static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
225
226/* Indexed by register class and basic block number, nonzero if there is
227 any need for a spill register of that class in that basic block.
228 The pointer is 0 if we did stupid allocation and don't know
229 the structure of basic blocks. */
230
231char *basic_block_needs[N_REG_CLASSES];
232
233/* First uid used by insns created by reload in this function.
234 Used in find_equiv_reg. */
235int reload_first_uid;
236
237/* Flag set by local-alloc or global-alloc if anything is live in
238 a call-clobbered reg across calls. */
239
240int caller_save_needed;
241
242/* Set to 1 while reload_as_needed is operating.
243 Required by some machines to handle any generated moves differently. */
244
245int reload_in_progress = 0;
246
247/* These arrays record the insn_code of insns that may be needed to
248 perform input and output reloads of special objects. They provide a
249 place to pass a scratch register. */
250
251enum insn_code reload_in_optab[NUM_MACHINE_MODES];
252enum insn_code reload_out_optab[NUM_MACHINE_MODES];
253
d45cf215 254/* This obstack is used for allocation of rtl during register elimination.
32131a9c
RK
255 The allocated storage can be freed once find_reloads has processed the
256 insn. */
257
258struct obstack reload_obstack;
259char *reload_firstobj;
260
261#define obstack_chunk_alloc xmalloc
262#define obstack_chunk_free free
263
32131a9c
RK
264/* List of labels that must never be deleted. */
265extern rtx forced_labels;
266\f
267/* This structure is used to record information about register eliminations.
268 Each array entry describes one possible way of eliminating a register
269 in favor of another. If there is more than one way of eliminating a
270 particular register, the most preferred should be specified first. */
271
272static struct elim_table
273{
274 int from; /* Register number to be eliminated. */
275 int to; /* Register number used as replacement. */
276 int initial_offset; /* Initial difference between values. */
277 int can_eliminate; /* Non-zero if this elimination can be done. */
278 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
279 insns made by reload. */
280 int offset; /* Current offset between the two regs. */
a8efe40d 281 int max_offset; /* Maximum offset between the two regs. */
32131a9c
RK
282 int previous_offset; /* Offset at end of previous insn. */
283 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
284 rtx from_rtx; /* REG rtx for the register to be eliminated.
285 We cannot simply compare the number since
286 we might then spuriously replace a hard
287 register corresponding to a pseudo
288 assigned to the reg to be eliminated. */
289 rtx to_rtx; /* REG rtx for the replacement. */
290} reg_eliminate[] =
291
292/* If a set of eliminable registers was specified, define the table from it.
293 Otherwise, default to the normal case of the frame pointer being
294 replaced by the stack pointer. */
295
296#ifdef ELIMINABLE_REGS
297 ELIMINABLE_REGS;
298#else
299 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
300#endif
301
302#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
303
304/* Record the number of pending eliminations that have an offset not equal
305 to their initial offset. If non-zero, we use a new copy of each
306 replacement result in any insns encountered. */
307static int num_not_at_initial_offset;
308
309/* Count the number of registers that we may be able to eliminate. */
310static int num_eliminable;
311
312/* For each label, we record the offset of each elimination. If we reach
313 a label by more than one path and an offset differs, we cannot do the
314 elimination. This information is indexed by the number of the label.
315 The first table is an array of flags that records whether we have yet
316 encountered a label and the second table is an array of arrays, one
317 entry in the latter array for each elimination. */
318
319static char *offsets_known_at;
320static int (*offsets_at)[NUM_ELIMINABLE_REGS];
321
322/* Number of labels in the current function. */
323
324static int num_labels;
546b63fb
RK
325
326struct hard_reg_n_uses { int regno; int uses; };
32131a9c 327\f
546b63fb
RK
328static int possible_group_p PROTO((int, int *));
329static void count_possible_groups PROTO((int *, enum machine_mode *,
330 int *));
331static int modes_equiv_for_class_p PROTO((enum machine_mode,
332 enum machine_mode,
333 enum reg_class));
334static void spill_failure PROTO((rtx));
335static int new_spill_reg PROTO((int, int, int *, int *, int,
336 FILE *));
337static void delete_dead_insn PROTO((rtx));
338static void alter_reg PROTO((int, int));
c307c237 339static void mark_scratch_live PROTO((rtx));
546b63fb
RK
340static void set_label_offsets PROTO((rtx, rtx, int));
341static int eliminate_regs_in_insn PROTO((rtx, int));
342static void mark_not_eliminable PROTO((rtx, rtx));
343static int spill_hard_reg PROTO((int, int, FILE *, int));
344static void scan_paradoxical_subregs PROTO((rtx));
345static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
346 struct hard_reg_n_uses *));
347static void order_regs_for_reload PROTO((void));
348static void reload_as_needed PROTO((rtx, int));
9a881562 349static void forget_old_reloads_1 PROTO((rtx, rtx));
546b63fb
RK
350static int reload_reg_class_lower PROTO((short *, short *));
351static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
352 enum machine_mode));
be7ae2a4
RK
353static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
354 enum machine_mode));
546b63fb
RK
355static int reload_reg_free_p PROTO((int, int, enum reload_type));
356static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
357static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
351aa1c1 358static int reloads_conflict PROTO((int, int));
546b63fb
RK
359static int allocate_reload_reg PROTO((int, rtx, int, int));
360static void choose_reload_regs PROTO((rtx, rtx));
361static void merge_assigned_reloads PROTO((rtx));
362static void emit_reload_insns PROTO((rtx));
363static void delete_output_reload PROTO((rtx, int, rtx));
364static void inc_for_reload PROTO((rtx, rtx, int));
365static int constraint_accepts_reg_p PROTO((char *, rtx));
366static int count_occurrences PROTO((rtx, rtx));
32131a9c 367\f
546b63fb
RK
368/* Initialize the reload pass once per compilation. */
369
32131a9c
RK
370void
371init_reload ()
372{
373 register int i;
374
375 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
376 Set spill_indirect_levels to the number of levels such addressing is
377 permitted, zero if it is not permitted at all. */
378
379 register rtx tem
380 = gen_rtx (MEM, Pmode,
381 gen_rtx (PLUS, Pmode,
382 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
fb3821f7 383 GEN_INT (4)));
32131a9c
RK
384 spill_indirect_levels = 0;
385
386 while (memory_address_p (QImode, tem))
387 {
388 spill_indirect_levels++;
389 tem = gen_rtx (MEM, Pmode, tem);
390 }
391
392 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
393
394 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
395 indirect_symref_ok = memory_address_p (QImode, tem);
396
397 /* See if reg+reg is a valid (and offsettable) address. */
398
65701fd2 399 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
57caa638
RS
400 {
401 tem = gen_rtx (PLUS, Pmode,
3ec2ea3e 402 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
57caa638
RS
403 gen_rtx (REG, Pmode, i));
404 /* This way, we make sure that reg+reg is an offsettable address. */
405 tem = plus_constant (tem, 4);
406
407 if (memory_address_p (QImode, tem))
408 {
409 double_reg_address_ok = 1;
410 break;
411 }
412 }
32131a9c
RK
413
414 /* Initialize obstack for our rtl allocation. */
415 gcc_obstack_init (&reload_obstack);
416 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
32131a9c
RK
417}
418
546b63fb 419/* Main entry point for the reload pass.
32131a9c
RK
420
421 FIRST is the first insn of the function being compiled.
422
423 GLOBAL nonzero means we were called from global_alloc
424 and should attempt to reallocate any pseudoregs that we
425 displace from hard regs we will use for reloads.
426 If GLOBAL is zero, we do not have enough information to do that,
427 so any pseudo reg that is spilled must go to the stack.
428
429 DUMPFILE is the global-reg debugging dump file stream, or 0.
430 If it is nonzero, messages are written to it to describe
431 which registers are seized as reload regs, which pseudo regs
5352b11a 432 are spilled from them, and where the pseudo regs are reallocated to.
32131a9c 433
5352b11a
RS
434 Return value is nonzero if reload failed
435 and we must not do any more for this function. */
436
437int
32131a9c
RK
438reload (first, global, dumpfile)
439 rtx first;
440 int global;
441 FILE *dumpfile;
442{
443 register int class;
8b3e912b 444 register int i, j, k;
32131a9c
RK
445 register rtx insn;
446 register struct elim_table *ep;
447
448 int something_changed;
449 int something_needs_reloads;
450 int something_needs_elimination;
451 int new_basic_block_needs;
a8efe40d
RK
452 enum reg_class caller_save_spill_class = NO_REGS;
453 int caller_save_group_size = 1;
32131a9c 454
5352b11a
RS
455 /* Nonzero means we couldn't get enough spill regs. */
456 int failure = 0;
457
32131a9c
RK
458 /* The basic block number currently being processed for INSN. */
459 int this_block;
460
461 /* Make sure even insns with volatile mem refs are recognizable. */
462 init_recog ();
463
464 /* Enable find_equiv_reg to distinguish insns made by reload. */
465 reload_first_uid = get_max_uid ();
466
467 for (i = 0; i < N_REG_CLASSES; i++)
468 basic_block_needs[i] = 0;
469
0dadecf6
RK
470#ifdef SECONDARY_MEMORY_NEEDED
471 /* Initialize the secondary memory table. */
472 clear_secondary_mem ();
473#endif
474
32131a9c
RK
475 /* Remember which hard regs appear explicitly
476 before we merge into `regs_ever_live' the ones in which
477 pseudo regs have been allocated. */
478 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
479
480 /* We don't have a stack slot for any spill reg yet. */
481 bzero (spill_stack_slot, sizeof spill_stack_slot);
482 bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);
483
a8efe40d
RK
484 /* Initialize the save area information for caller-save, in case some
485 are needed. */
486 init_save_areas ();
a8fdc208 487
32131a9c
RK
488 /* Compute which hard registers are now in use
489 as homes for pseudo registers.
490 This is done here rather than (eg) in global_alloc
491 because this point is reached even if not optimizing. */
492
493 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
494 mark_home_live (i);
495
c307c237
RK
496 for (i = 0; i < scratch_list_length; i++)
497 if (scratch_list[i])
498 mark_scratch_live (scratch_list[i]);
499
32131a9c
RK
500 /* Make sure that the last insn in the chain
501 is not something that needs reloading. */
fb3821f7 502 emit_note (NULL_PTR, NOTE_INSN_DELETED);
32131a9c
RK
503
504 /* Find all the pseudo registers that didn't get hard regs
505 but do have known equivalent constants or memory slots.
506 These include parameters (known equivalent to parameter slots)
507 and cse'd or loop-moved constant memory addresses.
508
509 Record constant equivalents in reg_equiv_constant
510 so they will be substituted by find_reloads.
511 Record memory equivalents in reg_mem_equiv so they can
512 be substituted eventually by altering the REG-rtx's. */
513
514 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
515 bzero (reg_equiv_constant, max_regno * sizeof (rtx));
516 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
517 bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
518 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
519 bzero (reg_equiv_mem, max_regno * sizeof (rtx));
520 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
521 bzero (reg_equiv_init, max_regno * sizeof (rtx));
522 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
523 bzero (reg_equiv_address, max_regno * sizeof (rtx));
524 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
525 bzero (reg_max_ref_width, max_regno * sizeof (int));
208dffa5
RS
526 cannot_omit_stores = (char *) alloca (max_regno);
527 bzero (cannot_omit_stores, max_regno);
32131a9c 528
56f58d3a
RK
529#ifdef SMALL_REGISTER_CLASSES
530 CLEAR_HARD_REG_SET (forbidden_regs);
531#endif
532
32131a9c 533 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
56f58d3a
RK
534 Also find all paradoxical subregs and find largest such for each pseudo.
535 On machines with small register classes, record hard registers that
536 are used for user variables. These can never be used for spills. */
32131a9c
RK
537
538 for (insn = first; insn; insn = NEXT_INSN (insn))
539 {
540 rtx set = single_set (insn);
541
542 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
543 {
fb3821f7 544 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
a8efe40d
RK
545 if (note
546#ifdef LEGITIMATE_PIC_OPERAND_P
a8fdc208 547 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
a8efe40d
RK
548 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
549#endif
550 )
32131a9c
RK
551 {
552 rtx x = XEXP (note, 0);
553 i = REGNO (SET_DEST (set));
554 if (i > LAST_VIRTUAL_REGISTER)
555 {
556 if (GET_CODE (x) == MEM)
557 reg_equiv_memory_loc[i] = x;
558 else if (CONSTANT_P (x))
559 {
560 if (LEGITIMATE_CONSTANT_P (x))
561 reg_equiv_constant[i] = x;
562 else
563 reg_equiv_memory_loc[i]
d445b551 564 = force_const_mem (GET_MODE (SET_DEST (set)), x);
32131a9c
RK
565 }
566 else
567 continue;
568
569 /* If this register is being made equivalent to a MEM
570 and the MEM is not SET_SRC, the equivalencing insn
571 is one with the MEM as a SET_DEST and it occurs later.
572 So don't mark this insn now. */
573 if (GET_CODE (x) != MEM
574 || rtx_equal_p (SET_SRC (set), x))
575 reg_equiv_init[i] = insn;
576 }
577 }
578 }
579
580 /* If this insn is setting a MEM from a register equivalent to it,
581 this is the equivalencing insn. */
582 else if (set && GET_CODE (SET_DEST (set)) == MEM
583 && GET_CODE (SET_SRC (set)) == REG
584 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
585 && rtx_equal_p (SET_DEST (set),
586 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
587 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
588
589 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
590 scan_paradoxical_subregs (PATTERN (insn));
591 }
592
593 /* Does this function require a frame pointer? */
594
595 frame_pointer_needed = (! flag_omit_frame_pointer
596#ifdef EXIT_IGNORE_STACK
597 /* ?? If EXIT_IGNORE_STACK is set, we will not save
598 and restore sp for alloca. So we can't eliminate
599 the frame pointer in that case. At some point,
600 we should improve this by emitting the
601 sp-adjusting insns for this case. */
602 || (current_function_calls_alloca
603 && EXIT_IGNORE_STACK)
604#endif
605 || FRAME_POINTER_REQUIRED);
606
607 num_eliminable = 0;
608
609 /* Initialize the table of registers to eliminate. The way we do this
610 depends on how the eliminable registers were defined. */
611#ifdef ELIMINABLE_REGS
612 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
613 {
614 ep->can_eliminate = ep->can_eliminate_previous
615 = (CAN_ELIMINATE (ep->from, ep->to)
3ec2ea3e
DE
616 && (ep->from != HARD_FRAME_POINTER_REGNUM
617 || ! frame_pointer_needed));
32131a9c
RK
618 }
619#else
620 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
621 = ! frame_pointer_needed;
622#endif
623
624 /* Count the number of eliminable registers and build the FROM and TO
a8fdc208 625 REG rtx's. Note that code in gen_rtx will cause, e.g.,
32131a9c
RK
626 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
627 We depend on this. */
628 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
629 {
630 num_eliminable += ep->can_eliminate;
631 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
632 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
633 }
634
635 num_labels = max_label_num () - get_first_label_num ();
636
637 /* Allocate the tables used to store offset information at labels. */
638 offsets_known_at = (char *) alloca (num_labels);
639 offsets_at
640 = (int (*)[NUM_ELIMINABLE_REGS])
641 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
642
643 offsets_known_at -= get_first_label_num ();
644 offsets_at -= get_first_label_num ();
645
646 /* Alter each pseudo-reg rtx to contain its hard reg number.
647 Assign stack slots to the pseudos that lack hard regs or equivalents.
648 Do not touch virtual registers. */
649
650 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
651 alter_reg (i, -1);
652
653 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
654 because the stack size may be a part of the offset computation for
655 register elimination. */
656 assign_stack_local (BLKmode, 0, 0);
657
658 /* If we have some registers we think can be eliminated, scan all insns to
659 see if there is an insn that sets one of these registers to something
660 other than itself plus a constant. If so, the register cannot be
661 eliminated. Doing this scan here eliminates an extra pass through the
662 main reload loop in the most common case where register elimination
663 cannot be done. */
664 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
665 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
666 || GET_CODE (insn) == CALL_INSN)
667 note_stores (PATTERN (insn), mark_not_eliminable);
668
669#ifndef REGISTER_CONSTRAINTS
670 /* If all the pseudo regs have hard regs,
671 except for those that are never referenced,
672 we know that no reloads are needed. */
673 /* But that is not true if there are register constraints, since
674 in that case some pseudos might be in the wrong kind of hard reg. */
675
676 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
677 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
678 break;
679
b8093d02 680 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
32131a9c
RK
681 return;
682#endif
683
684 /* Compute the order of preference for hard registers to spill.
685 Store them by decreasing preference in potential_reload_regs. */
686
687 order_regs_for_reload ();
688
689 /* So far, no hard regs have been spilled. */
690 n_spills = 0;
691 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
692 spill_reg_order[i] = -1;
693
694 /* On most machines, we can't use any register explicitly used in the
695 rtl as a spill register. But on some, we have to. Those will have
696 taken care to keep the life of hard regs as short as possible. */
697
56f58d3a 698#ifndef SMALL_REGISTER_CLASSES
32131a9c
RK
699 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
700#endif
701
702 /* Spill any hard regs that we know we can't eliminate. */
703 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
704 if (! ep->can_eliminate)
705 {
706 spill_hard_reg (ep->from, global, dumpfile, 1);
707 regs_ever_live[ep->from] = 1;
708 }
709
710 if (global)
711 for (i = 0; i < N_REG_CLASSES; i++)
712 {
713 basic_block_needs[i] = (char *)alloca (n_basic_blocks);
714 bzero (basic_block_needs[i], n_basic_blocks);
715 }
716
b2f15f94
RK
717 /* From now on, we need to emit any moves without making new pseudos. */
718 reload_in_progress = 1;
719
32131a9c
RK
720 /* This loop scans the entire function each go-round
721 and repeats until one repetition spills no additional hard regs. */
722
d45cf215 723 /* This flag is set when a pseudo reg is spilled,
32131a9c
RK
724 to require another pass. Note that getting an additional reload
725 reg does not necessarily imply any pseudo reg was spilled;
726 sometimes we find a reload reg that no pseudo reg was allocated in. */
727 something_changed = 1;
728 /* This flag is set if there are any insns that require reloading. */
729 something_needs_reloads = 0;
730 /* This flag is set if there are any insns that require register
731 eliminations. */
732 something_needs_elimination = 0;
733 while (something_changed)
734 {
735 rtx after_call = 0;
736
737 /* For each class, number of reload regs needed in that class.
738 This is the maximum over all insns of the needs in that class
739 of the individual insn. */
740 int max_needs[N_REG_CLASSES];
741 /* For each class, size of group of consecutive regs
742 that is needed for the reloads of this class. */
743 int group_size[N_REG_CLASSES];
744 /* For each class, max number of consecutive groups needed.
745 (Each group contains group_size[CLASS] consecutive registers.) */
746 int max_groups[N_REG_CLASSES];
747 /* For each class, max number needed of regs that don't belong
748 to any of the groups. */
749 int max_nongroups[N_REG_CLASSES];
750 /* For each class, the machine mode which requires consecutive
751 groups of regs of that class.
752 If two different modes ever require groups of one class,
753 they must be the same size and equally restrictive for that class,
754 otherwise we can't handle the complexity. */
755 enum machine_mode group_mode[N_REG_CLASSES];
5352b11a
RS
756 /* Record the insn where each maximum need is first found. */
757 rtx max_needs_insn[N_REG_CLASSES];
758 rtx max_groups_insn[N_REG_CLASSES];
759 rtx max_nongroups_insn[N_REG_CLASSES];
32131a9c 760 rtx x;
0dadecf6 761 int starting_frame_size = get_frame_size ();
e404a39a 762 static char *reg_class_names[] = REG_CLASS_NAMES;
32131a9c
RK
763
764 something_changed = 0;
765 bzero (max_needs, sizeof max_needs);
766 bzero (max_groups, sizeof max_groups);
767 bzero (max_nongroups, sizeof max_nongroups);
5352b11a
RS
768 bzero (max_needs_insn, sizeof max_needs_insn);
769 bzero (max_groups_insn, sizeof max_groups_insn);
770 bzero (max_nongroups_insn, sizeof max_nongroups_insn);
32131a9c
RK
771 bzero (group_size, sizeof group_size);
772 for (i = 0; i < N_REG_CLASSES; i++)
773 group_mode[i] = VOIDmode;
774
775 /* Keep track of which basic blocks are needing the reloads. */
776 this_block = 0;
777
778 /* Remember whether any element of basic_block_needs
779 changes from 0 to 1 in this pass. */
780 new_basic_block_needs = 0;
781
782 /* Reset all offsets on eliminable registers to their initial values. */
783#ifdef ELIMINABLE_REGS
784 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
785 {
786 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
a8efe40d
RK
787 ep->previous_offset = ep->offset
788 = ep->max_offset = ep->initial_offset;
32131a9c
RK
789 }
790#else
791#ifdef INITIAL_FRAME_POINTER_OFFSET
792 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
793#else
794 if (!FRAME_POINTER_REQUIRED)
795 abort ();
796 reg_eliminate[0].initial_offset = 0;
797#endif
a8efe40d 798 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
32131a9c
RK
799 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
800#endif
801
802 num_not_at_initial_offset = 0;
803
804 bzero (&offsets_known_at[get_first_label_num ()], num_labels);
805
806 /* Set a known offset for each forced label to be at the initial offset
807 of each elimination. We do this because we assume that all
808 computed jumps occur from a location where each elimination is
809 at its initial offset. */
810
811 for (x = forced_labels; x; x = XEXP (x, 1))
812 if (XEXP (x, 0))
fb3821f7 813 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
32131a9c
RK
814
815 /* For each pseudo register that has an equivalent location defined,
816 try to eliminate any eliminable registers (such as the frame pointer)
817 assuming initial offsets for the replacement register, which
818 is the normal case.
819
820 If the resulting location is directly addressable, substitute
821 the MEM we just got directly for the old REG.
822
823 If it is not addressable but is a constant or the sum of a hard reg
824 and constant, it is probably not addressable because the constant is
825 out of range, in that case record the address; we will generate
826 hairy code to compute the address in a register each time it is
6491dbbb
RK
827 needed. Similarly if it is a hard register, but one that is not
828 valid as an address register.
32131a9c
RK
829
830 If the location is not addressable, but does not have one of the
831 above forms, assign a stack slot. We have to do this to avoid the
832 potential of producing lots of reloads if, e.g., a location involves
833 a pseudo that didn't get a hard register and has an equivalent memory
834 location that also involves a pseudo that didn't get a hard register.
835
836 Perhaps at some point we will improve reload_when_needed handling
837 so this problem goes away. But that's very hairy. */
838
839 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
840 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
841 {
fb3821f7 842 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
32131a9c
RK
843
844 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
845 XEXP (x, 0)))
846 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
847 else if (CONSTANT_P (XEXP (x, 0))
6491dbbb
RK
848 || (GET_CODE (XEXP (x, 0)) == REG
849 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
32131a9c
RK
850 || (GET_CODE (XEXP (x, 0)) == PLUS
851 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
852 && (REGNO (XEXP (XEXP (x, 0), 0))
853 < FIRST_PSEUDO_REGISTER)
854 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
855 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
856 else
857 {
858 /* Make a new stack slot. Then indicate that something
a8fdc208 859 changed so we go back and recompute offsets for
32131a9c
RK
860 eliminable registers because the allocation of memory
861 below might change some offset. reg_equiv_{mem,address}
862 will be set up for this pseudo on the next pass around
863 the loop. */
864 reg_equiv_memory_loc[i] = 0;
865 reg_equiv_init[i] = 0;
866 alter_reg (i, -1);
867 something_changed = 1;
868 }
869 }
a8fdc208 870
d45cf215 871 /* If we allocated another pseudo to the stack, redo elimination
32131a9c
RK
872 bookkeeping. */
873 if (something_changed)
874 continue;
875
a8efe40d
RK
876 /* If caller-saves needs a group, initialize the group to include
877 the size and mode required for caller-saves. */
878
879 if (caller_save_group_size > 1)
880 {
881 group_mode[(int) caller_save_spill_class] = Pmode;
882 group_size[(int) caller_save_spill_class] = caller_save_group_size;
883 }
884
32131a9c
RK
885 /* Compute the most additional registers needed by any instruction.
886 Collect information separately for each class of regs. */
887
888 for (insn = first; insn; insn = NEXT_INSN (insn))
889 {
890 if (global && this_block + 1 < n_basic_blocks
891 && insn == basic_block_head[this_block+1])
892 ++this_block;
893
894 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
895 might include REG_LABEL), we need to see what effects this
896 has on the known offsets at labels. */
897
898 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
899 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
900 && REG_NOTES (insn) != 0))
901 set_label_offsets (insn, insn, 0);
902
903 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
904 {
905 /* Nonzero means don't use a reload reg that overlaps
906 the place where a function value can be returned. */
907 rtx avoid_return_reg = 0;
908
909 rtx old_body = PATTERN (insn);
910 int old_code = INSN_CODE (insn);
911 rtx old_notes = REG_NOTES (insn);
912 int did_elimination = 0;
546b63fb
RK
913
914 /* To compute the number of reload registers of each class
915 needed for an insn, we must similate what choose_reload_regs
916 can do. We do this by splitting an insn into an "input" and
917 an "output" part. RELOAD_OTHER reloads are used in both.
918 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
919 which must be live over the entire input section of reloads,
920 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
921 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
922 inputs.
923
924 The registers needed for output are RELOAD_OTHER and
925 RELOAD_FOR_OUTPUT, which are live for the entire output
926 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
927 reloads for each operand.
928
929 The total number of registers needed is the maximum of the
930 inputs and outputs. */
931
8b3e912b 932 struct needs
32131a9c 933 {
8b3e912b
RK
934 /* [0] is normal, [1] is nongroup. */
935 int regs[2][N_REG_CLASSES];
936 int groups[N_REG_CLASSES];
937 };
938
939 /* Each `struct needs' corresponds to one RELOAD_... type. */
940 struct {
941 struct needs other;
942 struct needs input;
943 struct needs output;
944 struct needs insn;
945 struct needs other_addr;
946 struct needs op_addr;
947 struct needs in_addr[MAX_RECOG_OPERANDS];
948 struct needs out_addr[MAX_RECOG_OPERANDS];
949 } insn_needs;
32131a9c
RK
950
951 /* If needed, eliminate any eliminable registers. */
952 if (num_eliminable)
953 did_elimination = eliminate_regs_in_insn (insn, 0);
954
955#ifdef SMALL_REGISTER_CLASSES
956 /* Set avoid_return_reg if this is an insn
957 that might use the value of a function call. */
958 if (GET_CODE (insn) == CALL_INSN)
959 {
960 if (GET_CODE (PATTERN (insn)) == SET)
961 after_call = SET_DEST (PATTERN (insn));
962 else if (GET_CODE (PATTERN (insn)) == PARALLEL
963 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
964 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
965 else
966 after_call = 0;
967 }
968 else if (after_call != 0
969 && !(GET_CODE (PATTERN (insn)) == SET
970 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
971 {
2b979c57 972 if (reg_referenced_p (after_call, PATTERN (insn)))
32131a9c
RK
973 avoid_return_reg = after_call;
974 after_call = 0;
975 }
976#endif /* SMALL_REGISTER_CLASSES */
977
978 /* Analyze the instruction. */
979 find_reloads (insn, 0, spill_indirect_levels, global,
980 spill_reg_order);
981
982 /* Remember for later shortcuts which insns had any reloads or
983 register eliminations.
984
985 One might think that it would be worthwhile to mark insns
986 that need register replacements but not reloads, but this is
987 not safe because find_reloads may do some manipulation of
988 the insn (such as swapping commutative operands), which would
989 be lost when we restore the old pattern after register
990 replacement. So the actions of find_reloads must be redone in
991 subsequent passes or in reload_as_needed.
992
993 However, it is safe to mark insns that need reloads
994 but not register replacement. */
995
996 PUT_MODE (insn, (did_elimination ? QImode
997 : n_reloads ? HImode
546b63fb 998 : GET_MODE (insn) == DImode ? DImode
32131a9c
RK
999 : VOIDmode));
1000
1001 /* Discard any register replacements done. */
1002 if (did_elimination)
1003 {
1004 obstack_free (&reload_obstack, reload_firstobj);
1005 PATTERN (insn) = old_body;
1006 INSN_CODE (insn) = old_code;
1007 REG_NOTES (insn) = old_notes;
1008 something_needs_elimination = 1;
1009 }
1010
a8efe40d 1011 /* If this insn has no reloads, we need not do anything except
a8fdc208 1012 in the case of a CALL_INSN when we have caller-saves and
a8efe40d
RK
1013 caller-save needs reloads. */
1014
1015 if (n_reloads == 0
1016 && ! (GET_CODE (insn) == CALL_INSN
1017 && caller_save_spill_class != NO_REGS))
32131a9c
RK
1018 continue;
1019
1020 something_needs_reloads = 1;
8b3e912b 1021 bzero (&insn_needs, sizeof insn_needs);
32131a9c
RK
1022
1023 /* Count each reload once in every class
1024 containing the reload's own class. */
1025
1026 for (i = 0; i < n_reloads; i++)
1027 {
1028 register enum reg_class *p;
e85ddd99 1029 enum reg_class class = reload_reg_class[i];
32131a9c
RK
1030 int size;
1031 enum machine_mode mode;
ce0e109b 1032 int nongroup_need;
8b3e912b 1033 struct needs *this_needs;
32131a9c
RK
1034
1035 /* Don't count the dummy reloads, for which one of the
1036 regs mentioned in the insn can be used for reloading.
1037 Don't count optional reloads.
1038 Don't count reloads that got combined with others. */
1039 if (reload_reg_rtx[i] != 0
1040 || reload_optional[i] != 0
1041 || (reload_out[i] == 0 && reload_in[i] == 0
1042 && ! reload_secondary_p[i]))
1043 continue;
1044
e85ddd99
RK
1045 /* Show that a reload register of this class is needed
1046 in this basic block. We do not use insn_needs and
1047 insn_groups because they are overly conservative for
1048 this purpose. */
1049 if (global && ! basic_block_needs[(int) class][this_block])
1050 {
1051 basic_block_needs[(int) class][this_block] = 1;
1052 new_basic_block_needs = 1;
1053 }
1054
ee249c09
RK
1055
1056 mode = reload_inmode[i];
1057 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1058 mode = reload_outmode[i];
1059 size = CLASS_MAX_NREGS (class, mode);
1060
8b3e912b
RK
1061 /* If this class doesn't want a group, determine if we have
1062 a nongroup need or a regular need. We have a nongroup
1063 need if this reload conflicts with a group reload whose
1064 class intersects with this reload's class. */
ce0e109b
RK
1065
1066 nongroup_need = 0;
ee249c09 1067 if (size == 1)
b8f4c738
RK
1068 for (j = 0; j < n_reloads; j++)
1069 if ((CLASS_MAX_NREGS (reload_reg_class[j],
255cf280
RK
1070 (GET_MODE_SIZE (reload_outmode[j])
1071 > GET_MODE_SIZE (reload_inmode[j]))
1072 ? reload_outmode[j]
1073 : reload_inmode[j])
b8f4c738
RK
1074 > 1)
1075 && reloads_conflict (i, j)
ce0e109b
RK
1076 && reg_classes_intersect_p (class,
1077 reload_reg_class[j]))
1078 {
1079 nongroup_need = 1;
1080 break;
1081 }
1082
32131a9c
RK
1083 /* Decide which time-of-use to count this reload for. */
1084 switch (reload_when_needed[i])
1085 {
1086 case RELOAD_OTHER:
8b3e912b 1087 this_needs = &insn_needs.other;
32131a9c 1088 break;
546b63fb 1089 case RELOAD_FOR_INPUT:
8b3e912b 1090 this_needs = &insn_needs.input;
32131a9c 1091 break;
546b63fb 1092 case RELOAD_FOR_OUTPUT:
8b3e912b 1093 this_needs = &insn_needs.output;
32131a9c 1094 break;
546b63fb 1095 case RELOAD_FOR_INSN:
8b3e912b 1096 this_needs = &insn_needs.insn;
546b63fb 1097 break;
546b63fb 1098 case RELOAD_FOR_OTHER_ADDRESS:
8b3e912b 1099 this_needs = &insn_needs.other_addr;
546b63fb 1100 break;
546b63fb 1101 case RELOAD_FOR_INPUT_ADDRESS:
8b3e912b 1102 this_needs = &insn_needs.in_addr[reload_opnum[i]];
546b63fb 1103 break;
546b63fb 1104 case RELOAD_FOR_OUTPUT_ADDRESS:
8b3e912b 1105 this_needs = &insn_needs.out_addr[reload_opnum[i]];
546b63fb 1106 break;
32131a9c 1107 case RELOAD_FOR_OPERAND_ADDRESS:
8b3e912b 1108 this_needs = &insn_needs.op_addr;
32131a9c
RK
1109 break;
1110 }
1111
32131a9c
RK
1112 if (size > 1)
1113 {
1114 enum machine_mode other_mode, allocate_mode;
1115
1116 /* Count number of groups needed separately from
1117 number of individual regs needed. */
8b3e912b 1118 this_needs->groups[(int) class]++;
e85ddd99 1119 p = reg_class_superclasses[(int) class];
32131a9c 1120 while (*p != LIM_REG_CLASSES)
8b3e912b 1121 this_needs->groups[(int) *p++]++;
32131a9c
RK
1122
1123 /* Record size and mode of a group of this class. */
1124 /* If more than one size group is needed,
1125 make all groups the largest needed size. */
e85ddd99 1126 if (group_size[(int) class] < size)
32131a9c 1127 {
e85ddd99 1128 other_mode = group_mode[(int) class];
32131a9c
RK
1129 allocate_mode = mode;
1130
e85ddd99
RK
1131 group_size[(int) class] = size;
1132 group_mode[(int) class] = mode;
32131a9c
RK
1133 }
1134 else
1135 {
1136 other_mode = mode;
e85ddd99 1137 allocate_mode = group_mode[(int) class];
32131a9c
RK
1138 }
1139
1140 /* Crash if two dissimilar machine modes both need
1141 groups of consecutive regs of the same class. */
1142
8b3e912b 1143 if (other_mode != VOIDmode && other_mode != allocate_mode
32131a9c 1144 && ! modes_equiv_for_class_p (allocate_mode,
8b3e912b 1145 other_mode, class))
32131a9c
RK
1146 abort ();
1147 }
1148 else if (size == 1)
1149 {
8b3e912b 1150 this_needs->regs[nongroup_need][(int) class] += 1;
e85ddd99 1151 p = reg_class_superclasses[(int) class];
32131a9c 1152 while (*p != LIM_REG_CLASSES)
8b3e912b 1153 this_needs->regs[nongroup_need][(int) *p++] += 1;
32131a9c
RK
1154 }
1155 else
1156 abort ();
1157 }
1158
1159 /* All reloads have been counted for this insn;
1160 now merge the various times of use.
1161 This sets insn_needs, etc., to the maximum total number
1162 of registers needed at any point in this insn. */
1163
1164 for (i = 0; i < N_REG_CLASSES; i++)
1165 {
546b63fb
RK
1166 int in_max, out_max;
1167
8b3e912b
RK
1168 /* Compute normal and nongroup needs. */
1169 for (j = 0; j <= 1; j++)
546b63fb 1170 {
8b3e912b
RK
1171 for (in_max = 0, out_max = 0, k = 0;
1172 k < reload_n_operands; k++)
1173 {
1174 in_max
1175 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1176 out_max
1177 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1178 }
546b63fb 1179
8b3e912b
RK
1180 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1181 and operand addresses but not things used to reload
1182 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1183 don't conflict with things needed to reload inputs or
1184 outputs. */
546b63fb 1185
8b3e912b
RK
1186 in_max = MAX (in_max, insn_needs.op_addr.regs[j][i]);
1187 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
546b63fb 1188
8b3e912b
RK
1189 insn_needs.input.regs[j][i]
1190 = MAX (insn_needs.input.regs[j][i]
1191 + insn_needs.op_addr.regs[j][i]
1192 + insn_needs.insn.regs[j][i],
1193 in_max + insn_needs.input.regs[j][i]);
546b63fb 1194
8b3e912b
RK
1195 insn_needs.output.regs[j][i] += out_max;
1196 insn_needs.other.regs[j][i]
1197 += MAX (MAX (insn_needs.input.regs[j][i],
1198 insn_needs.output.regs[j][i]),
1199 insn_needs.other_addr.regs[j][i]);
546b63fb 1200
ce0e109b
RK
1201 }
1202
8b3e912b 1203 /* Now compute group needs. */
546b63fb
RK
1204 for (in_max = 0, out_max = 0, j = 0;
1205 j < reload_n_operands; j++)
1206 {
8b3e912b
RK
1207 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1208 out_max
1209 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
546b63fb
RK
1210 }
1211
8b3e912b
RK
1212 in_max = MAX (in_max, insn_needs.op_addr.groups[i]);
1213 out_max = MAX (out_max, insn_needs.insn.groups[i]);
546b63fb 1214
8b3e912b
RK
1215 insn_needs.input.groups[i]
1216 = MAX (insn_needs.input.groups[i]
1217 + insn_needs.op_addr.groups[i]
1218 + insn_needs.insn.groups[i],
1219 in_max + insn_needs.input.groups[i]);
546b63fb 1220
8b3e912b
RK
1221 insn_needs.output.groups[i] += out_max;
1222 insn_needs.other.groups[i]
1223 += MAX (MAX (insn_needs.input.groups[i],
1224 insn_needs.output.groups[i]),
1225 insn_needs.other_addr.groups[i]);
546b63fb
RK
1226 }
1227
a8efe40d
RK
1228 /* If this is a CALL_INSN and caller-saves will need
1229 a spill register, act as if the spill register is
1230 needed for this insn. However, the spill register
1231 can be used by any reload of this insn, so we only
1232 need do something if no need for that class has
a8fdc208 1233 been recorded.
a8efe40d
RK
1234
1235 The assumption that every CALL_INSN will trigger a
1236 caller-save is highly conservative, however, the number
1237 of cases where caller-saves will need a spill register but
1238 a block containing a CALL_INSN won't need a spill register
1239 of that class should be quite rare.
1240
1241 If a group is needed, the size and mode of the group will
d45cf215 1242 have been set up at the beginning of this loop. */
a8efe40d
RK
1243
1244 if (GET_CODE (insn) == CALL_INSN
1245 && caller_save_spill_class != NO_REGS)
1246 {
8b3e912b
RK
1247 /* See if this register would conflict with any reload
1248 that needs a group. */
1249 int nongroup_need = 0;
1250 int *caller_save_needs;
1251
1252 for (j = 0; j < n_reloads; j++)
1253 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1254 (GET_MODE_SIZE (reload_outmode[j])
1255 > GET_MODE_SIZE (reload_inmode[j]))
1256 ? reload_outmode[j]
1257 : reload_inmode[j])
1258 > 1)
1259 && reg_classes_intersect_p (caller_save_spill_class,
1260 reload_reg_class[j]))
1261 {
1262 nongroup_need = 1;
1263 break;
1264 }
1265
1266 caller_save_needs
1267 = (caller_save_group_size > 1
1268 ? insn_needs.other.groups
1269 : insn_needs.other.regs[nongroup_need]);
a8efe40d
RK
1270
1271 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1272 {
1273 register enum reg_class *p
1274 = reg_class_superclasses[(int) caller_save_spill_class];
1275
1276 caller_save_needs[(int) caller_save_spill_class]++;
1277
1278 while (*p != LIM_REG_CLASSES)
0aaa6af8 1279 caller_save_needs[(int) *p++] += 1;
a8efe40d
RK
1280 }
1281
8b3e912b 1282 /* Show that this basic block will need a register of
d1c1397e
RS
1283 this class. */
1284
8b3e912b
RK
1285 if (global
1286 && ! (basic_block_needs[(int) caller_save_spill_class]
1287 [this_block]))
1288 {
1289 basic_block_needs[(int) caller_save_spill_class]
1290 [this_block] = 1;
1291 new_basic_block_needs = 1;
1292 }
a8efe40d
RK
1293 }
1294
32131a9c
RK
1295#ifdef SMALL_REGISTER_CLASSES
1296 /* If this insn stores the value of a function call,
1297 and that value is in a register that has been spilled,
1298 and if the insn needs a reload in a class
1299 that might use that register as the reload register,
1300 then add add an extra need in that class.
1301 This makes sure we have a register available that does
1302 not overlap the return value. */
8b3e912b 1303
32131a9c
RK
1304 if (avoid_return_reg)
1305 {
1306 int regno = REGNO (avoid_return_reg);
1307 int nregs
1308 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1309 int r;
546b63fb
RK
1310 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1311
1312 /* First compute the "basic needs", which counts a
1313 need only in the smallest class in which it
1314 is required. */
1315
8b3e912b
RK
1316 bcopy (insn_needs.other.regs[0], basic_needs,
1317 sizeof basic_needs);
1318 bcopy (insn_needs.other.groups, basic_groups,
1319 sizeof basic_groups);
546b63fb
RK
1320
1321 for (i = 0; i < N_REG_CLASSES; i++)
1322 {
1323 enum reg_class *p;
1324
1325 if (basic_needs[i] >= 0)
1326 for (p = reg_class_superclasses[i];
1327 *p != LIM_REG_CLASSES; p++)
1328 basic_needs[(int) *p] -= basic_needs[i];
1329
1330 if (basic_groups[i] >= 0)
1331 for (p = reg_class_superclasses[i];
1332 *p != LIM_REG_CLASSES; p++)
1333 basic_groups[(int) *p] -= basic_groups[i];
1334 }
1335
1336 /* Now count extra regs if there might be a conflict with
1337 the return value register.
1338
1339 ??? This is not quite correct because we don't properly
1340 handle the case of groups, but if we end up doing
1341 something wrong, it either will end up not mattering or
1342 we will abort elsewhere. */
1343
32131a9c
RK
1344 for (r = regno; r < regno + nregs; r++)
1345 if (spill_reg_order[r] >= 0)
1346 for (i = 0; i < N_REG_CLASSES; i++)
1347 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1348 {
546b63fb
RK
1349 if (basic_needs[i] > 0 || basic_groups[i] > 0)
1350 {
1351 enum reg_class *p;
1352
8b3e912b 1353 insn_needs.other.regs[0][i]++;
546b63fb
RK
1354 p = reg_class_superclasses[i];
1355 while (*p != LIM_REG_CLASSES)
8b3e912b 1356 insn_needs.other.regs[0][(int) *p++]++;
546b63fb 1357 }
32131a9c 1358 }
32131a9c
RK
1359 }
1360#endif /* SMALL_REGISTER_CLASSES */
1361
1362 /* For each class, collect maximum need of any insn. */
1363
1364 for (i = 0; i < N_REG_CLASSES; i++)
1365 {
8b3e912b 1366 if (max_needs[i] < insn_needs.other.regs[0][i])
5352b11a 1367 {
8b3e912b 1368 max_needs[i] = insn_needs.other.regs[0][i];
5352b11a
RS
1369 max_needs_insn[i] = insn;
1370 }
8b3e912b 1371 if (max_groups[i] < insn_needs.other.groups[i])
5352b11a 1372 {
8b3e912b 1373 max_groups[i] = insn_needs.other.groups[i];
5352b11a
RS
1374 max_groups_insn[i] = insn;
1375 }
8b3e912b 1376 if (max_nongroups[i] < insn_needs.other.regs[1][i])
ce0e109b 1377 {
8b3e912b 1378 max_nongroups[i] = insn_needs.other.regs[1][i];
ce0e109b
RK
1379 max_nongroups_insn[i] = insn;
1380 }
32131a9c
RK
1381 }
1382 }
1383 /* Note that there is a continue statement above. */
1384 }
1385
0dadecf6
RK
1386 /* If we allocated any new memory locations, make another pass
1387 since it might have changed elimination offsets. */
1388 if (starting_frame_size != get_frame_size ())
1389 something_changed = 1;
1390
e404a39a
RK
1391 if (dumpfile)
1392 for (i = 0; i < N_REG_CLASSES; i++)
1393 {
1394 if (max_needs[i] > 0)
1395 fprintf (dumpfile,
1396 ";; Need %d reg%s of class %s (for insn %d).\n",
1397 max_needs[i], max_needs[i] == 1 ? "" : "s",
1398 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1399 if (max_nongroups[i] > 0)
1400 fprintf (dumpfile,
1401 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1402 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1403 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1404 if (max_groups[i] > 0)
1405 fprintf (dumpfile,
1406 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1407 max_groups[i], max_groups[i] == 1 ? "" : "s",
1408 mode_name[(int) group_mode[i]],
1409 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1410 }
1411
d445b551 1412 /* If we have caller-saves, set up the save areas and see if caller-save
a8efe40d 1413 will need a spill register. */
32131a9c 1414
d445b551 1415 if (caller_save_needed
a8efe40d
RK
1416 && ! setup_save_areas (&something_changed)
1417 && caller_save_spill_class == NO_REGS)
32131a9c 1418 {
a8efe40d
RK
1419 /* The class we will need depends on whether the machine
1420 supports the sum of two registers for an address; see
1421 find_address_reloads for details. */
1422
a8fdc208 1423 caller_save_spill_class
a8efe40d
RK
1424 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1425 caller_save_group_size
1426 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1427 something_changed = 1;
32131a9c
RK
1428 }
1429
5c23c401
RK
1430 /* See if anything that happened changes which eliminations are valid.
1431 For example, on the Sparc, whether or not the frame pointer can
1432 be eliminated can depend on what registers have been used. We need
1433 not check some conditions again (such as flag_omit_frame_pointer)
1434 since they can't have changed. */
1435
1436 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3ec2ea3e 1437 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
5c23c401
RK
1438#ifdef ELIMINABLE_REGS
1439 || ! CAN_ELIMINATE (ep->from, ep->to)
1440#endif
1441 )
1442 ep->can_eliminate = 0;
1443
32131a9c
RK
1444 /* Look for the case where we have discovered that we can't replace
1445 register A with register B and that means that we will now be
1446 trying to replace register A with register C. This means we can
1447 no longer replace register C with register B and we need to disable
1448 such an elimination, if it exists. This occurs often with A == ap,
1449 B == sp, and C == fp. */
a8fdc208 1450
32131a9c
RK
1451 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1452 {
1453 struct elim_table *op;
1454 register int new_to = -1;
1455
1456 if (! ep->can_eliminate && ep->can_eliminate_previous)
1457 {
1458 /* Find the current elimination for ep->from, if there is a
1459 new one. */
1460 for (op = reg_eliminate;
1461 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1462 if (op->from == ep->from && op->can_eliminate)
1463 {
1464 new_to = op->to;
1465 break;
1466 }
1467
1468 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1469 disable it. */
1470 for (op = reg_eliminate;
1471 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1472 if (op->from == new_to && op->to == ep->to)
1473 op->can_eliminate = 0;
1474 }
1475 }
1476
1477 /* See if any registers that we thought we could eliminate the previous
1478 time are no longer eliminable. If so, something has changed and we
1479 must spill the register. Also, recompute the number of eliminable
1480 registers and see if the frame pointer is needed; it is if there is
1481 no elimination of the frame pointer that we can perform. */
1482
1483 frame_pointer_needed = 1;
1484 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1485 {
3ec2ea3e
DE
1486 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1487 && ep->to != HARD_FRAME_POINTER_REGNUM)
32131a9c
RK
1488 frame_pointer_needed = 0;
1489
1490 if (! ep->can_eliminate && ep->can_eliminate_previous)
1491 {
1492 ep->can_eliminate_previous = 0;
1493 spill_hard_reg (ep->from, global, dumpfile, 1);
1494 regs_ever_live[ep->from] = 1;
1495 something_changed = 1;
1496 num_eliminable--;
1497 }
1498 }
1499
1500 /* If all needs are met, we win. */
1501
1502 for (i = 0; i < N_REG_CLASSES; i++)
1503 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1504 break;
1505 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1506 break;
1507
546b63fb
RK
1508 /* Not all needs are met; must spill some hard regs. */
1509
1510 /* Put all registers spilled so far back in potential_reload_regs, but
1511 put them at the front, since we've already spilled most of the
1512 psuedos in them (we might have left some pseudos unspilled if they
1513 were in a block that didn't need any spill registers of a conflicting
1514 class. We used to try to mark off the need for those registers,
1515 but doing so properly is very complex and reallocating them is the
1516 simpler approach. First, "pack" potential_reload_regs by pushing
1517 any nonnegative entries towards the end. That will leave room
1518 for the registers we already spilled.
1519
1520 Also, undo the marking of the spill registers from the last time
1521 around in FORBIDDEN_REGS since we will be probably be allocating
1522 them again below.
1523
1524 ??? It is theoretically possible that we might end up not using one
1525 of our previously-spilled registers in this allocation, even though
1526 they are at the head of the list. It's not clear what to do about
1527 this, but it was no better before, when we marked off the needs met
1528 by the previously-spilled registers. With the current code, globals
1529 can be allocated into these registers, but locals cannot. */
1530
1531 if (n_spills)
1532 {
1533 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1534 if (potential_reload_regs[i] != -1)
1535 potential_reload_regs[j--] = potential_reload_regs[i];
32131a9c 1536
546b63fb
RK
1537 for (i = 0; i < n_spills; i++)
1538 {
1539 potential_reload_regs[i] = spill_regs[i];
1540 spill_reg_order[spill_regs[i]] = -1;
1541 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1542 }
32131a9c 1543
546b63fb
RK
1544 n_spills = 0;
1545 }
32131a9c
RK
1546
1547 /* Now find more reload regs to satisfy the remaining need
1548 Do it by ascending class number, since otherwise a reg
1549 might be spilled for a big class and might fail to count
1550 for a smaller class even though it belongs to that class.
1551
1552 Count spilled regs in `spills', and add entries to
1553 `spill_regs' and `spill_reg_order'.
1554
1555 ??? Note there is a problem here.
1556 When there is a need for a group in a high-numbered class,
1557 and also need for non-group regs that come from a lower class,
1558 the non-group regs are chosen first. If there aren't many regs,
1559 they might leave no room for a group.
1560
1561 This was happening on the 386. To fix it, we added the code
1562 that calls possible_group_p, so that the lower class won't
1563 break up the last possible group.
1564
1565 Really fixing the problem would require changes above
1566 in counting the regs already spilled, and in choose_reload_regs.
1567 It might be hard to avoid introducing bugs there. */
1568
546b63fb
RK
1569 CLEAR_HARD_REG_SET (counted_for_groups);
1570 CLEAR_HARD_REG_SET (counted_for_nongroups);
1571
32131a9c
RK
1572 for (class = 0; class < N_REG_CLASSES; class++)
1573 {
1574 /* First get the groups of registers.
1575 If we got single registers first, we might fragment
1576 possible groups. */
1577 while (max_groups[class] > 0)
1578 {
1579 /* If any single spilled regs happen to form groups,
1580 count them now. Maybe we don't really need
1581 to spill another group. */
1582 count_possible_groups (group_size, group_mode, max_groups);
1583
93193ab5
RK
1584 if (max_groups[class] <= 0)
1585 break;
1586
32131a9c
RK
1587 /* Groups of size 2 (the only groups used on most machines)
1588 are treated specially. */
1589 if (group_size[class] == 2)
1590 {
1591 /* First, look for a register that will complete a group. */
1592 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1593 {
32131a9c 1594 int other;
546b63fb
RK
1595
1596 j = potential_reload_regs[i];
32131a9c
RK
1597 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1598 &&
1599 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1600 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1601 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1602 && HARD_REGNO_MODE_OK (other, group_mode[class])
1603 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1604 other)
1605 /* We don't want one part of another group.
1606 We could get "two groups" that overlap! */
1607 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1608 ||
1609 (j < FIRST_PSEUDO_REGISTER - 1
1610 && (other = j + 1, spill_reg_order[other] >= 0)
1611 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1612 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1613 && HARD_REGNO_MODE_OK (j, group_mode[class])
1614 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1615 other)
1616 && ! TEST_HARD_REG_BIT (counted_for_groups,
1617 other))))
1618 {
1619 register enum reg_class *p;
1620
1621 /* We have found one that will complete a group,
1622 so count off one group as provided. */
1623 max_groups[class]--;
1624 p = reg_class_superclasses[class];
1625 while (*p != LIM_REG_CLASSES)
1626 max_groups[(int) *p++]--;
1627
1628 /* Indicate both these regs are part of a group. */
1629 SET_HARD_REG_BIT (counted_for_groups, j);
1630 SET_HARD_REG_BIT (counted_for_groups, other);
1631 break;
1632 }
1633 }
1634 /* We can't complete a group, so start one. */
92b0556d
RS
1635#ifdef SMALL_REGISTER_CLASSES
1636 /* Look for a pair neither of which is explicitly used. */
1637 if (i == FIRST_PSEUDO_REGISTER)
1638 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1639 {
1640 int k;
1641 j = potential_reload_regs[i];
1642 /* Verify that J+1 is a potential reload reg. */
1643 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1644 if (potential_reload_regs[k] == j + 1)
1645 break;
1646 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1647 && k < FIRST_PSEUDO_REGISTER
1648 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1649 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1650 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1651 && HARD_REGNO_MODE_OK (j, group_mode[class])
1652 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1653 j + 1)
1654 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1655 /* Reject J at this stage
1656 if J+1 was explicitly used. */
1657 && ! regs_explicitly_used[j + 1])
1658 break;
1659 }
1660#endif
1661 /* Now try any group at all
1662 whose registers are not in bad_spill_regs. */
32131a9c
RK
1663 if (i == FIRST_PSEUDO_REGISTER)
1664 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1665 {
57697575 1666 int k;
546b63fb 1667 j = potential_reload_regs[i];
57697575
RS
1668 /* Verify that J+1 is a potential reload reg. */
1669 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1670 if (potential_reload_regs[k] == j + 1)
1671 break;
32131a9c 1672 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
57697575 1673 && k < FIRST_PSEUDO_REGISTER
32131a9c
RK
1674 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1675 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1676 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1677 && HARD_REGNO_MODE_OK (j, group_mode[class])
1678 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
ee9f63c6
RS
1679 j + 1)
1680 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
32131a9c
RK
1681 break;
1682 }
1683
1684 /* I should be the index in potential_reload_regs
1685 of the new reload reg we have found. */
1686
5352b11a
RS
1687 if (i >= FIRST_PSEUDO_REGISTER)
1688 {
1689 /* There are no groups left to spill. */
1690 spill_failure (max_groups_insn[class]);
1691 failure = 1;
1692 goto failed;
1693 }
1694 else
1695 something_changed
fb3821f7 1696 |= new_spill_reg (i, class, max_needs, NULL_PTR,
5352b11a 1697 global, dumpfile);
32131a9c
RK
1698 }
1699 else
1700 {
1701 /* For groups of more than 2 registers,
1702 look for a sufficient sequence of unspilled registers,
1703 and spill them all at once. */
1704 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1705 {
32131a9c 1706 int k;
546b63fb
RK
1707
1708 j = potential_reload_regs[i];
9d1a4667
RS
1709 if (j >= 0
1710 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
32131a9c
RK
1711 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1712 {
1713 /* Check each reg in the sequence. */
1714 for (k = 0; k < group_size[class]; k++)
1715 if (! (spill_reg_order[j + k] < 0
1716 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1717 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1718 break;
1719 /* We got a full sequence, so spill them all. */
1720 if (k == group_size[class])
1721 {
1722 register enum reg_class *p;
1723 for (k = 0; k < group_size[class]; k++)
1724 {
1725 int idx;
1726 SET_HARD_REG_BIT (counted_for_groups, j + k);
1727 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1728 if (potential_reload_regs[idx] == j + k)
1729 break;
9d1a4667
RS
1730 something_changed
1731 |= new_spill_reg (idx, class,
1732 max_needs, NULL_PTR,
1733 global, dumpfile);
32131a9c
RK
1734 }
1735
1736 /* We have found one that will complete a group,
1737 so count off one group as provided. */
1738 max_groups[class]--;
1739 p = reg_class_superclasses[class];
1740 while (*p != LIM_REG_CLASSES)
1741 max_groups[(int) *p++]--;
1742
1743 break;
1744 }
1745 }
1746 }
fa52261e 1747 /* We couldn't find any registers for this reload.
9d1a4667
RS
1748 Avoid going into an infinite loop. */
1749 if (i >= FIRST_PSEUDO_REGISTER)
1750 {
1751 /* There are no groups left. */
1752 spill_failure (max_groups_insn[class]);
1753 failure = 1;
1754 goto failed;
1755 }
32131a9c
RK
1756 }
1757 }
1758
1759 /* Now similarly satisfy all need for single registers. */
1760
1761 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1762 {
9a6cde3a
RS
1763#ifdef SMALL_REGISTER_CLASSES
1764 /* This should be right for all machines, but only the 386
1765 is known to need it, so this conditional plays safe.
1766 ??? For 2.5, try making this unconditional. */
1767 /* If we spilled enough regs, but they weren't counted
1768 against the non-group need, see if we can count them now.
1769 If so, we can avoid some actual spilling. */
1770 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1771 for (i = 0; i < n_spills; i++)
1772 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1773 spill_regs[i])
1774 && !TEST_HARD_REG_BIT (counted_for_groups,
1775 spill_regs[i])
1776 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1777 spill_regs[i])
1778 && max_nongroups[class] > 0)
1779 {
1780 register enum reg_class *p;
1781
1782 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1783 max_nongroups[class]--;
1784 p = reg_class_superclasses[class];
1785 while (*p != LIM_REG_CLASSES)
1786 max_nongroups[(int) *p++]--;
1787 }
1788 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1789 break;
1790#endif
1791
32131a9c
RK
1792 /* Consider the potential reload regs that aren't
1793 yet in use as reload regs, in order of preference.
1794 Find the most preferred one that's in this class. */
1795
1796 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1797 if (potential_reload_regs[i] >= 0
1798 && TEST_HARD_REG_BIT (reg_class_contents[class],
1799 potential_reload_regs[i])
1800 /* If this reg will not be available for groups,
1801 pick one that does not foreclose possible groups.
1802 This is a kludge, and not very general,
1803 but it should be sufficient to make the 386 work,
1804 and the problem should not occur on machines with
1805 more registers. */
1806 && (max_nongroups[class] == 0
1807 || possible_group_p (potential_reload_regs[i], max_groups)))
1808 break;
1809
e404a39a
RK
1810 /* If we couldn't get a register, try to get one even if we
1811 might foreclose possible groups. This may cause problems
1812 later, but that's better than aborting now, since it is
1813 possible that we will, in fact, be able to form the needed
1814 group even with this allocation. */
1815
1816 if (i >= FIRST_PSEUDO_REGISTER
1817 && (asm_noperands (max_needs[class] > 0
1818 ? max_needs_insn[class]
1819 : max_nongroups_insn[class])
1820 < 0))
1821 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1822 if (potential_reload_regs[i] >= 0
1823 && TEST_HARD_REG_BIT (reg_class_contents[class],
1824 potential_reload_regs[i]))
1825 break;
1826
32131a9c
RK
1827 /* I should be the index in potential_reload_regs
1828 of the new reload reg we have found. */
1829
5352b11a
RS
1830 if (i >= FIRST_PSEUDO_REGISTER)
1831 {
1832 /* There are no possible registers left to spill. */
1833 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1834 : max_nongroups_insn[class]);
1835 failure = 1;
1836 goto failed;
1837 }
1838 else
1839 something_changed
1840 |= new_spill_reg (i, class, max_needs, max_nongroups,
1841 global, dumpfile);
32131a9c
RK
1842 }
1843 }
1844 }
1845
1846 /* If global-alloc was run, notify it of any register eliminations we have
1847 done. */
1848 if (global)
1849 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1850 if (ep->can_eliminate)
1851 mark_elimination (ep->from, ep->to);
1852
32131a9c 1853 /* Insert code to save and restore call-clobbered hard regs
a8efe40d
RK
1854 around calls. Tell if what mode to use so that we will process
1855 those insns in reload_as_needed if we have to. */
32131a9c
RK
1856
1857 if (caller_save_needed)
a8efe40d
RK
1858 save_call_clobbered_regs (num_eliminable ? QImode
1859 : caller_save_spill_class != NO_REGS ? HImode
1860 : VOIDmode);
32131a9c
RK
1861
1862 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1863 If that insn didn't set the register (i.e., it copied the register to
1864 memory), just delete that insn instead of the equivalencing insn plus
1865 anything now dead. If we call delete_dead_insn on that insn, we may
1866 delete the insn that actually sets the register if the register die
1867 there and that is incorrect. */
1868
1869 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1870 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1871 && GET_CODE (reg_equiv_init[i]) != NOTE)
1872 {
1873 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1874 delete_dead_insn (reg_equiv_init[i]);
1875 else
1876 {
1877 PUT_CODE (reg_equiv_init[i], NOTE);
1878 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1879 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1880 }
1881 }
1882
1883 /* Use the reload registers where necessary
1884 by generating move instructions to move the must-be-register
1885 values into or out of the reload registers. */
1886
a8efe40d
RK
1887 if (something_needs_reloads || something_needs_elimination
1888 || (caller_save_needed && num_eliminable)
1889 || caller_save_spill_class != NO_REGS)
32131a9c
RK
1890 reload_as_needed (first, global);
1891
2a1f8b6b 1892 /* If we were able to eliminate the frame pointer, show that it is no
546b63fb 1893 longer live at the start of any basic block. If it ls live by
2a1f8b6b
RK
1894 virtue of being in a pseudo, that pseudo will be marked live
1895 and hence the frame pointer will be known to be live via that
1896 pseudo. */
1897
1898 if (! frame_pointer_needed)
1899 for (i = 0; i < n_basic_blocks; i++)
3ec2ea3e
DE
1900 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1901 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1902 % REGSET_ELT_BITS));
2a1f8b6b 1903
5352b11a
RS
1904 /* Come here (with failure set nonzero) if we can't get enough spill regs
1905 and we decide not to abort about it. */
1906 failed:
1907
a3ec87a8
RS
1908 reload_in_progress = 0;
1909
32131a9c
RK
1910 /* Now eliminate all pseudo regs by modifying them into
1911 their equivalent memory references.
1912 The REG-rtx's for the pseudos are modified in place,
1913 so all insns that used to refer to them now refer to memory.
1914
1915 For a reg that has a reg_equiv_address, all those insns
1916 were changed by reloading so that no insns refer to it any longer;
1917 but the DECL_RTL of a variable decl may refer to it,
1918 and if so this causes the debugging info to mention the variable. */
1919
1920 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1921 {
1922 rtx addr = 0;
ab1fd483 1923 int in_struct = 0;
32131a9c 1924 if (reg_equiv_mem[i])
ab1fd483
RS
1925 {
1926 addr = XEXP (reg_equiv_mem[i], 0);
1927 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1928 }
32131a9c
RK
1929 if (reg_equiv_address[i])
1930 addr = reg_equiv_address[i];
1931 if (addr)
1932 {
1933 if (reg_renumber[i] < 0)
1934 {
1935 rtx reg = regno_reg_rtx[i];
1936 XEXP (reg, 0) = addr;
1937 REG_USERVAR_P (reg) = 0;
ab1fd483 1938 MEM_IN_STRUCT_P (reg) = in_struct;
32131a9c
RK
1939 PUT_CODE (reg, MEM);
1940 }
1941 else if (reg_equiv_mem[i])
1942 XEXP (reg_equiv_mem[i], 0) = addr;
1943 }
1944 }
1945
1946#ifdef PRESERVE_DEATH_INFO_REGNO_P
1947 /* Make a pass over all the insns and remove death notes for things that
1948 are no longer registers or no longer die in the insn (e.g., an input
1949 and output pseudo being tied). */
1950
1951 for (insn = first; insn; insn = NEXT_INSN (insn))
1952 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1953 {
1954 rtx note, next;
1955
1956 for (note = REG_NOTES (insn); note; note = next)
1957 {
1958 next = XEXP (note, 1);
1959 if (REG_NOTE_KIND (note) == REG_DEAD
1960 && (GET_CODE (XEXP (note, 0)) != REG
1961 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1962 remove_note (insn, note);
1963 }
1964 }
1965#endif
1966
1967 /* Indicate that we no longer have known memory locations or constants. */
1968 reg_equiv_constant = 0;
1969 reg_equiv_memory_loc = 0;
5352b11a 1970
c8ab4464
RS
1971 if (scratch_list)
1972 free (scratch_list);
c307c237 1973 scratch_list = 0;
c8ab4464
RS
1974 if (scratch_block)
1975 free (scratch_block);
c307c237
RK
1976 scratch_block = 0;
1977
5352b11a 1978 return failure;
32131a9c
RK
1979}
1980\f
1981/* Nonzero if, after spilling reg REGNO for non-groups,
1982 it will still be possible to find a group if we still need one. */
1983
1984static int
1985possible_group_p (regno, max_groups)
1986 int regno;
1987 int *max_groups;
1988{
1989 int i;
1990 int class = (int) NO_REGS;
1991
1992 for (i = 0; i < (int) N_REG_CLASSES; i++)
1993 if (max_groups[i] > 0)
1994 {
1995 class = i;
1996 break;
1997 }
1998
1999 if (class == (int) NO_REGS)
2000 return 1;
2001
2002 /* Consider each pair of consecutive registers. */
2003 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2004 {
2005 /* Ignore pairs that include reg REGNO. */
2006 if (i == regno || i + 1 == regno)
2007 continue;
2008
2009 /* Ignore pairs that are outside the class that needs the group.
2010 ??? Here we fail to handle the case where two different classes
2011 independently need groups. But this never happens with our
2012 current machine descriptions. */
2013 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2014 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2015 continue;
2016
2017 /* A pair of consecutive regs we can still spill does the trick. */
2018 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2019 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2020 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2021 return 1;
2022
2023 /* A pair of one already spilled and one we can spill does it
2024 provided the one already spilled is not otherwise reserved. */
2025 if (spill_reg_order[i] < 0
2026 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2027 && spill_reg_order[i + 1] >= 0
2028 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2029 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2030 return 1;
2031 if (spill_reg_order[i + 1] < 0
2032 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2033 && spill_reg_order[i] >= 0
2034 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2035 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2036 return 1;
2037 }
2038
2039 return 0;
2040}
2041\f
2042/* Count any groups that can be formed from the registers recently spilled.
2043 This is done class by class, in order of ascending class number. */
2044
2045static void
2046count_possible_groups (group_size, group_mode, max_groups)
546b63fb 2047 int *group_size;
32131a9c 2048 enum machine_mode *group_mode;
546b63fb 2049 int *max_groups;
32131a9c
RK
2050{
2051 int i;
2052 /* Now find all consecutive groups of spilled registers
2053 and mark each group off against the need for such groups.
2054 But don't count them against ordinary need, yet. */
2055
2056 for (i = 0; i < N_REG_CLASSES; i++)
2057 if (group_size[i] > 1)
2058 {
93193ab5 2059 HARD_REG_SET new;
32131a9c
RK
2060 int j;
2061
93193ab5
RK
2062 CLEAR_HARD_REG_SET (new);
2063
32131a9c
RK
2064 /* Make a mask of all the regs that are spill regs in class I. */
2065 for (j = 0; j < n_spills; j++)
2066 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
2067 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
2068 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
2069 spill_regs[j]))
93193ab5
RK
2070 SET_HARD_REG_BIT (new, spill_regs[j]);
2071
32131a9c
RK
2072 /* Find each consecutive group of them. */
2073 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
93193ab5
RK
2074 if (TEST_HARD_REG_BIT (new, j)
2075 && j + group_size[i] <= FIRST_PSEUDO_REGISTER
32131a9c
RK
2076 /* Next line in case group-mode for this class
2077 demands an even-odd pair. */
2078 && HARD_REGNO_MODE_OK (j, group_mode[i]))
2079 {
2080 int k;
2081 for (k = 1; k < group_size[i]; k++)
93193ab5 2082 if (! TEST_HARD_REG_BIT (new, j + k))
32131a9c
RK
2083 break;
2084 if (k == group_size[i])
2085 {
2086 /* We found a group. Mark it off against this class's
2087 need for groups, and against each superclass too. */
2088 register enum reg_class *p;
2089 max_groups[i]--;
2090 p = reg_class_superclasses[i];
2091 while (*p != LIM_REG_CLASSES)
2092 max_groups[(int) *p++]--;
a8fdc208 2093 /* Don't count these registers again. */
32131a9c
RK
2094 for (k = 0; k < group_size[i]; k++)
2095 SET_HARD_REG_BIT (counted_for_groups, j + k);
2096 }
fa52261e
RS
2097 /* Skip to the last reg in this group. When j is incremented
2098 above, it will then point to the first reg of the next
2099 possible group. */
2100 j += k - 1;
32131a9c
RK
2101 }
2102 }
2103
2104}
2105\f
2106/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2107 another mode that needs to be reloaded for the same register class CLASS.
2108 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2109 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2110
2111 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2112 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2113 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2114 causes unnecessary failures on machines requiring alignment of register
2115 groups when the two modes are different sizes, because the larger mode has
2116 more strict alignment rules than the smaller mode. */
2117
2118static int
2119modes_equiv_for_class_p (allocate_mode, other_mode, class)
2120 enum machine_mode allocate_mode, other_mode;
2121 enum reg_class class;
2122{
2123 register int regno;
2124 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2125 {
2126 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2127 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2128 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2129 return 0;
2130 }
2131 return 1;
2132}
2133
5352b11a
RS
2134/* Handle the failure to find a register to spill.
2135 INSN should be one of the insns which needed this particular spill reg. */
2136
2137static void
2138spill_failure (insn)
2139 rtx insn;
2140{
2141 if (asm_noperands (PATTERN (insn)) >= 0)
2142 error_for_asm (insn, "`asm' needs too many reloads");
2143 else
2144 abort ();
2145}
2146
32131a9c
RK
2147/* Add a new register to the tables of available spill-registers
2148 (as well as spilling all pseudos allocated to the register).
2149 I is the index of this register in potential_reload_regs.
2150 CLASS is the regclass whose need is being satisfied.
2151 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2152 so that this register can count off against them.
2153 MAX_NONGROUPS is 0 if this register is part of a group.
2154 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2155
2156static int
2157new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2158 int i;
2159 int class;
2160 int *max_needs;
2161 int *max_nongroups;
2162 int global;
2163 FILE *dumpfile;
2164{
2165 register enum reg_class *p;
2166 int val;
2167 int regno = potential_reload_regs[i];
2168
2169 if (i >= FIRST_PSEUDO_REGISTER)
2170 abort (); /* Caller failed to find any register. */
2171
2172 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2173 fatal ("fixed or forbidden register was spilled.\n\
56f58d3a
RK
2174This may be due to a compiler bug or to impossible asm\n\
2175statements or clauses.");
32131a9c
RK
2176
2177 /* Make reg REGNO an additional reload reg. */
2178
2179 potential_reload_regs[i] = -1;
2180 spill_regs[n_spills] = regno;
2181 spill_reg_order[regno] = n_spills;
2182 if (dumpfile)
2183 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2184
2185 /* Clear off the needs we just satisfied. */
2186
2187 max_needs[class]--;
2188 p = reg_class_superclasses[class];
2189 while (*p != LIM_REG_CLASSES)
2190 max_needs[(int) *p++]--;
2191
2192 if (max_nongroups && max_nongroups[class] > 0)
2193 {
2194 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2195 max_nongroups[class]--;
2196 p = reg_class_superclasses[class];
2197 while (*p != LIM_REG_CLASSES)
2198 max_nongroups[(int) *p++]--;
2199 }
2200
2201 /* Spill every pseudo reg that was allocated to this reg
2202 or to something that overlaps this reg. */
2203
2204 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2205
2206 /* If there are some registers still to eliminate and this register
2207 wasn't ever used before, additional stack space may have to be
2208 allocated to store this register. Thus, we may have changed the offset
2209 between the stack and frame pointers, so mark that something has changed.
2210 (If new pseudos were spilled, thus requiring more space, VAL would have
2211 been set non-zero by the call to spill_hard_reg above since additional
2212 reloads may be needed in that case.
2213
2214 One might think that we need only set VAL to 1 if this is a call-used
2215 register. However, the set of registers that must be saved by the
2216 prologue is not identical to the call-used set. For example, the
2217 register used by the call insn for the return PC is a call-used register,
2218 but must be saved by the prologue. */
2219 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2220 val = 1;
2221
2222 regs_ever_live[spill_regs[n_spills]] = 1;
2223 n_spills++;
2224
2225 return val;
2226}
2227\f
2228/* Delete an unneeded INSN and any previous insns who sole purpose is loading
2229 data that is dead in INSN. */
2230
2231static void
2232delete_dead_insn (insn)
2233 rtx insn;
2234{
2235 rtx prev = prev_real_insn (insn);
2236 rtx prev_dest;
2237
2238 /* If the previous insn sets a register that dies in our insn, delete it
2239 too. */
2240 if (prev && GET_CODE (PATTERN (prev)) == SET
2241 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2242 && reg_mentioned_p (prev_dest, PATTERN (insn))
2243 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2244 delete_dead_insn (prev);
2245
2246 PUT_CODE (insn, NOTE);
2247 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2248 NOTE_SOURCE_FILE (insn) = 0;
2249}
2250
2251/* Modify the home of pseudo-reg I.
2252 The new home is present in reg_renumber[I].
2253
2254 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2255 or it may be -1, meaning there is none or it is not relevant.
2256 This is used so that all pseudos spilled from a given hard reg
2257 can share one stack slot. */
2258
2259static void
2260alter_reg (i, from_reg)
2261 register int i;
2262 int from_reg;
2263{
2264 /* When outputting an inline function, this can happen
2265 for a reg that isn't actually used. */
2266 if (regno_reg_rtx[i] == 0)
2267 return;
2268
2269 /* If the reg got changed to a MEM at rtl-generation time,
2270 ignore it. */
2271 if (GET_CODE (regno_reg_rtx[i]) != REG)
2272 return;
2273
2274 /* Modify the reg-rtx to contain the new hard reg
2275 number or else to contain its pseudo reg number. */
2276 REGNO (regno_reg_rtx[i])
2277 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2278
2279 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2280 allocate a stack slot for it. */
2281
2282 if (reg_renumber[i] < 0
2283 && reg_n_refs[i] > 0
2284 && reg_equiv_constant[i] == 0
2285 && reg_equiv_memory_loc[i] == 0)
2286 {
2287 register rtx x;
2288 int inherent_size = PSEUDO_REGNO_BYTES (i);
2289 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2290 int adjust = 0;
2291
2292 /* Each pseudo reg has an inherent size which comes from its own mode,
2293 and a total size which provides room for paradoxical subregs
2294 which refer to the pseudo reg in wider modes.
2295
2296 We can use a slot already allocated if it provides both
2297 enough inherent space and enough total space.
2298 Otherwise, we allocate a new slot, making sure that it has no less
2299 inherent space, and no less total space, then the previous slot. */
2300 if (from_reg == -1)
2301 {
2302 /* No known place to spill from => no slot to reuse. */
2303 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2304#if BYTES_BIG_ENDIAN
2305 /* Cancel the big-endian correction done in assign_stack_local.
2306 Get the address of the beginning of the slot.
2307 This is so we can do a big-endian correction unconditionally
2308 below. */
2309 adjust = inherent_size - total_size;
2310#endif
2311 }
2312 /* Reuse a stack slot if possible. */
2313 else if (spill_stack_slot[from_reg] != 0
2314 && spill_stack_slot_width[from_reg] >= total_size
2315 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2316 >= inherent_size))
2317 x = spill_stack_slot[from_reg];
2318 /* Allocate a bigger slot. */
2319 else
2320 {
2321 /* Compute maximum size needed, both for inherent size
2322 and for total size. */
2323 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2324 if (spill_stack_slot[from_reg])
2325 {
2326 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2327 > inherent_size)
2328 mode = GET_MODE (spill_stack_slot[from_reg]);
2329 if (spill_stack_slot_width[from_reg] > total_size)
2330 total_size = spill_stack_slot_width[from_reg];
2331 }
2332 /* Make a slot with that size. */
2333 x = assign_stack_local (mode, total_size, -1);
2334#if BYTES_BIG_ENDIAN
2335 /* Cancel the big-endian correction done in assign_stack_local.
2336 Get the address of the beginning of the slot.
2337 This is so we can do a big-endian correction unconditionally
2338 below. */
2339 adjust = GET_MODE_SIZE (mode) - total_size;
2340#endif
2341 spill_stack_slot[from_reg] = x;
2342 spill_stack_slot_width[from_reg] = total_size;
2343 }
2344
2345#if BYTES_BIG_ENDIAN
2346 /* On a big endian machine, the "address" of the slot
2347 is the address of the low part that fits its inherent mode. */
2348 if (inherent_size < total_size)
2349 adjust += (total_size - inherent_size);
2350#endif /* BYTES_BIG_ENDIAN */
2351
2352 /* If we have any adjustment to make, or if the stack slot is the
2353 wrong mode, make a new stack slot. */
2354 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2355 {
2356 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2357 plus_constant (XEXP (x, 0), adjust));
2358 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2359 }
2360
2361 /* Save the stack slot for later. */
2362 reg_equiv_memory_loc[i] = x;
2363 }
2364}
2365
2366/* Mark the slots in regs_ever_live for the hard regs
2367 used by pseudo-reg number REGNO. */
2368
2369void
2370mark_home_live (regno)
2371 int regno;
2372{
2373 register int i, lim;
2374 i = reg_renumber[regno];
2375 if (i < 0)
2376 return;
2377 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2378 while (i < lim)
2379 regs_ever_live[i++] = 1;
2380}
c307c237
RK
2381
2382/* Mark the registers used in SCRATCH as being live. */
2383
2384static void
2385mark_scratch_live (scratch)
2386 rtx scratch;
2387{
2388 register int i;
2389 int regno = REGNO (scratch);
2390 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2391
2392 for (i = regno; i < lim; i++)
2393 regs_ever_live[i] = 1;
2394}
32131a9c
RK
2395\f
2396/* This function handles the tracking of elimination offsets around branches.
2397
2398 X is a piece of RTL being scanned.
2399
2400 INSN is the insn that it came from, if any.
2401
2402 INITIAL_P is non-zero if we are to set the offset to be the initial
2403 offset and zero if we are setting the offset of the label to be the
2404 current offset. */
2405
2406static void
2407set_label_offsets (x, insn, initial_p)
2408 rtx x;
2409 rtx insn;
2410 int initial_p;
2411{
2412 enum rtx_code code = GET_CODE (x);
2413 rtx tem;
2414 int i;
2415 struct elim_table *p;
2416
2417 switch (code)
2418 {
2419 case LABEL_REF:
8be386d9
RS
2420 if (LABEL_REF_NONLOCAL_P (x))
2421 return;
2422
32131a9c
RK
2423 x = XEXP (x, 0);
2424
2425 /* ... fall through ... */
2426
2427 case CODE_LABEL:
2428 /* If we know nothing about this label, set the desired offsets. Note
2429 that this sets the offset at a label to be the offset before a label
2430 if we don't know anything about the label. This is not correct for
2431 the label after a BARRIER, but is the best guess we can make. If
2432 we guessed wrong, we will suppress an elimination that might have
2433 been possible had we been able to guess correctly. */
2434
2435 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2436 {
2437 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2438 offsets_at[CODE_LABEL_NUMBER (x)][i]
2439 = (initial_p ? reg_eliminate[i].initial_offset
2440 : reg_eliminate[i].offset);
2441 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2442 }
2443
2444 /* Otherwise, if this is the definition of a label and it is
d45cf215 2445 preceded by a BARRIER, set our offsets to the known offset of
32131a9c
RK
2446 that label. */
2447
2448 else if (x == insn
2449 && (tem = prev_nonnote_insn (insn)) != 0
2450 && GET_CODE (tem) == BARRIER)
2a4b5f3b
RK
2451 {
2452 num_not_at_initial_offset = 0;
2453 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2454 {
2455 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2456 = offsets_at[CODE_LABEL_NUMBER (x)][i];
1d0d98f3
RK
2457 if (reg_eliminate[i].can_eliminate
2458 && (reg_eliminate[i].offset
2459 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
2460 num_not_at_initial_offset++;
2461 }
2462 }
32131a9c
RK
2463
2464 else
2465 /* If neither of the above cases is true, compare each offset
2466 with those previously recorded and suppress any eliminations
2467 where the offsets disagree. */
a8fdc208 2468
32131a9c
RK
2469 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2470 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2471 != (initial_p ? reg_eliminate[i].initial_offset
2472 : reg_eliminate[i].offset))
2473 reg_eliminate[i].can_eliminate = 0;
2474
2475 return;
2476
2477 case JUMP_INSN:
2478 set_label_offsets (PATTERN (insn), insn, initial_p);
2479
2480 /* ... fall through ... */
2481
2482 case INSN:
2483 case CALL_INSN:
2484 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2485 and hence must have all eliminations at their initial offsets. */
2486 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2487 if (REG_NOTE_KIND (tem) == REG_LABEL)
2488 set_label_offsets (XEXP (tem, 0), insn, 1);
2489 return;
2490
2491 case ADDR_VEC:
2492 case ADDR_DIFF_VEC:
2493 /* Each of the labels in the address vector must be at their initial
2494 offsets. We want the first first for ADDR_VEC and the second
2495 field for ADDR_DIFF_VEC. */
2496
2497 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2498 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2499 insn, initial_p);
2500 return;
2501
2502 case SET:
2503 /* We only care about setting PC. If the source is not RETURN,
2504 IF_THEN_ELSE, or a label, disable any eliminations not at
2505 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2506 isn't one of those possibilities. For branches to a label,
2507 call ourselves recursively.
2508
2509 Note that this can disable elimination unnecessarily when we have
2510 a non-local goto since it will look like a non-constant jump to
2511 someplace in the current function. This isn't a significant
2512 problem since such jumps will normally be when all elimination
2513 pairs are back to their initial offsets. */
2514
2515 if (SET_DEST (x) != pc_rtx)
2516 return;
2517
2518 switch (GET_CODE (SET_SRC (x)))
2519 {
2520 case PC:
2521 case RETURN:
2522 return;
2523
2524 case LABEL_REF:
2525 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2526 return;
2527
2528 case IF_THEN_ELSE:
2529 tem = XEXP (SET_SRC (x), 1);
2530 if (GET_CODE (tem) == LABEL_REF)
2531 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2532 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2533 break;
2534
2535 tem = XEXP (SET_SRC (x), 2);
2536 if (GET_CODE (tem) == LABEL_REF)
2537 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2538 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2539 break;
2540 return;
2541 }
2542
2543 /* If we reach here, all eliminations must be at their initial
2544 offset because we are doing a jump to a variable address. */
2545 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2546 if (p->offset != p->initial_offset)
2547 p->can_eliminate = 0;
2548 }
2549}
2550\f
2551/* Used for communication between the next two function to properly share
2552 the vector for an ASM_OPERANDS. */
2553
2554static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2555
a8fdc208 2556/* Scan X and replace any eliminable registers (such as fp) with a
32131a9c
RK
2557 replacement (such as sp), plus an offset.
2558
2559 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2560 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2561 MEM, we are allowed to replace a sum of a register and the constant zero
2562 with the register, which we cannot do outside a MEM. In addition, we need
2563 to record the fact that a register is referenced outside a MEM.
2564
ff32812a 2565 If INSN is an insn, it is the insn containing X. If we replace a REG
32131a9c
RK
2566 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2567 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2568 that the REG is being modified.
2569
ff32812a
RS
2570 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2571 That's used when we eliminate in expressions stored in notes.
2572 This means, do not set ref_outside_mem even if the reference
2573 is outside of MEMs.
2574
32131a9c
RK
2575 If we see a modification to a register we know about, take the
2576 appropriate action (see case SET, below).
2577
2578 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2579 replacements done assuming all offsets are at their initial values. If
2580 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2581 encounter, return the actual location so that find_reloads will do
2582 the proper thing. */
2583
2584rtx
2585eliminate_regs (x, mem_mode, insn)
2586 rtx x;
2587 enum machine_mode mem_mode;
2588 rtx insn;
2589{
2590 enum rtx_code code = GET_CODE (x);
2591 struct elim_table *ep;
2592 int regno;
2593 rtx new;
2594 int i, j;
2595 char *fmt;
2596 int copied = 0;
2597
2598 switch (code)
2599 {
2600 case CONST_INT:
2601 case CONST_DOUBLE:
2602 case CONST:
2603 case SYMBOL_REF:
2604 case CODE_LABEL:
2605 case PC:
2606 case CC0:
2607 case ASM_INPUT:
2608 case ADDR_VEC:
2609 case ADDR_DIFF_VEC:
2610 case RETURN:
2611 return x;
2612
2613 case REG:
2614 regno = REGNO (x);
2615
2616 /* First handle the case where we encounter a bare register that
2617 is eliminable. Replace it with a PLUS. */
2618 if (regno < FIRST_PSEUDO_REGISTER)
2619 {
2620 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2621 ep++)
2622 if (ep->from_rtx == x && ep->can_eliminate)
2623 {
ff32812a
RS
2624 if (! mem_mode
2625 /* Refs inside notes don't count for this purpose. */
fe089a90 2626 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
ff32812a 2627 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2628 ep->ref_outside_mem = 1;
2629 return plus_constant (ep->to_rtx, ep->previous_offset);
2630 }
2631
2632 }
2633 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2634 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2635 {
2636 /* In this case, find_reloads would attempt to either use an
2637 incorrect address (if something is not at its initial offset)
2638 or substitute an replaced address into an insn (which loses
2639 if the offset is changed by some later action). So we simply
2640 return the replaced stack slot (assuming it is changed by
2641 elimination) and ignore the fact that this is actually a
2642 reference to the pseudo. Ensure we make a copy of the
2643 address in case it is shared. */
fb3821f7 2644 new = eliminate_regs (reg_equiv_memory_loc[regno],
e5687447 2645 mem_mode, insn);
32131a9c 2646 if (new != reg_equiv_memory_loc[regno])
208dffa5
RS
2647 {
2648 cannot_omit_stores[regno] = 1;
2649 return copy_rtx (new);
2650 }
32131a9c
RK
2651 }
2652 return x;
2653
2654 case PLUS:
2655 /* If this is the sum of an eliminable register and a constant, rework
2656 the sum. */
2657 if (GET_CODE (XEXP (x, 0)) == REG
2658 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2659 && CONSTANT_P (XEXP (x, 1)))
2660 {
2661 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2662 ep++)
2663 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2664 {
e5687447
JW
2665 if (! mem_mode
2666 /* Refs inside notes don't count for this purpose. */
2667 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2668 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2669 ep->ref_outside_mem = 1;
2670
2671 /* The only time we want to replace a PLUS with a REG (this
2672 occurs when the constant operand of the PLUS is the negative
2673 of the offset) is when we are inside a MEM. We won't want
2674 to do so at other times because that would change the
2675 structure of the insn in a way that reload can't handle.
2676 We special-case the commonest situation in
2677 eliminate_regs_in_insn, so just replace a PLUS with a
2678 PLUS here, unless inside a MEM. */
a23b64d5 2679 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
32131a9c
RK
2680 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2681 return ep->to_rtx;
2682 else
2683 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2684 plus_constant (XEXP (x, 1),
2685 ep->previous_offset));
2686 }
2687
2688 /* If the register is not eliminable, we are done since the other
2689 operand is a constant. */
2690 return x;
2691 }
2692
2693 /* If this is part of an address, we want to bring any constant to the
2694 outermost PLUS. We will do this by doing register replacement in
2695 our operands and seeing if a constant shows up in one of them.
2696
2697 We assume here this is part of an address (or a "load address" insn)
2698 since an eliminable register is not likely to appear in any other
2699 context.
2700
2701 If we have (plus (eliminable) (reg)), we want to produce
2702 (plus (plus (replacement) (reg) (const))). If this was part of a
2703 normal add insn, (plus (replacement) (reg)) will be pushed as a
2704 reload. This is the desired action. */
2705
2706 {
e5687447
JW
2707 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2708 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
32131a9c
RK
2709
2710 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2711 {
2712 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2713 didn't get a hard register but has a reg_equiv_constant,
32131a9c
RK
2714 we must replace the constant here since it may no longer
2715 be in the position of any operand. */
2716 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2717 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2718 && reg_renumber[REGNO (new1)] < 0
2719 && reg_equiv_constant != 0
2720 && reg_equiv_constant[REGNO (new1)] != 0)
2721 new1 = reg_equiv_constant[REGNO (new1)];
2722 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2723 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2724 && reg_renumber[REGNO (new0)] < 0
2725 && reg_equiv_constant[REGNO (new0)] != 0)
2726 new0 = reg_equiv_constant[REGNO (new0)];
2727
2728 new = form_sum (new0, new1);
2729
2730 /* As above, if we are not inside a MEM we do not want to
2731 turn a PLUS into something else. We might try to do so here
2732 for an addition of 0 if we aren't optimizing. */
2733 if (! mem_mode && GET_CODE (new) != PLUS)
2734 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2735 else
2736 return new;
2737 }
2738 }
2739 return x;
2740
981c7390
RK
2741 case MULT:
2742 /* If this is the product of an eliminable register and a
2743 constant, apply the distribute law and move the constant out
2744 so that we have (plus (mult ..) ..). This is needed in order
2745 to keep load-address insns valid. This case is pathalogical.
2746 We ignore the possibility of overflow here. */
2747 if (GET_CODE (XEXP (x, 0)) == REG
2748 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2749 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2750 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2751 ep++)
2752 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2753 {
2754 if (! mem_mode
2755 /* Refs inside notes don't count for this purpose. */
2756 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2757 || GET_CODE (insn) == INSN_LIST)))
2758 ep->ref_outside_mem = 1;
2759
2760 return
2761 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2762 ep->previous_offset * INTVAL (XEXP (x, 1)));
2763 }
32131a9c
RK
2764
2765 /* ... fall through ... */
2766
32131a9c
RK
2767 case CALL:
2768 case COMPARE:
2769 case MINUS:
32131a9c
RK
2770 case DIV: case UDIV:
2771 case MOD: case UMOD:
2772 case AND: case IOR: case XOR:
45620ed4
RK
2773 case ROTATERT: case ROTATE:
2774 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
32131a9c
RK
2775 case NE: case EQ:
2776 case GE: case GT: case GEU: case GTU:
2777 case LE: case LT: case LEU: case LTU:
2778 {
e5687447 2779 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
fb3821f7 2780 rtx new1
e5687447 2781 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
32131a9c
RK
2782
2783 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2784 return gen_rtx (code, GET_MODE (x), new0, new1);
2785 }
2786 return x;
2787
981c7390
RK
2788 case EXPR_LIST:
2789 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2790 if (XEXP (x, 0))
2791 {
2792 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2793 if (new != XEXP (x, 0))
2794 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2795 }
2796
2797 /* ... fall through ... */
2798
2799 case INSN_LIST:
2800 /* Now do eliminations in the rest of the chain. If this was
2801 an EXPR_LIST, this might result in allocating more memory than is
2802 strictly needed, but it simplifies the code. */
2803 if (XEXP (x, 1))
2804 {
2805 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2806 if (new != XEXP (x, 1))
2807 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2808 }
2809 return x;
2810
32131a9c
RK
2811 case PRE_INC:
2812 case POST_INC:
2813 case PRE_DEC:
2814 case POST_DEC:
2815 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2816 if (ep->to_rtx == XEXP (x, 0))
2817 {
4c05b187
RK
2818 int size = GET_MODE_SIZE (mem_mode);
2819
2820 /* If more bytes than MEM_MODE are pushed, account for them. */
2821#ifdef PUSH_ROUNDING
2822 if (ep->to_rtx == stack_pointer_rtx)
2823 size = PUSH_ROUNDING (size);
2824#endif
32131a9c 2825 if (code == PRE_DEC || code == POST_DEC)
4c05b187 2826 ep->offset += size;
32131a9c 2827 else
4c05b187 2828 ep->offset -= size;
32131a9c
RK
2829 }
2830
2831 /* Fall through to generic unary operation case. */
2832 case USE:
2833 case STRICT_LOW_PART:
2834 case NEG: case NOT:
2835 case SIGN_EXTEND: case ZERO_EXTEND:
2836 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2837 case FLOAT: case FIX:
2838 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2839 case ABS:
2840 case SQRT:
2841 case FFS:
e5687447 2842 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
32131a9c
RK
2843 if (new != XEXP (x, 0))
2844 return gen_rtx (code, GET_MODE (x), new);
2845 return x;
2846
2847 case SUBREG:
2848 /* Similar to above processing, but preserve SUBREG_WORD.
2849 Convert (subreg (mem)) to (mem) if not paradoxical.
2850 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2851 pseudo didn't get a hard reg, we must replace this with the
2852 eliminated version of the memory location because push_reloads
2853 may do the replacement in certain circumstances. */
2854 if (GET_CODE (SUBREG_REG (x)) == REG
2855 && (GET_MODE_SIZE (GET_MODE (x))
2856 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2857 && reg_equiv_memory_loc != 0
2858 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2859 {
2860 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
e5687447 2861 mem_mode, insn);
32131a9c
RK
2862
2863 /* If we didn't change anything, we must retain the pseudo. */
2864 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2865 new = XEXP (x, 0);
2866 else
2867 /* Otherwise, ensure NEW isn't shared in case we have to reload
2868 it. */
2869 new = copy_rtx (new);
2870 }
2871 else
e5687447 2872 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
32131a9c
RK
2873
2874 if (new != XEXP (x, 0))
2875 {
2876 if (GET_CODE (new) == MEM
2877 && (GET_MODE_SIZE (GET_MODE (x))
a3b75c07 2878 <= GET_MODE_SIZE (GET_MODE (new)))
e90d3cbb 2879#ifdef LOAD_EXTEND_OP
a3b75c07
RS
2880 /* On these machines we will be reloading what is
2881 inside the SUBREG if it originally was a pseudo and
2882 the inner and outer modes are both a word or
2883 smaller. So leave the SUBREG then. */
2884 && ! (GET_CODE (SUBREG_REG (x)) == REG
2885 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2886 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
2887#endif
2888 )
32131a9c
RK
2889 {
2890 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2891 enum machine_mode mode = GET_MODE (x);
2892
2893#if BYTES_BIG_ENDIAN
2894 offset += (MIN (UNITS_PER_WORD,
2895 GET_MODE_SIZE (GET_MODE (new)))
2896 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2897#endif
2898
2899 PUT_MODE (new, mode);
2900 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2901 return new;
2902 }
2903 else
2904 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2905 }
2906
2907 return x;
2908
2909 case CLOBBER:
2910 /* If clobbering a register that is the replacement register for an
d45cf215 2911 elimination we still think can be performed, note that it cannot
32131a9c
RK
2912 be performed. Otherwise, we need not be concerned about it. */
2913 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2914 if (ep->to_rtx == XEXP (x, 0))
2915 ep->can_eliminate = 0;
2916
e5687447 2917 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2045084c
JVA
2918 if (new != XEXP (x, 0))
2919 return gen_rtx (code, GET_MODE (x), new);
32131a9c
RK
2920 return x;
2921
2922 case ASM_OPERANDS:
2923 {
2924 rtx *temp_vec;
2925 /* Properly handle sharing input and constraint vectors. */
2926 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2927 {
2928 /* When we come to a new vector not seen before,
2929 scan all its elements; keep the old vector if none
2930 of them changes; otherwise, make a copy. */
2931 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2932 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2933 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2934 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
e5687447 2935 mem_mode, insn);
32131a9c
RK
2936
2937 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2938 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2939 break;
2940
2941 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2942 new_asm_operands_vec = old_asm_operands_vec;
2943 else
2944 new_asm_operands_vec
2945 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2946 }
2947
2948 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2949 if (new_asm_operands_vec == old_asm_operands_vec)
2950 return x;
2951
2952 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2953 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2954 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2955 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2956 ASM_OPERANDS_SOURCE_FILE (x),
2957 ASM_OPERANDS_SOURCE_LINE (x));
2958 new->volatil = x->volatil;
2959 return new;
2960 }
2961
2962 case SET:
2963 /* Check for setting a register that we know about. */
2964 if (GET_CODE (SET_DEST (x)) == REG)
2965 {
2966 /* See if this is setting the replacement register for an
a8fdc208 2967 elimination.
32131a9c 2968
3ec2ea3e
DE
2969 If DEST is the hard frame pointer, we do nothing because we
2970 assume that all assignments to the frame pointer are for
2971 non-local gotos and are being done at a time when they are valid
2972 and do not disturb anything else. Some machines want to
2973 eliminate a fake argument pointer (or even a fake frame pointer)
2974 with either the real frame or the stack pointer. Assignments to
2975 the hard frame pointer must not prevent this elimination. */
32131a9c
RK
2976
2977 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2978 ep++)
2979 if (ep->to_rtx == SET_DEST (x)
3ec2ea3e 2980 && SET_DEST (x) != hard_frame_pointer_rtx)
32131a9c 2981 {
6dc42e49 2982 /* If it is being incremented, adjust the offset. Otherwise,
32131a9c
RK
2983 this elimination can't be done. */
2984 rtx src = SET_SRC (x);
2985
2986 if (GET_CODE (src) == PLUS
2987 && XEXP (src, 0) == SET_DEST (x)
2988 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2989 ep->offset -= INTVAL (XEXP (src, 1));
2990 else
2991 ep->can_eliminate = 0;
2992 }
2993
2994 /* Now check to see we are assigning to a register that can be
2995 eliminated. If so, it must be as part of a PARALLEL, since we
2996 will not have been called if this is a single SET. So indicate
2997 that we can no longer eliminate this reg. */
2998 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2999 ep++)
3000 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3001 ep->can_eliminate = 0;
3002 }
3003
3004 /* Now avoid the loop below in this common case. */
3005 {
e5687447
JW
3006 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3007 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
32131a9c 3008
ff32812a 3009 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
32131a9c
RK
3010 write a CLOBBER insn. */
3011 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
572ca60a
RS
3012 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3013 && GET_CODE (insn) != INSN_LIST)
32131a9c
RK
3014 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3015
3016 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3017 return gen_rtx (SET, VOIDmode, new0, new1);
3018 }
3019
3020 return x;
3021
3022 case MEM:
3023 /* Our only special processing is to pass the mode of the MEM to our
3024 recursive call and copy the flags. While we are here, handle this
3025 case more efficiently. */
e5687447 3026 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
32131a9c
RK
3027 if (new != XEXP (x, 0))
3028 {
3029 new = gen_rtx (MEM, GET_MODE (x), new);
3030 new->volatil = x->volatil;
3031 new->unchanging = x->unchanging;
3032 new->in_struct = x->in_struct;
3033 return new;
3034 }
3035 else
3036 return x;
3037 }
3038
3039 /* Process each of our operands recursively. If any have changed, make a
3040 copy of the rtx. */
3041 fmt = GET_RTX_FORMAT (code);
3042 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3043 {
3044 if (*fmt == 'e')
3045 {
e5687447 3046 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
32131a9c
RK
3047 if (new != XEXP (x, i) && ! copied)
3048 {
3049 rtx new_x = rtx_alloc (code);
3050 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3051 + (sizeof (new_x->fld[0])
3052 * GET_RTX_LENGTH (code))));
3053 x = new_x;
3054 copied = 1;
3055 }
3056 XEXP (x, i) = new;
3057 }
3058 else if (*fmt == 'E')
3059 {
3060 int copied_vec = 0;
3061 for (j = 0; j < XVECLEN (x, i); j++)
3062 {
3063 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3064 if (new != XVECEXP (x, i, j) && ! copied_vec)
3065 {
3066 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3067 &XVECEXP (x, i, 0));
3068 if (! copied)
3069 {
3070 rtx new_x = rtx_alloc (code);
3071 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3072 + (sizeof (new_x->fld[0])
3073 * GET_RTX_LENGTH (code))));
3074 x = new_x;
3075 copied = 1;
3076 }
3077 XVEC (x, i) = new_v;
3078 copied_vec = 1;
3079 }
3080 XVECEXP (x, i, j) = new;
3081 }
3082 }
3083 }
3084
3085 return x;
3086}
3087\f
3088/* Scan INSN and eliminate all eliminable registers in it.
3089
3090 If REPLACE is nonzero, do the replacement destructively. Also
3091 delete the insn as dead it if it is setting an eliminable register.
3092
3093 If REPLACE is zero, do all our allocations in reload_obstack.
3094
3095 If no eliminations were done and this insn doesn't require any elimination
3096 processing (these are not identical conditions: it might be updating sp,
3097 but not referencing fp; this needs to be seen during reload_as_needed so
3098 that the offset between fp and sp can be taken into consideration), zero
3099 is returned. Otherwise, 1 is returned. */
3100
3101static int
3102eliminate_regs_in_insn (insn, replace)
3103 rtx insn;
3104 int replace;
3105{
3106 rtx old_body = PATTERN (insn);
3107 rtx new_body;
3108 int val = 0;
3109 struct elim_table *ep;
3110
3111 if (! replace)
3112 push_obstacks (&reload_obstack, &reload_obstack);
3113
3114 if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
3115 && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
3116 {
3117 /* Check for setting an eliminable register. */
3118 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3119 if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
3120 {
3121 /* In this case this insn isn't serving a useful purpose. We
3122 will delete it in reload_as_needed once we know that this
3123 elimination is, in fact, being done.
3124
3125 If REPLACE isn't set, we can't delete this insn, but neededn't
3126 process it since it won't be used unless something changes. */
3127 if (replace)
3128 delete_dead_insn (insn);
3129 val = 1;
3130 goto done;
3131 }
3132
3133 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3134 in the insn is the negative of the offset in FROM. Substitute
3135 (set (reg) (reg to)) for the insn and change its code.
3136
3137 We have to do this here, rather than in eliminate_regs, do that we can
3138 change the insn code. */
3139
3140 if (GET_CODE (SET_SRC (old_body)) == PLUS
3141 && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
3142 && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
3143 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3144 ep++)
3145 if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
922d9d40 3146 && ep->can_eliminate)
32131a9c 3147 {
922d9d40
RK
3148 /* We must stop at the first elimination that will be used.
3149 If this one would replace the PLUS with a REG, do it
3150 now. Otherwise, quit the loop and let eliminate_regs
3151 do its normal replacement. */
3152 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
3153 {
3154 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3155 SET_DEST (old_body), ep->to_rtx);
3156 INSN_CODE (insn) = -1;
3157 val = 1;
3158 goto done;
3159 }
3160
3161 break;
32131a9c
RK
3162 }
3163 }
3164
3165 old_asm_operands_vec = 0;
3166
3167 /* Replace the body of this insn with a substituted form. If we changed
05b4c365 3168 something, return non-zero.
32131a9c
RK
3169
3170 If we are replacing a body that was a (set X (plus Y Z)), try to
3171 re-recognize the insn. We do this in case we had a simple addition
3172 but now can do this as a load-address. This saves an insn in this
3173 common case. */
3174
fb3821f7 3175 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
32131a9c
RK
3176 if (new_body != old_body)
3177 {
7c791b13
RK
3178 /* If we aren't replacing things permanently and we changed something,
3179 make another copy to ensure that all the RTL is new. Otherwise
3180 things can go wrong if find_reload swaps commutative operands
3181 and one is inside RTL that has been copied while the other is not. */
3182
4d411872
RS
3183 /* Don't copy an asm_operands because (1) there's no need and (2)
3184 copy_rtx can't do it properly when there are multiple outputs. */
b84f9d9c 3185 if (! replace && asm_noperands (old_body) < 0)
7c791b13
RK
3186 new_body = copy_rtx (new_body);
3187
4a5d0fb5 3188 /* If we had a move insn but now we don't, rerecognize it. */
0ba846c7
RS
3189 if ((GET_CODE (old_body) == SET && GET_CODE (SET_SRC (old_body)) == REG
3190 && (GET_CODE (new_body) != SET
3191 || GET_CODE (SET_SRC (new_body)) != REG))
51b8cba1
JL
3192 /* If this was a load from or store to memory, compare
3193 the MEM in recog_operand to the one in the insn. If they
3194 are not equal, then rerecognize the insn. */
3195 || (GET_CODE (old_body) == SET
3196 && ((GET_CODE (SET_SRC (old_body)) == MEM
3197 && SET_SRC (old_body) != recog_operand[1])
3198 || (GET_CODE (SET_DEST (old_body)) == MEM
3199 && SET_DEST (old_body) != recog_operand[0])))
0ba846c7
RS
3200 /* If this was an add insn before, rerecognize. */
3201 ||
3202 (GET_CODE (old_body) == SET
3203 && GET_CODE (SET_SRC (old_body)) == PLUS))
4a5d0fb5
RS
3204 {
3205 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
0ba846c7
RS
3206 /* If recognition fails, store the new body anyway.
3207 It's normal to have recognition failures here
3208 due to bizarre memory addresses; reloading will fix them. */
3209 PATTERN (insn) = new_body;
4a5d0fb5 3210 }
0ba846c7 3211 else
32131a9c
RK
3212 PATTERN (insn) = new_body;
3213
32131a9c
RK
3214 val = 1;
3215 }
a8fdc208 3216
32131a9c
RK
3217 /* Loop through all elimination pairs. See if any have changed and
3218 recalculate the number not at initial offset.
3219
a8efe40d
RK
3220 Compute the maximum offset (minimum offset if the stack does not
3221 grow downward) for each elimination pair.
3222
32131a9c
RK
3223 We also detect a cases where register elimination cannot be done,
3224 namely, if a register would be both changed and referenced outside a MEM
3225 in the resulting insn since such an insn is often undefined and, even if
3226 not, we cannot know what meaning will be given to it. Note that it is
3227 valid to have a register used in an address in an insn that changes it
3228 (presumably with a pre- or post-increment or decrement).
3229
3230 If anything changes, return nonzero. */
3231
3232 num_not_at_initial_offset = 0;
3233 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3234 {
3235 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3236 ep->can_eliminate = 0;
3237
3238 ep->ref_outside_mem = 0;
3239
3240 if (ep->previous_offset != ep->offset)
3241 val = 1;
3242
3243 ep->previous_offset = ep->offset;
3244 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3245 num_not_at_initial_offset++;
a8efe40d
RK
3246
3247#ifdef STACK_GROWS_DOWNWARD
3248 ep->max_offset = MAX (ep->max_offset, ep->offset);
3249#else
3250 ep->max_offset = MIN (ep->max_offset, ep->offset);
3251#endif
32131a9c
RK
3252 }
3253
3254 done:
05b4c365
RK
3255 /* If we changed something, perform elmination in REG_NOTES. This is
3256 needed even when REPLACE is zero because a REG_DEAD note might refer
3257 to a register that we eliminate and could cause a different number
3258 of spill registers to be needed in the final reload pass than in
3259 the pre-passes. */
20748cab 3260 if (val && REG_NOTES (insn) != 0)
ff32812a 3261 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
05b4c365 3262
32131a9c
RK
3263 if (! replace)
3264 pop_obstacks ();
3265
3266 return val;
3267}
3268
3269/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3270 replacement we currently believe is valid, mark it as not eliminable if X
3271 modifies DEST in any way other than by adding a constant integer to it.
3272
3273 If DEST is the frame pointer, we do nothing because we assume that
3ec2ea3e
DE
3274 all assignments to the hard frame pointer are nonlocal gotos and are being
3275 done at a time when they are valid and do not disturb anything else.
32131a9c 3276 Some machines want to eliminate a fake argument pointer with either the
3ec2ea3e
DE
3277 frame or stack pointer. Assignments to the hard frame pointer must not
3278 prevent this elimination.
32131a9c
RK
3279
3280 Called via note_stores from reload before starting its passes to scan
3281 the insns of the function. */
3282
3283static void
3284mark_not_eliminable (dest, x)
3285 rtx dest;
3286 rtx x;
3287{
3288 register int i;
3289
3290 /* A SUBREG of a hard register here is just changing its mode. We should
3291 not see a SUBREG of an eliminable hard register, but check just in
3292 case. */
3293 if (GET_CODE (dest) == SUBREG)
3294 dest = SUBREG_REG (dest);
3295
3ec2ea3e 3296 if (dest == hard_frame_pointer_rtx)
32131a9c
RK
3297 return;
3298
3299 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3300 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3301 && (GET_CODE (x) != SET
3302 || GET_CODE (SET_SRC (x)) != PLUS
3303 || XEXP (SET_SRC (x), 0) != dest
3304 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3305 {
3306 reg_eliminate[i].can_eliminate_previous
3307 = reg_eliminate[i].can_eliminate = 0;
3308 num_eliminable--;
3309 }
3310}
3311\f
3312/* Kick all pseudos out of hard register REGNO.
3313 If GLOBAL is nonzero, try to find someplace else to put them.
3314 If DUMPFILE is nonzero, log actions taken on that file.
3315
3316 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3317 because we found we can't eliminate some register. In the case, no pseudos
3318 are allowed to be in the register, even if they are only in a block that
3319 doesn't require spill registers, unlike the case when we are spilling this
3320 hard reg to produce another spill register.
3321
3322 Return nonzero if any pseudos needed to be kicked out. */
3323
3324static int
3325spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3326 register int regno;
3327 int global;
3328 FILE *dumpfile;
3329 int cant_eliminate;
3330{
c307c237 3331 enum reg_class class = REGNO_REG_CLASS (regno);
32131a9c
RK
3332 int something_changed = 0;
3333 register int i;
3334
3335 SET_HARD_REG_BIT (forbidden_regs, regno);
3336
3337 /* Spill every pseudo reg that was allocated to this reg
3338 or to something that overlaps this reg. */
3339
3340 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3341 if (reg_renumber[i] >= 0
3342 && reg_renumber[i] <= regno
a8fdc208 3343 && (reg_renumber[i]
32131a9c
RK
3344 + HARD_REGNO_NREGS (reg_renumber[i],
3345 PSEUDO_REGNO_MODE (i))
3346 > regno))
3347 {
32131a9c
RK
3348 /* If this register belongs solely to a basic block which needed no
3349 spilling of any class that this register is contained in,
3350 leave it be, unless we are spilling this register because
3351 it was a hard register that can't be eliminated. */
3352
3353 if (! cant_eliminate
3354 && basic_block_needs[0]
3355 && reg_basic_block[i] >= 0
3356 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3357 {
3358 enum reg_class *p;
3359
3360 for (p = reg_class_superclasses[(int) class];
3361 *p != LIM_REG_CLASSES; p++)
3362 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3363 break;
a8fdc208 3364
32131a9c
RK
3365 if (*p == LIM_REG_CLASSES)
3366 continue;
3367 }
3368
3369 /* Mark it as no longer having a hard register home. */
3370 reg_renumber[i] = -1;
3371 /* We will need to scan everything again. */
3372 something_changed = 1;
3373 if (global)
3374 retry_global_alloc (i, forbidden_regs);
3375
3376 alter_reg (i, regno);
3377 if (dumpfile)
3378 {
3379 if (reg_renumber[i] == -1)
3380 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3381 else
3382 fprintf (dumpfile, " Register %d now in %d.\n\n",
3383 i, reg_renumber[i]);
3384 }
3385 }
c307c237
RK
3386 for (i = 0; i < scratch_list_length; i++)
3387 {
3388 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3389 {
3390 if (! cant_eliminate && basic_block_needs[0]
3391 && ! basic_block_needs[(int) class][scratch_block[i]])
3392 {
3393 enum reg_class *p;
3394
3395 for (p = reg_class_superclasses[(int) class];
3396 *p != LIM_REG_CLASSES; p++)
3397 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3398 break;
3399
3400 if (*p == LIM_REG_CLASSES)
3401 continue;
3402 }
3403 PUT_CODE (scratch_list[i], SCRATCH);
3404 scratch_list[i] = 0;
3405 something_changed = 1;
3406 continue;
3407 }
3408 }
32131a9c
RK
3409
3410 return something_changed;
3411}
3412\f
56f58d3a
RK
3413/* Find all paradoxical subregs within X and update reg_max_ref_width.
3414 Also mark any hard registers used to store user variables as
3415 forbidden from being used for spill registers. */
32131a9c
RK
3416
3417static void
3418scan_paradoxical_subregs (x)
3419 register rtx x;
3420{
3421 register int i;
3422 register char *fmt;
3423 register enum rtx_code code = GET_CODE (x);
3424
3425 switch (code)
3426 {
56f58d3a
RK
3427 case REG:
3428#ifdef SMALL_REGISTER_CLASSES
3429 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3430 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3431#endif
3432 return;
3433
32131a9c
RK
3434 case CONST_INT:
3435 case CONST:
3436 case SYMBOL_REF:
3437 case LABEL_REF:
3438 case CONST_DOUBLE:
3439 case CC0:
3440 case PC:
32131a9c
RK
3441 case USE:
3442 case CLOBBER:
3443 return;
3444
3445 case SUBREG:
3446 if (GET_CODE (SUBREG_REG (x)) == REG
3447 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3448 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3449 = GET_MODE_SIZE (GET_MODE (x));
3450 return;
3451 }
3452
3453 fmt = GET_RTX_FORMAT (code);
3454 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3455 {
3456 if (fmt[i] == 'e')
3457 scan_paradoxical_subregs (XEXP (x, i));
3458 else if (fmt[i] == 'E')
3459 {
3460 register int j;
3461 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3462 scan_paradoxical_subregs (XVECEXP (x, i, j));
3463 }
3464 }
3465}
3466\f
32131a9c
RK
3467static int
3468hard_reg_use_compare (p1, p2)
3469 struct hard_reg_n_uses *p1, *p2;
3470{
3471 int tem = p1->uses - p2->uses;
3472 if (tem != 0) return tem;
3473 /* If regs are equally good, sort by regno,
3474 so that the results of qsort leave nothing to chance. */
3475 return p1->regno - p2->regno;
3476}
3477
3478/* Choose the order to consider regs for use as reload registers
3479 based on how much trouble would be caused by spilling one.
3480 Store them in order of decreasing preference in potential_reload_regs. */
3481
3482static void
3483order_regs_for_reload ()
3484{
3485 register int i;
3486 register int o = 0;
3487 int large = 0;
3488
3489 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3490
3491 CLEAR_HARD_REG_SET (bad_spill_regs);
3492
3493 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3494 potential_reload_regs[i] = -1;
3495
3496 /* Count number of uses of each hard reg by pseudo regs allocated to it
3497 and then order them by decreasing use. */
3498
3499 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3500 {
3501 hard_reg_n_uses[i].uses = 0;
3502 hard_reg_n_uses[i].regno = i;
3503 }
3504
3505 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3506 {
3507 int regno = reg_renumber[i];
3508 if (regno >= 0)
3509 {
3510 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3511 while (regno < lim)
3512 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3513 }
3514 large += reg_n_refs[i];
3515 }
3516
3517 /* Now fixed registers (which cannot safely be used for reloading)
3518 get a very high use count so they will be considered least desirable.
3519 Registers used explicitly in the rtl code are almost as bad. */
3520
3521 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3522 {
3523 if (fixed_regs[i])
3524 {
3525 hard_reg_n_uses[i].uses += 2 * large + 2;
3526 SET_HARD_REG_BIT (bad_spill_regs, i);
3527 }
3528 else if (regs_explicitly_used[i])
3529 {
3530 hard_reg_n_uses[i].uses += large + 1;
546b63fb 3531#ifndef SMALL_REGISTER_CLASSES
32131a9c
RK
3532 /* ??? We are doing this here because of the potential that
3533 bad code may be generated if a register explicitly used in
3534 an insn was used as a spill register for that insn. But
3535 not using these are spill registers may lose on some machine.
3536 We'll have to see how this works out. */
3537 SET_HARD_REG_BIT (bad_spill_regs, i);
546b63fb 3538#endif
32131a9c
RK
3539 }
3540 }
3ec2ea3e
DE
3541 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3542 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
32131a9c
RK
3543
3544#ifdef ELIMINABLE_REGS
3545 /* If registers other than the frame pointer are eliminable, mark them as
3546 poor choices. */
3547 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3548 {
3549 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3550 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3551 }
3552#endif
3553
3554 /* Prefer registers not so far used, for use in temporary loading.
3555 Among them, if REG_ALLOC_ORDER is defined, use that order.
3556 Otherwise, prefer registers not preserved by calls. */
3557
3558#ifdef REG_ALLOC_ORDER
3559 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3560 {
3561 int regno = reg_alloc_order[i];
3562
3563 if (hard_reg_n_uses[regno].uses == 0)
3564 potential_reload_regs[o++] = regno;
3565 }
3566#else
3567 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3568 {
3569 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3570 potential_reload_regs[o++] = i;
3571 }
3572 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3573 {
3574 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3575 potential_reload_regs[o++] = i;
3576 }
3577#endif
3578
3579 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3580 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3581
3582 /* Now add the regs that are already used,
3583 preferring those used less often. The fixed and otherwise forbidden
3584 registers will be at the end of this list. */
3585
3586 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3587 if (hard_reg_n_uses[i].uses != 0)
3588 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3589}
3590\f
3591/* Reload pseudo-registers into hard regs around each insn as needed.
3592 Additional register load insns are output before the insn that needs it
3593 and perhaps store insns after insns that modify the reloaded pseudo reg.
3594
3595 reg_last_reload_reg and reg_reloaded_contents keep track of
d08ea79f 3596 which registers are already available in reload registers.
32131a9c
RK
3597 We update these for the reloads that we perform,
3598 as the insns are scanned. */
3599
3600static void
3601reload_as_needed (first, live_known)
3602 rtx first;
3603 int live_known;
3604{
3605 register rtx insn;
3606 register int i;
3607 int this_block = 0;
3608 rtx x;
3609 rtx after_call = 0;
3610
3611 bzero (spill_reg_rtx, sizeof spill_reg_rtx);
3612 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3613 bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
3614 reg_has_output_reload = (char *) alloca (max_regno);
3615 for (i = 0; i < n_spills; i++)
3616 {
3617 reg_reloaded_contents[i] = -1;
3618 reg_reloaded_insn[i] = 0;
3619 }
3620
3621 /* Reset all offsets on eliminable registers to their initial values. */
3622#ifdef ELIMINABLE_REGS
3623 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3624 {
3625 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
510dd77e 3626 reg_eliminate[i].initial_offset);
32131a9c
RK
3627 reg_eliminate[i].previous_offset
3628 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3629 }
3630#else
3631 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3632 reg_eliminate[0].previous_offset
3633 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3634#endif
3635
3636 num_not_at_initial_offset = 0;
3637
3638 for (insn = first; insn;)
3639 {
3640 register rtx next = NEXT_INSN (insn);
3641
3642 /* Notice when we move to a new basic block. */
aa2c50d6 3643 if (live_known && this_block + 1 < n_basic_blocks
32131a9c
RK
3644 && insn == basic_block_head[this_block+1])
3645 ++this_block;
3646
3647 /* If we pass a label, copy the offsets from the label information
3648 into the current offsets of each elimination. */
3649 if (GET_CODE (insn) == CODE_LABEL)
2a4b5f3b
RK
3650 {
3651 num_not_at_initial_offset = 0;
3652 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3653 {
3654 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3655 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
1d0d98f3
RK
3656 if (reg_eliminate[i].can_eliminate
3657 && (reg_eliminate[i].offset
3658 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
3659 num_not_at_initial_offset++;
3660 }
3661 }
32131a9c
RK
3662
3663 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3664 {
3665 rtx avoid_return_reg = 0;
3666
3667#ifdef SMALL_REGISTER_CLASSES
3668 /* Set avoid_return_reg if this is an insn
3669 that might use the value of a function call. */
3670 if (GET_CODE (insn) == CALL_INSN)
3671 {
3672 if (GET_CODE (PATTERN (insn)) == SET)
3673 after_call = SET_DEST (PATTERN (insn));
3674 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3675 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3676 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3677 else
3678 after_call = 0;
3679 }
3680 else if (after_call != 0
3681 && !(GET_CODE (PATTERN (insn)) == SET
3682 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3683 {
2b979c57 3684 if (reg_referenced_p (after_call, PATTERN (insn)))
32131a9c
RK
3685 avoid_return_reg = after_call;
3686 after_call = 0;
3687 }
3688#endif /* SMALL_REGISTER_CLASSES */
3689
2758481d
RS
3690 /* If this is a USE and CLOBBER of a MEM, ensure that any
3691 references to eliminable registers have been removed. */
3692
3693 if ((GET_CODE (PATTERN (insn)) == USE
3694 || GET_CODE (PATTERN (insn)) == CLOBBER)
3695 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3696 XEXP (XEXP (PATTERN (insn), 0), 0)
3697 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
fb3821f7 3698 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
2758481d 3699
32131a9c
RK
3700 /* If we need to do register elimination processing, do so.
3701 This might delete the insn, in which case we are done. */
3702 if (num_eliminable && GET_MODE (insn) == QImode)
3703 {
3704 eliminate_regs_in_insn (insn, 1);
3705 if (GET_CODE (insn) == NOTE)
3706 {
3707 insn = next;
3708 continue;
3709 }
3710 }
3711
3712 if (GET_MODE (insn) == VOIDmode)
3713 n_reloads = 0;
3714 /* First find the pseudo regs that must be reloaded for this insn.
3715 This info is returned in the tables reload_... (see reload.h).
3716 Also modify the body of INSN by substituting RELOAD
3717 rtx's for those pseudo regs. */
3718 else
3719 {
3720 bzero (reg_has_output_reload, max_regno);
3721 CLEAR_HARD_REG_SET (reg_is_output_reload);
3722
3723 find_reloads (insn, 1, spill_indirect_levels, live_known,
3724 spill_reg_order);
3725 }
3726
3727 if (n_reloads > 0)
3728 {
3c3eeea6
RK
3729 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3730 rtx p;
32131a9c
RK
3731 int class;
3732
3733 /* If this block has not had spilling done for a
546b63fb
RK
3734 particular clas and we have any non-optionals that need a
3735 spill reg in that class, abort. */
32131a9c
RK
3736
3737 for (class = 0; class < N_REG_CLASSES; class++)
3738 if (basic_block_needs[class] != 0
3739 && basic_block_needs[class][this_block] == 0)
3740 for (i = 0; i < n_reloads; i++)
546b63fb
RK
3741 if (class == (int) reload_reg_class[i]
3742 && reload_reg_rtx[i] == 0
3743 && ! reload_optional[i]
3744 && (reload_in[i] != 0 || reload_out[i] != 0
3745 || reload_secondary_p[i] != 0))
3746 abort ();
32131a9c
RK
3747
3748 /* Now compute which reload regs to reload them into. Perhaps
3749 reusing reload regs from previous insns, or else output
3750 load insns to reload them. Maybe output store insns too.
3751 Record the choices of reload reg in reload_reg_rtx. */
3752 choose_reload_regs (insn, avoid_return_reg);
3753
546b63fb
RK
3754#ifdef SMALL_REGISTER_CLASSES
3755 /* Merge any reloads that we didn't combine for fear of
3756 increasing the number of spill registers needed but now
3757 discover can be safely merged. */
3758 merge_assigned_reloads (insn);
3759#endif
3760
32131a9c
RK
3761 /* Generate the insns to reload operands into or out of
3762 their reload regs. */
3763 emit_reload_insns (insn);
3764
3765 /* Substitute the chosen reload regs from reload_reg_rtx
3766 into the insn's body (or perhaps into the bodies of other
3767 load and store insn that we just made for reloading
3768 and that we moved the structure into). */
3769 subst_reloads ();
3c3eeea6
RK
3770
3771 /* If this was an ASM, make sure that all the reload insns
3772 we have generated are valid. If not, give an error
3773 and delete them. */
3774
3775 if (asm_noperands (PATTERN (insn)) >= 0)
3776 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3777 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3778 && (recog_memoized (p) < 0
3779 || (insn_extract (p),
3780 ! constrain_operands (INSN_CODE (p), 1))))
3781 {
3782 error_for_asm (insn,
3783 "`asm' operand requires impossible reload");
3784 PUT_CODE (p, NOTE);
3785 NOTE_SOURCE_FILE (p) = 0;
3786 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3787 }
32131a9c
RK
3788 }
3789 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3790 is no longer validly lying around to save a future reload.
3791 Note that this does not detect pseudos that were reloaded
3792 for this insn in order to be stored in
3793 (obeying register constraints). That is correct; such reload
3794 registers ARE still valid. */
3795 note_stores (PATTERN (insn), forget_old_reloads_1);
3796
3797 /* There may have been CLOBBER insns placed after INSN. So scan
3798 between INSN and NEXT and use them to forget old reloads. */
3799 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3800 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3801 note_stores (PATTERN (x), forget_old_reloads_1);
3802
3803#ifdef AUTO_INC_DEC
3804 /* Likewise for regs altered by auto-increment in this insn.
3805 But note that the reg-notes are not changed by reloading:
3806 they still contain the pseudo-regs, not the spill regs. */
3807 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3808 if (REG_NOTE_KIND (x) == REG_INC)
3809 {
3810 /* See if this pseudo reg was reloaded in this insn.
3811 If so, its last-reload info is still valid
3812 because it is based on this insn's reload. */
3813 for (i = 0; i < n_reloads; i++)
3814 if (reload_out[i] == XEXP (x, 0))
3815 break;
3816
08fb99fa 3817 if (i == n_reloads)
9a881562 3818 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
32131a9c
RK
3819 }
3820#endif
3821 }
3822 /* A reload reg's contents are unknown after a label. */
3823 if (GET_CODE (insn) == CODE_LABEL)
3824 for (i = 0; i < n_spills; i++)
3825 {
3826 reg_reloaded_contents[i] = -1;
3827 reg_reloaded_insn[i] = 0;
3828 }
3829
3830 /* Don't assume a reload reg is still good after a call insn
3831 if it is a call-used reg. */
546b63fb 3832 else if (GET_CODE (insn) == CALL_INSN)
32131a9c
RK
3833 for (i = 0; i < n_spills; i++)
3834 if (call_used_regs[spill_regs[i]])
3835 {
3836 reg_reloaded_contents[i] = -1;
3837 reg_reloaded_insn[i] = 0;
3838 }
3839
3840 /* In case registers overlap, allow certain insns to invalidate
3841 particular hard registers. */
3842
3843#ifdef INSN_CLOBBERS_REGNO_P
3844 for (i = 0 ; i < n_spills ; i++)
3845 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3846 {
3847 reg_reloaded_contents[i] = -1;
3848 reg_reloaded_insn[i] = 0;
3849 }
3850#endif
3851
3852 insn = next;
3853
3854#ifdef USE_C_ALLOCA
3855 alloca (0);
3856#endif
3857 }
3858}
3859
3860/* Discard all record of any value reloaded from X,
3861 or reloaded in X from someplace else;
3862 unless X is an output reload reg of the current insn.
3863
3864 X may be a hard reg (the reload reg)
3865 or it may be a pseudo reg that was reloaded from. */
3866
3867static void
9a881562 3868forget_old_reloads_1 (x, ignored)
32131a9c 3869 rtx x;
9a881562 3870 rtx ignored;
32131a9c
RK
3871{
3872 register int regno;
3873 int nr;
0a2e51a9
RS
3874 int offset = 0;
3875
3876 /* note_stores does give us subregs of hard regs. */
3877 while (GET_CODE (x) == SUBREG)
3878 {
3879 offset += SUBREG_WORD (x);
3880 x = SUBREG_REG (x);
3881 }
32131a9c
RK
3882
3883 if (GET_CODE (x) != REG)
3884 return;
3885
0a2e51a9 3886 regno = REGNO (x) + offset;
32131a9c
RK
3887
3888 if (regno >= FIRST_PSEUDO_REGISTER)
3889 nr = 1;
3890 else
3891 {
3892 int i;
3893 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3894 /* Storing into a spilled-reg invalidates its contents.
3895 This can happen if a block-local pseudo is allocated to that reg
3896 and it wasn't spilled because this block's total need is 0.
3897 Then some insn might have an optional reload and use this reg. */
3898 for (i = 0; i < nr; i++)
3899 if (spill_reg_order[regno + i] >= 0
3900 /* But don't do this if the reg actually serves as an output
3901 reload reg in the current instruction. */
3902 && (n_reloads == 0
3903 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3904 {
3905 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3906 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3907 }
3908 }
3909
3910 /* Since value of X has changed,
3911 forget any value previously copied from it. */
3912
3913 while (nr-- > 0)
3914 /* But don't forget a copy if this is the output reload
3915 that establishes the copy's validity. */
3916 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3917 reg_last_reload_reg[regno + nr] = 0;
3918}
3919\f
3920/* For each reload, the mode of the reload register. */
3921static enum machine_mode reload_mode[MAX_RELOADS];
3922
3923/* For each reload, the largest number of registers it will require. */
3924static int reload_nregs[MAX_RELOADS];
3925
3926/* Comparison function for qsort to decide which of two reloads
3927 should be handled first. *P1 and *P2 are the reload numbers. */
3928
3929static int
3930reload_reg_class_lower (p1, p2)
3931 short *p1, *p2;
3932{
3933 register int r1 = *p1, r2 = *p2;
3934 register int t;
a8fdc208 3935
32131a9c
RK
3936 /* Consider required reloads before optional ones. */
3937 t = reload_optional[r1] - reload_optional[r2];
3938 if (t != 0)
3939 return t;
3940
3941 /* Count all solitary classes before non-solitary ones. */
3942 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3943 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3944 if (t != 0)
3945 return t;
3946
3947 /* Aside from solitaires, consider all multi-reg groups first. */
3948 t = reload_nregs[r2] - reload_nregs[r1];
3949 if (t != 0)
3950 return t;
3951
3952 /* Consider reloads in order of increasing reg-class number. */
3953 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3954 if (t != 0)
3955 return t;
3956
3957 /* If reloads are equally urgent, sort by reload number,
3958 so that the results of qsort leave nothing to chance. */
3959 return r1 - r2;
3960}
3961\f
3962/* The following HARD_REG_SETs indicate when each hard register is
3963 used for a reload of various parts of the current insn. */
3964
3965/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
3966static HARD_REG_SET reload_reg_used;
546b63fb
RK
3967/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
3968static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
3969/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
3970static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
3971/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
3972static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
3973/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
3974static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c
RK
3975/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
3976static HARD_REG_SET reload_reg_used_in_op_addr;
546b63fb
RK
3977/* If reg is in use for a RELOAD_FOR_INSN reload. */
3978static HARD_REG_SET reload_reg_used_in_insn;
3979/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
3980static HARD_REG_SET reload_reg_used_in_other_addr;
32131a9c
RK
3981
3982/* If reg is in use as a reload reg for any sort of reload. */
3983static HARD_REG_SET reload_reg_used_at_all;
3984
be7ae2a4
RK
3985/* If reg is use as an inherited reload. We just mark the first register
3986 in the group. */
3987static HARD_REG_SET reload_reg_used_for_inherit;
3988
546b63fb
RK
3989/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
3990 TYPE. MODE is used to indicate how many consecutive regs are
3991 actually used. */
32131a9c
RK
3992
3993static void
546b63fb 3994mark_reload_reg_in_use (regno, opnum, type, mode)
32131a9c 3995 int regno;
546b63fb
RK
3996 int opnum;
3997 enum reload_type type;
32131a9c
RK
3998 enum machine_mode mode;
3999{
4000 int nregs = HARD_REGNO_NREGS (regno, mode);
4001 int i;
4002
4003 for (i = regno; i < nregs + regno; i++)
4004 {
546b63fb 4005 switch (type)
32131a9c
RK
4006 {
4007 case RELOAD_OTHER:
4008 SET_HARD_REG_BIT (reload_reg_used, i);
4009 break;
4010
546b63fb
RK
4011 case RELOAD_FOR_INPUT_ADDRESS:
4012 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
32131a9c
RK
4013 break;
4014
546b63fb
RK
4015 case RELOAD_FOR_OUTPUT_ADDRESS:
4016 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
32131a9c
RK
4017 break;
4018
4019 case RELOAD_FOR_OPERAND_ADDRESS:
4020 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4021 break;
4022
546b63fb
RK
4023 case RELOAD_FOR_OTHER_ADDRESS:
4024 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4025 break;
4026
32131a9c 4027 case RELOAD_FOR_INPUT:
546b63fb 4028 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
32131a9c
RK
4029 break;
4030
4031 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4032 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4033 break;
4034
4035 case RELOAD_FOR_INSN:
4036 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
32131a9c
RK
4037 break;
4038 }
4039
4040 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4041 }
4042}
4043
be7ae2a4
RK
4044/* Similarly, but show REGNO is no longer in use for a reload. */
4045
4046static void
4047clear_reload_reg_in_use (regno, opnum, type, mode)
4048 int regno;
4049 int opnum;
4050 enum reload_type type;
4051 enum machine_mode mode;
4052{
4053 int nregs = HARD_REGNO_NREGS (regno, mode);
4054 int i;
4055
4056 for (i = regno; i < nregs + regno; i++)
4057 {
4058 switch (type)
4059 {
4060 case RELOAD_OTHER:
4061 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4062 break;
4063
4064 case RELOAD_FOR_INPUT_ADDRESS:
4065 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4066 break;
4067
4068 case RELOAD_FOR_OUTPUT_ADDRESS:
4069 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4070 break;
4071
4072 case RELOAD_FOR_OPERAND_ADDRESS:
4073 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4074 break;
4075
4076 case RELOAD_FOR_OTHER_ADDRESS:
4077 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4078 break;
4079
4080 case RELOAD_FOR_INPUT:
4081 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4082 break;
4083
4084 case RELOAD_FOR_OUTPUT:
4085 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4086 break;
4087
4088 case RELOAD_FOR_INSN:
4089 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4090 break;
4091 }
4092 }
4093}
4094
32131a9c 4095/* 1 if reg REGNO is free as a reload reg for a reload of the sort
546b63fb 4096 specified by OPNUM and TYPE. */
32131a9c
RK
4097
4098static int
546b63fb 4099reload_reg_free_p (regno, opnum, type)
32131a9c 4100 int regno;
546b63fb
RK
4101 int opnum;
4102 enum reload_type type;
32131a9c 4103{
546b63fb
RK
4104 int i;
4105
4106 /* In use for a RELOAD_OTHER means it's not available for anything except
4107 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4108 to be used only for inputs. */
4109
4110 if (type != RELOAD_FOR_OTHER_ADDRESS
4111 && TEST_HARD_REG_BIT (reload_reg_used, regno))
32131a9c 4112 return 0;
546b63fb
RK
4113
4114 switch (type)
32131a9c
RK
4115 {
4116 case RELOAD_OTHER:
4117 /* In use for anything means not available for a RELOAD_OTHER. */
4118 return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);
4119
4120 /* The other kinds of use can sometimes share a register. */
4121 case RELOAD_FOR_INPUT:
546b63fb
RK
4122 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4123 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4124 return 0;
4125
4126 /* If it is used for some other input, can't use it. */
4127 for (i = 0; i < reload_n_operands; i++)
4128 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4129 return 0;
4130
4131 /* If it is used in a later operand's address, can't use it. */
4132 for (i = opnum + 1; i < reload_n_operands; i++)
4133 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4134 return 0;
4135
4136 return 1;
4137
4138 case RELOAD_FOR_INPUT_ADDRESS:
4139 /* Can't use a register if it is used for an input address for this
4140 operand or used as an input in an earlier one. */
4141 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4142 return 0;
4143
4144 for (i = 0; i < opnum; i++)
4145 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4146 return 0;
4147
4148 return 1;
4149
4150 case RELOAD_FOR_OUTPUT_ADDRESS:
4151 /* Can't use a register if it is used for an output address for this
4152 operand or used as an output in this or a later operand. */
4153 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4154 return 0;
4155
4156 for (i = opnum; i < reload_n_operands; i++)
4157 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4158 return 0;
4159
4160 return 1;
4161
32131a9c 4162 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4163 for (i = 0; i < reload_n_operands; i++)
4164 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4165 return 0;
4166
4167 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4168 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4169
32131a9c 4170 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4171 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4172 outputs, or an operand address for this or an earlier output. */
4173 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4174 return 0;
4175
4176 for (i = 0; i < reload_n_operands; i++)
4177 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4178 return 0;
4179
4180 for (i = 0; i <= opnum; i++)
4181 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4182 return 0;
4183
4184 return 1;
4185
4186 case RELOAD_FOR_INSN:
4187 for (i = 0; i < reload_n_operands; i++)
4188 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4189 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4190 return 0;
4191
4192 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4193 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4194
4195 case RELOAD_FOR_OTHER_ADDRESS:
4196 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4197 }
4198 abort ();
4199}
4200
4201/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4202 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4203 is not in use for a reload in any prior part of the insn.
4204
4205 We can assume that the reload reg was already tested for availability
4206 at the time it is needed, and we should not check this again,
4207 in case the reg has already been marked in use. */
4208
4209static int
546b63fb 4210reload_reg_free_before_p (regno, opnum, type)
32131a9c 4211 int regno;
546b63fb
RK
4212 int opnum;
4213 enum reload_type type;
32131a9c 4214{
546b63fb
RK
4215 int i;
4216
4217 switch (type)
32131a9c 4218 {
546b63fb
RK
4219 case RELOAD_FOR_OTHER_ADDRESS:
4220 /* These always come first. */
32131a9c
RK
4221 return 1;
4222
546b63fb
RK
4223 case RELOAD_OTHER:
4224 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4225
32131a9c 4226 /* If this use is for part of the insn,
546b63fb
RK
4227 check the reg is not in use for any prior part. It is tempting
4228 to try to do this by falling through from objecs that occur
4229 later in the insn to ones that occur earlier, but that will not
4230 correctly take into account the fact that here we MUST ignore
4231 things that would prevent the register from being allocated in
4232 the first place, since we know that it was allocated. */
4233
4234 case RELOAD_FOR_OUTPUT_ADDRESS:
4235 /* Earlier reloads are for earlier outputs or their addresses,
4236 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4237 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4238 RELOAD_OTHER).. */
4239 for (i = 0; i < opnum; i++)
4240 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4241 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4242 return 0;
4243
4244 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
32131a9c 4245 return 0;
546b63fb
RK
4246
4247 for (i = 0; i < reload_n_operands; i++)
4248 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4249 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4250 return 0;
4251
4252 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4253 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4254 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4255
32131a9c 4256 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4257 /* This can't be used in the output address for this operand and
4258 anything that can't be used for it, except that we've already
4259 tested for RELOAD_FOR_INSN objects. */
4260
4261 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
32131a9c 4262 return 0;
546b63fb
RK
4263
4264 for (i = 0; i < opnum; i++)
4265 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4266 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4267 return 0;
4268
4269 for (i = 0; i < reload_n_operands; i++)
4270 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4271 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4272 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4273 return 0;
4274
4275 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4276
32131a9c 4277 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4278 case RELOAD_FOR_INSN:
4279 /* These can't conflict with inputs, or each other, so all we have to
4280 test is input addresses and the addresses of OTHER items. */
4281
4282 for (i = 0; i < reload_n_operands; i++)
4283 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4284 return 0;
4285
4286 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4287
32131a9c 4288 case RELOAD_FOR_INPUT:
546b63fb
RK
4289 /* The only things earlier are the address for this and
4290 earlier inputs, other inputs (which we know we don't conflict
4291 with), and addresses of RELOAD_OTHER objects. */
4292
4293 for (i = 0; i <= opnum; i++)
4294 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4295 return 0;
4296
4297 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4298
4299 case RELOAD_FOR_INPUT_ADDRESS:
4300 /* Similarly, all we have to check is for use in earlier inputs'
4301 addresses. */
4302 for (i = 0; i < opnum; i++)
4303 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4304 return 0;
4305
4306 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4307 }
4308 abort ();
4309}
4310
4311/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4312 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4313 is still available in REGNO at the end of the insn.
4314
4315 We can assume that the reload reg was already tested for availability
4316 at the time it is needed, and we should not check this again,
4317 in case the reg has already been marked in use. */
4318
4319static int
546b63fb 4320reload_reg_reaches_end_p (regno, opnum, type)
32131a9c 4321 int regno;
546b63fb
RK
4322 int opnum;
4323 enum reload_type type;
32131a9c 4324{
546b63fb
RK
4325 int i;
4326
4327 switch (type)
32131a9c
RK
4328 {
4329 case RELOAD_OTHER:
4330 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4331 its value must reach the end. */
4332 return 1;
4333
4334 /* If this use is for part of the insn,
546b63fb
RK
4335 its value reaches if no subsequent part uses the same register.
4336 Just like the above function, don't try to do this with lots
4337 of fallthroughs. */
4338
4339 case RELOAD_FOR_OTHER_ADDRESS:
4340 /* Here we check for everything else, since these don't conflict
4341 with anything else and everything comes later. */
4342
4343 for (i = 0; i < reload_n_operands; i++)
4344 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4345 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4346 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4347 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4348 return 0;
4349
4350 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4351 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4352 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4353
4354 case RELOAD_FOR_INPUT_ADDRESS:
4355 /* Similar, except that we check only for this and subsequent inputs
4356 and the address of only subsequent inputs and we do not need
4357 to check for RELOAD_OTHER objects since they are known not to
4358 conflict. */
4359
4360 for (i = opnum; i < reload_n_operands; i++)
4361 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4362 return 0;
4363
4364 for (i = opnum + 1; i < reload_n_operands; i++)
4365 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4366 return 0;
4367
4368 for (i = 0; i < reload_n_operands; i++)
4369 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4370 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4371 return 0;
4372
4373 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4374 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4375
32131a9c 4376 case RELOAD_FOR_INPUT:
546b63fb
RK
4377 /* Similar to input address, except we start at the next operand for
4378 both input and input address and we do not check for
4379 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4380 would conflict. */
4381
4382 for (i = opnum + 1; i < reload_n_operands; i++)
4383 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4384 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4385 return 0;
4386
4387 /* ... fall through ... */
4388
32131a9c 4389 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4390 /* Check outputs and their addresses. */
4391
4392 for (i = 0; i < reload_n_operands; i++)
4393 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4394 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4395 return 0;
4396
4397 return 1;
4398
4399 case RELOAD_FOR_INSN:
4400 /* These conflict with other outputs with with RELOAD_OTHER. So
4401 we need only check for output addresses. */
4402
4403 opnum = -1;
4404
4405 /* ... fall through ... */
4406
32131a9c 4407 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4408 case RELOAD_FOR_OUTPUT_ADDRESS:
4409 /* We already know these can't conflict with a later output. So the
4410 only thing to check are later output addresses. */
4411 for (i = opnum + 1; i < reload_n_operands; i++)
4412 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4413 return 0;
4414
32131a9c
RK
4415 return 1;
4416 }
546b63fb 4417
32131a9c
RK
4418 abort ();
4419}
4420\f
351aa1c1
RK
4421/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4422 Return 0 otherwise.
4423
4424 This function uses the same algorithm as reload_reg_free_p above. */
4425
4426static int
4427reloads_conflict (r1, r2)
4428 int r1, r2;
4429{
4430 enum reload_type r1_type = reload_when_needed[r1];
4431 enum reload_type r2_type = reload_when_needed[r2];
4432 int r1_opnum = reload_opnum[r1];
4433 int r2_opnum = reload_opnum[r2];
4434
4435 /* RELOAD_OTHER conflicts with everything except
4436 RELOAD_FOR_OTHER_ADDRESS. */
4437
4438 if ((r1_type == RELOAD_OTHER && r2_type != RELOAD_FOR_OTHER_ADDRESS)
4439 || (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS))
4440 return 1;
4441
4442 /* Otherwise, check conflicts differently for each type. */
4443
4444 switch (r1_type)
4445 {
4446 case RELOAD_FOR_INPUT:
4447 return (r2_type == RELOAD_FOR_INSN
4448 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4449 || r2_type == RELOAD_FOR_INPUT
4450 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4451
4452 case RELOAD_FOR_INPUT_ADDRESS:
4453 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4454 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4455
4456 case RELOAD_FOR_OUTPUT_ADDRESS:
4457 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4458 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4459
4460 case RELOAD_FOR_OPERAND_ADDRESS:
4461 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4462 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4463
4464 case RELOAD_FOR_OUTPUT:
4465 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
0aaf4a58 4466 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
351aa1c1
RK
4467 && r2_opnum >= r1_opnum));
4468
4469 case RELOAD_FOR_INSN:
4470 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4471 || r2_type == RELOAD_FOR_INSN
4472 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4473
4474 case RELOAD_FOR_OTHER_ADDRESS:
4475 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4476
4477 default:
4478 abort ();
4479 }
4480}
4481\f
32131a9c
RK
4482/* Vector of reload-numbers showing the order in which the reloads should
4483 be processed. */
4484short reload_order[MAX_RELOADS];
4485
4486/* Indexed by reload number, 1 if incoming value
4487 inherited from previous insns. */
4488char reload_inherited[MAX_RELOADS];
4489
4490/* For an inherited reload, this is the insn the reload was inherited from,
4491 if we know it. Otherwise, this is 0. */
4492rtx reload_inheritance_insn[MAX_RELOADS];
4493
4494/* If non-zero, this is a place to get the value of the reload,
4495 rather than using reload_in. */
4496rtx reload_override_in[MAX_RELOADS];
4497
4498/* For each reload, the index in spill_regs of the spill register used,
4499 or -1 if we did not need one of the spill registers for this reload. */
4500int reload_spill_index[MAX_RELOADS];
4501
4502/* Index of last register assigned as a spill register. We allocate in
4503 a round-robin fashio. */
4504
1d2310f3 4505static int last_spill_reg = 0;
32131a9c
RK
4506
4507/* Find a spill register to use as a reload register for reload R.
4508 LAST_RELOAD is non-zero if this is the last reload for the insn being
4509 processed.
4510
4511 Set reload_reg_rtx[R] to the register allocated.
4512
4513 If NOERROR is nonzero, we return 1 if successful,
4514 or 0 if we couldn't find a spill reg and we didn't change anything. */
4515
4516static int
4517allocate_reload_reg (r, insn, last_reload, noerror)
4518 int r;
4519 rtx insn;
4520 int last_reload;
4521 int noerror;
4522{
4523 int i;
4524 int pass;
4525 int count;
4526 rtx new;
4527 int regno;
4528
4529 /* If we put this reload ahead, thinking it is a group,
4530 then insist on finding a group. Otherwise we can grab a
a8fdc208 4531 reg that some other reload needs.
32131a9c
RK
4532 (That can happen when we have a 68000 DATA_OR_FP_REG
4533 which is a group of data regs or one fp reg.)
4534 We need not be so restrictive if there are no more reloads
4535 for this insn.
4536
4537 ??? Really it would be nicer to have smarter handling
4538 for that kind of reg class, where a problem like this is normal.
4539 Perhaps those classes should be avoided for reloading
4540 by use of more alternatives. */
4541
4542 int force_group = reload_nregs[r] > 1 && ! last_reload;
4543
4544 /* If we want a single register and haven't yet found one,
4545 take any reg in the right class and not in use.
4546 If we want a consecutive group, here is where we look for it.
4547
4548 We use two passes so we can first look for reload regs to
4549 reuse, which are already in use for other reloads in this insn,
4550 and only then use additional registers.
4551 I think that maximizing reuse is needed to make sure we don't
4552 run out of reload regs. Suppose we have three reloads, and
4553 reloads A and B can share regs. These need two regs.
4554 Suppose A and B are given different regs.
4555 That leaves none for C. */
4556 for (pass = 0; pass < 2; pass++)
4557 {
4558 /* I is the index in spill_regs.
4559 We advance it round-robin between insns to use all spill regs
4560 equally, so that inherited reloads have a chance
4561 of leapfrogging each other. */
4562
4563 for (count = 0, i = last_spill_reg; count < n_spills; count++)
4564 {
4565 int class = (int) reload_reg_class[r];
4566
4567 i = (i + 1) % n_spills;
4568
546b63fb
RK
4569 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4570 reload_when_needed[r])
32131a9c
RK
4571 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4572 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
be7ae2a4
RK
4573 /* Look first for regs to share, then for unshared. But
4574 don't share regs used for inherited reloads; they are
4575 the ones we want to preserve. */
4576 && (pass
4577 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4578 spill_regs[i])
4579 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4580 spill_regs[i]))))
32131a9c
RK
4581 {
4582 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4583 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4584 (on 68000) got us two FP regs. If NR is 1,
4585 we would reject both of them. */
4586 if (force_group)
4587 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4588 /* If we need only one reg, we have already won. */
4589 if (nr == 1)
4590 {
4591 /* But reject a single reg if we demand a group. */
4592 if (force_group)
4593 continue;
4594 break;
4595 }
4596 /* Otherwise check that as many consecutive regs as we need
4597 are available here.
4598 Also, don't use for a group registers that are
4599 needed for nongroups. */
4600 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4601 while (nr > 1)
4602 {
4603 regno = spill_regs[i] + nr - 1;
4604 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4605 && spill_reg_order[regno] >= 0
546b63fb
RK
4606 && reload_reg_free_p (regno, reload_opnum[r],
4607 reload_when_needed[r])
32131a9c
RK
4608 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4609 regno)))
4610 break;
4611 nr--;
4612 }
4613 if (nr == 1)
4614 break;
4615 }
4616 }
4617
4618 /* If we found something on pass 1, omit pass 2. */
4619 if (count < n_spills)
4620 break;
4621 }
4622
4623 /* We should have found a spill register by now. */
4624 if (count == n_spills)
4625 {
4626 if (noerror)
4627 return 0;
139fc12e 4628 goto failure;
32131a9c
RK
4629 }
4630
be7ae2a4
RK
4631 /* I is the index in SPILL_REG_RTX of the reload register we are to
4632 allocate. Get an rtx for it and find its register number. */
32131a9c
RK
4633
4634 new = spill_reg_rtx[i];
4635
4636 if (new == 0 || GET_MODE (new) != reload_mode[r])
be7ae2a4
RK
4637 spill_reg_rtx[i] = new
4638 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4639
32131a9c
RK
4640 regno = true_regnum (new);
4641
4642 /* Detect when the reload reg can't hold the reload mode.
4643 This used to be one `if', but Sequent compiler can't handle that. */
4644 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4645 {
4646 enum machine_mode test_mode = VOIDmode;
4647 if (reload_in[r])
4648 test_mode = GET_MODE (reload_in[r]);
4649 /* If reload_in[r] has VOIDmode, it means we will load it
4650 in whatever mode the reload reg has: to wit, reload_mode[r].
4651 We have already tested that for validity. */
4652 /* Aside from that, we need to test that the expressions
4653 to reload from or into have modes which are valid for this
4654 reload register. Otherwise the reload insns would be invalid. */
4655 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4656 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4657 if (! (reload_out[r] != 0
4658 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
be7ae2a4
RK
4659 {
4660 /* The reg is OK. */
4661 last_spill_reg = i;
4662
4663 /* Mark as in use for this insn the reload regs we use
4664 for this. */
4665 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4666 reload_when_needed[r], reload_mode[r]);
4667
4668 reload_reg_rtx[r] = new;
4669 reload_spill_index[r] = i;
4670 return 1;
4671 }
32131a9c
RK
4672 }
4673
4674 /* The reg is not OK. */
4675 if (noerror)
4676 return 0;
4677
139fc12e 4678 failure:
32131a9c
RK
4679 if (asm_noperands (PATTERN (insn)) < 0)
4680 /* It's the compiler's fault. */
4681 abort ();
4682
4683 /* It's the user's fault; the operand's mode and constraint
4684 don't match. Disable this reload so we don't crash in final. */
4685 error_for_asm (insn,
4686 "`asm' operand constraint incompatible with operand size");
4687 reload_in[r] = 0;
4688 reload_out[r] = 0;
4689 reload_reg_rtx[r] = 0;
4690 reload_optional[r] = 1;
4691 reload_secondary_p[r] = 1;
4692
4693 return 1;
4694}
4695\f
4696/* Assign hard reg targets for the pseudo-registers we must reload
4697 into hard regs for this insn.
4698 Also output the instructions to copy them in and out of the hard regs.
4699
4700 For machines with register classes, we are responsible for
4701 finding a reload reg in the proper class. */
4702
4703static void
4704choose_reload_regs (insn, avoid_return_reg)
4705 rtx insn;
32131a9c
RK
4706 rtx avoid_return_reg;
4707{
4708 register int i, j;
4709 int max_group_size = 1;
4710 enum reg_class group_class = NO_REGS;
4711 int inheritance;
4712
4713 rtx save_reload_reg_rtx[MAX_RELOADS];
4714 char save_reload_inherited[MAX_RELOADS];
4715 rtx save_reload_inheritance_insn[MAX_RELOADS];
4716 rtx save_reload_override_in[MAX_RELOADS];
4717 int save_reload_spill_index[MAX_RELOADS];
4718 HARD_REG_SET save_reload_reg_used;
546b63fb
RK
4719 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4720 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4721 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4722 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c 4723 HARD_REG_SET save_reload_reg_used_in_op_addr;
546b63fb
RK
4724 HARD_REG_SET save_reload_reg_used_in_insn;
4725 HARD_REG_SET save_reload_reg_used_in_other_addr;
32131a9c
RK
4726 HARD_REG_SET save_reload_reg_used_at_all;
4727
4728 bzero (reload_inherited, MAX_RELOADS);
4729 bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4730 bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));
4731
4732 CLEAR_HARD_REG_SET (reload_reg_used);
4733 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
32131a9c 4734 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
546b63fb
RK
4735 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4736 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
32131a9c 4737
546b63fb
RK
4738 for (i = 0; i < reload_n_operands; i++)
4739 {
4740 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4741 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4742 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4743 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4744 }
32131a9c
RK
4745
4746#ifdef SMALL_REGISTER_CLASSES
4747 /* Don't bother with avoiding the return reg
4748 if we have no mandatory reload that could use it. */
4749 if (avoid_return_reg)
4750 {
4751 int do_avoid = 0;
4752 int regno = REGNO (avoid_return_reg);
4753 int nregs
4754 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4755 int r;
4756
4757 for (r = regno; r < regno + nregs; r++)
4758 if (spill_reg_order[r] >= 0)
4759 for (j = 0; j < n_reloads; j++)
4760 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4761 && (reload_in[j] != 0 || reload_out[j] != 0
4762 || reload_secondary_p[j])
4763 &&
4764 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4765 do_avoid = 1;
4766 if (!do_avoid)
4767 avoid_return_reg = 0;
4768 }
4769#endif /* SMALL_REGISTER_CLASSES */
4770
4771#if 0 /* Not needed, now that we can always retry without inheritance. */
4772 /* See if we have more mandatory reloads than spill regs.
4773 If so, then we cannot risk optimizations that could prevent
a8fdc208 4774 reloads from sharing one spill register.
32131a9c
RK
4775
4776 Since we will try finding a better register than reload_reg_rtx
4777 unless it is equal to reload_in or reload_out, count such reloads. */
4778
4779 {
4780 int tem = 0;
4781#ifdef SMALL_REGISTER_CLASSES
4782 int tem = (avoid_return_reg != 0);
a8fdc208 4783#endif
32131a9c
RK
4784 for (j = 0; j < n_reloads; j++)
4785 if (! reload_optional[j]
4786 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4787 && (reload_reg_rtx[j] == 0
4788 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4789 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4790 tem++;
4791 if (tem > n_spills)
4792 must_reuse = 1;
4793 }
4794#endif
4795
4796#ifdef SMALL_REGISTER_CLASSES
4797 /* Don't use the subroutine call return reg for a reload
4798 if we are supposed to avoid it. */
4799 if (avoid_return_reg)
4800 {
4801 int regno = REGNO (avoid_return_reg);
4802 int nregs
4803 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4804 int r;
4805
4806 for (r = regno; r < regno + nregs; r++)
4807 if (spill_reg_order[r] >= 0)
4808 SET_HARD_REG_BIT (reload_reg_used, r);
4809 }
4810#endif /* SMALL_REGISTER_CLASSES */
4811
4812 /* In order to be certain of getting the registers we need,
4813 we must sort the reloads into order of increasing register class.
4814 Then our grabbing of reload registers will parallel the process
a8fdc208 4815 that provided the reload registers.
32131a9c
RK
4816
4817 Also note whether any of the reloads wants a consecutive group of regs.
4818 If so, record the maximum size of the group desired and what
4819 register class contains all the groups needed by this insn. */
4820
4821 for (j = 0; j < n_reloads; j++)
4822 {
4823 reload_order[j] = j;
4824 reload_spill_index[j] = -1;
4825
4826 reload_mode[j]
546b63fb
RK
4827 = (reload_inmode[j] == VOIDmode
4828 || (GET_MODE_SIZE (reload_outmode[j])
4829 > GET_MODE_SIZE (reload_inmode[j])))
4830 ? reload_outmode[j] : reload_inmode[j];
32131a9c
RK
4831
4832 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4833
4834 if (reload_nregs[j] > 1)
4835 {
4836 max_group_size = MAX (reload_nregs[j], max_group_size);
4837 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4838 }
4839
4840 /* If we have already decided to use a certain register,
4841 don't use it in another way. */
4842 if (reload_reg_rtx[j])
546b63fb 4843 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
32131a9c
RK
4844 reload_when_needed[j], reload_mode[j]);
4845 }
4846
4847 if (n_reloads > 1)
4848 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
4849
4850 bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
4851 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4852 bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
4853 sizeof reload_inheritance_insn);
4854 bcopy (reload_override_in, save_reload_override_in,
4855 sizeof reload_override_in);
4856 bcopy (reload_spill_index, save_reload_spill_index,
4857 sizeof reload_spill_index);
4858 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4859 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
32131a9c
RK
4860 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4861 reload_reg_used_in_op_addr);
546b63fb
RK
4862 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
4863 reload_reg_used_in_insn);
4864 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
4865 reload_reg_used_in_other_addr);
4866
4867 for (i = 0; i < reload_n_operands; i++)
4868 {
4869 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
4870 reload_reg_used_in_output[i]);
4871 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
4872 reload_reg_used_in_input[i]);
4873 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
4874 reload_reg_used_in_input_addr[i]);
4875 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
4876 reload_reg_used_in_output_addr[i]);
4877 }
32131a9c 4878
58b1581b
RS
4879 /* If -O, try first with inheritance, then turning it off.
4880 If not -O, don't do inheritance.
4881 Using inheritance when not optimizing leads to paradoxes
4882 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
4883 because one side of the comparison might be inherited. */
32131a9c 4884
58b1581b 4885 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
32131a9c
RK
4886 {
4887 /* Process the reloads in order of preference just found.
4888 Beyond this point, subregs can be found in reload_reg_rtx.
4889
4890 This used to look for an existing reloaded home for all
4891 of the reloads, and only then perform any new reloads.
4892 But that could lose if the reloads were done out of reg-class order
4893 because a later reload with a looser constraint might have an old
4894 home in a register needed by an earlier reload with a tighter constraint.
4895
4896 To solve this, we make two passes over the reloads, in the order
4897 described above. In the first pass we try to inherit a reload
4898 from a previous insn. If there is a later reload that needs a
4899 class that is a proper subset of the class being processed, we must
4900 also allocate a spill register during the first pass.
4901
4902 Then make a second pass over the reloads to allocate any reloads
4903 that haven't been given registers yet. */
4904
be7ae2a4
RK
4905 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
4906
32131a9c
RK
4907 for (j = 0; j < n_reloads; j++)
4908 {
4909 register int r = reload_order[j];
4910
4911 /* Ignore reloads that got marked inoperative. */
4912 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4913 continue;
4914
4915 /* If find_reloads chose a to use reload_in or reload_out as a reload
4916 register, we don't need to chose one. Otherwise, try even if it found
4917 one since we might save an insn if we find the value lying around. */
4918 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
4919 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
4920 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
4921 continue;
4922
4923#if 0 /* No longer needed for correct operation.
4924 It might give better code, or might not; worth an experiment? */
4925 /* If this is an optional reload, we can't inherit from earlier insns
4926 until we are sure that any non-optional reloads have been allocated.
4927 The following code takes advantage of the fact that optional reloads
4928 are at the end of reload_order. */
4929 if (reload_optional[r] != 0)
4930 for (i = 0; i < j; i++)
4931 if ((reload_out[reload_order[i]] != 0
4932 || reload_in[reload_order[i]] != 0
4933 || reload_secondary_p[reload_order[i]])
4934 && ! reload_optional[reload_order[i]]
4935 && reload_reg_rtx[reload_order[i]] == 0)
4936 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
4937#endif
4938
4939 /* First see if this pseudo is already available as reloaded
4940 for a previous insn. We cannot try to inherit for reloads
4941 that are smaller than the maximum number of registers needed
4942 for groups unless the register we would allocate cannot be used
4943 for the groups.
4944
4945 We could check here to see if this is a secondary reload for
4946 an object that is already in a register of the desired class.
4947 This would avoid the need for the secondary reload register.
4948 But this is complex because we can't easily determine what
4949 objects might want to be loaded via this reload. So let a register
4950 be allocated here. In `emit_reload_insns' we suppress one of the
4951 loads in the case described above. */
4952
4953 if (inheritance)
4954 {
4955 register int regno = -1;
db660765 4956 enum machine_mode mode;
32131a9c
RK
4957
4958 if (reload_in[r] == 0)
4959 ;
4960 else if (GET_CODE (reload_in[r]) == REG)
db660765
TW
4961 {
4962 regno = REGNO (reload_in[r]);
4963 mode = GET_MODE (reload_in[r]);
4964 }
32131a9c 4965 else if (GET_CODE (reload_in_reg[r]) == REG)
db660765
TW
4966 {
4967 regno = REGNO (reload_in_reg[r]);
4968 mode = GET_MODE (reload_in_reg[r]);
4969 }
32131a9c
RK
4970#if 0
4971 /* This won't work, since REGNO can be a pseudo reg number.
4972 Also, it takes much more hair to keep track of all the things
4973 that can invalidate an inherited reload of part of a pseudoreg. */
4974 else if (GET_CODE (reload_in[r]) == SUBREG
4975 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
4976 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
4977#endif
4978
4979 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
4980 {
4981 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
4982
4983 if (reg_reloaded_contents[i] == regno
db660765
TW
4984 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
4985 >= GET_MODE_SIZE (mode))
32131a9c
RK
4986 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4987 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4988 spill_regs[i])
4989 && (reload_nregs[r] == max_group_size
4990 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
4991 spill_regs[i]))
546b63fb
RK
4992 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
4993 reload_when_needed[r])
32131a9c 4994 && reload_reg_free_before_p (spill_regs[i],
546b63fb 4995 reload_opnum[r],
32131a9c
RK
4996 reload_when_needed[r]))
4997 {
4998 /* If a group is needed, verify that all the subsequent
4999 registers still have their values intact. */
5000 int nr
5001 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5002 int k;
5003
5004 for (k = 1; k < nr; k++)
5005 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5006 != regno)
5007 break;
5008
5009 if (k == nr)
5010 {
c74fa651
RS
5011 int i1;
5012
5013 /* We found a register that contains the
5014 value we need. If this register is the
5015 same as an `earlyclobber' operand of the
5016 current insn, just mark it as a place to
5017 reload from since we can't use it as the
5018 reload register itself. */
5019
5020 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5021 if (reg_overlap_mentioned_for_reload_p
5022 (reg_last_reload_reg[regno],
5023 reload_earlyclobbers[i1]))
5024 break;
5025
8908158d
RS
5026 if (i1 != n_earlyclobbers
5027 /* Don't really use the inherited spill reg
5028 if we need it wider than we've got it. */
5029 || (GET_MODE_SIZE (reload_mode[r])
5030 > GET_MODE_SIZE (mode)))
c74fa651
RS
5031 reload_override_in[r] = reg_last_reload_reg[regno];
5032 else
5033 {
54c40e68 5034 int k;
c74fa651
RS
5035 /* We can use this as a reload reg. */
5036 /* Mark the register as in use for this part of
5037 the insn. */
5038 mark_reload_reg_in_use (spill_regs[i],
5039 reload_opnum[r],
5040 reload_when_needed[r],
5041 reload_mode[r]);
5042 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5043 reload_inherited[r] = 1;
5044 reload_inheritance_insn[r]
5045 = reg_reloaded_insn[i];
5046 reload_spill_index[r] = i;
54c40e68
RS
5047 for (k = 0; k < nr; k++)
5048 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5049 spill_regs[i + k]);
c74fa651 5050 }
32131a9c
RK
5051 }
5052 }
5053 }
5054 }
5055
5056 /* Here's another way to see if the value is already lying around. */
5057 if (inheritance
5058 && reload_in[r] != 0
5059 && ! reload_inherited[r]
5060 && reload_out[r] == 0
5061 && (CONSTANT_P (reload_in[r])
5062 || GET_CODE (reload_in[r]) == PLUS
5063 || GET_CODE (reload_in[r]) == REG
5064 || GET_CODE (reload_in[r]) == MEM)
5065 && (reload_nregs[r] == max_group_size
5066 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5067 {
5068 register rtx equiv
5069 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
fb3821f7 5070 -1, NULL_PTR, 0, reload_mode[r]);
32131a9c
RK
5071 int regno;
5072
5073 if (equiv != 0)
5074 {
5075 if (GET_CODE (equiv) == REG)
5076 regno = REGNO (equiv);
5077 else if (GET_CODE (equiv) == SUBREG)
5078 {
f8a9e02b
RK
5079 /* This must be a SUBREG of a hard register.
5080 Make a new REG since this might be used in an
5081 address and not all machines support SUBREGs
5082 there. */
5083 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5084 equiv = gen_rtx (REG, reload_mode[r], regno);
32131a9c
RK
5085 }
5086 else
5087 abort ();
5088 }
5089
5090 /* If we found a spill reg, reject it unless it is free
5091 and of the desired class. */
5092 if (equiv != 0
5093 && ((spill_reg_order[regno] >= 0
546b63fb 5094 && ! reload_reg_free_before_p (regno, reload_opnum[r],
32131a9c
RK
5095 reload_when_needed[r]))
5096 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5097 regno)))
5098 equiv = 0;
5099
5100 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5101 equiv = 0;
5102
5103 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5104 equiv = 0;
5105
5106 /* We found a register that contains the value we need.
5107 If this register is the same as an `earlyclobber' operand
5108 of the current insn, just mark it as a place to reload from
5109 since we can't use it as the reload register itself. */
5110
5111 if (equiv != 0)
5112 for (i = 0; i < n_earlyclobbers; i++)
bfa30b22
RK
5113 if (reg_overlap_mentioned_for_reload_p (equiv,
5114 reload_earlyclobbers[i]))
32131a9c
RK
5115 {
5116 reload_override_in[r] = equiv;
5117 equiv = 0;
5118 break;
5119 }
5120
5121 /* JRV: If the equiv register we have found is explicitly
5122 clobbered in the current insn, mark but don't use, as above. */
5123
5124 if (equiv != 0 && regno_clobbered_p (regno, insn))
5125 {
5126 reload_override_in[r] = equiv;
5127 equiv = 0;
5128 }
5129
5130 /* If we found an equivalent reg, say no code need be generated
5131 to load it, and use it as our reload reg. */
3ec2ea3e 5132 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
32131a9c
RK
5133 {
5134 reload_reg_rtx[r] = equiv;
5135 reload_inherited[r] = 1;
5136 /* If it is a spill reg,
5137 mark the spill reg as in use for this insn. */
5138 i = spill_reg_order[regno];
5139 if (i >= 0)
be7ae2a4 5140 {
54c40e68
RS
5141 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5142 int k;
be7ae2a4
RK
5143 mark_reload_reg_in_use (regno, reload_opnum[r],
5144 reload_when_needed[r],
5145 reload_mode[r]);
54c40e68
RS
5146 for (k = 0; k < nr; k++)
5147 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
be7ae2a4 5148 }
32131a9c
RK
5149 }
5150 }
5151
5152 /* If we found a register to use already, or if this is an optional
5153 reload, we are done. */
5154 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5155 continue;
5156
5157#if 0 /* No longer needed for correct operation. Might or might not
5158 give better code on the average. Want to experiment? */
5159
5160 /* See if there is a later reload that has a class different from our
5161 class that intersects our class or that requires less register
5162 than our reload. If so, we must allocate a register to this
5163 reload now, since that reload might inherit a previous reload
5164 and take the only available register in our class. Don't do this
5165 for optional reloads since they will force all previous reloads
5166 to be allocated. Also don't do this for reloads that have been
5167 turned off. */
5168
5169 for (i = j + 1; i < n_reloads; i++)
5170 {
5171 int s = reload_order[i];
5172
d45cf215
RS
5173 if ((reload_in[s] == 0 && reload_out[s] == 0
5174 && ! reload_secondary_p[s])
32131a9c
RK
5175 || reload_optional[s])
5176 continue;
5177
5178 if ((reload_reg_class[s] != reload_reg_class[r]
5179 && reg_classes_intersect_p (reload_reg_class[r],
5180 reload_reg_class[s]))
5181 || reload_nregs[s] < reload_nregs[r])
5182 break;
5183 }
5184
5185 if (i == n_reloads)
5186 continue;
5187
5188 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5189#endif
5190 }
5191
5192 /* Now allocate reload registers for anything non-optional that
5193 didn't get one yet. */
5194 for (j = 0; j < n_reloads; j++)
5195 {
5196 register int r = reload_order[j];
5197
5198 /* Ignore reloads that got marked inoperative. */
5199 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5200 continue;
5201
5202 /* Skip reloads that already have a register allocated or are
5203 optional. */
5204 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5205 continue;
5206
5207 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5208 break;
5209 }
5210
5211 /* If that loop got all the way, we have won. */
5212 if (j == n_reloads)
5213 break;
5214
5215 fail:
5216 /* Loop around and try without any inheritance. */
5217 /* First undo everything done by the failed attempt
5218 to allocate with inheritance. */
5219 bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
5220 bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
5221 bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
5222 sizeof reload_inheritance_insn);
5223 bcopy (save_reload_override_in, reload_override_in,
5224 sizeof reload_override_in);
5225 bcopy (save_reload_spill_index, reload_spill_index,
5226 sizeof reload_spill_index);
5227 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5228 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
32131a9c
RK
5229 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5230 save_reload_reg_used_in_op_addr);
546b63fb
RK
5231 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5232 save_reload_reg_used_in_insn);
5233 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5234 save_reload_reg_used_in_other_addr);
5235
5236 for (i = 0; i < reload_n_operands; i++)
5237 {
5238 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5239 save_reload_reg_used_in_input[i]);
5240 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5241 save_reload_reg_used_in_output[i]);
5242 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5243 save_reload_reg_used_in_input_addr[i]);
5244 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5245 save_reload_reg_used_in_output_addr[i]);
5246 }
32131a9c
RK
5247 }
5248
5249 /* If we thought we could inherit a reload, because it seemed that
5250 nothing else wanted the same reload register earlier in the insn,
5251 verify that assumption, now that all reloads have been assigned. */
5252
5253 for (j = 0; j < n_reloads; j++)
5254 {
5255 register int r = reload_order[j];
5256
5257 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5258 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
546b63fb 5259 reload_opnum[r],
32131a9c
RK
5260 reload_when_needed[r]))
5261 reload_inherited[r] = 0;
5262
5263 /* If we found a better place to reload from,
5264 validate it in the same fashion, if it is a reload reg. */
5265 if (reload_override_in[r]
5266 && (GET_CODE (reload_override_in[r]) == REG
5267 || GET_CODE (reload_override_in[r]) == SUBREG))
5268 {
5269 int regno = true_regnum (reload_override_in[r]);
5270 if (spill_reg_order[regno] >= 0
546b63fb
RK
5271 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5272 reload_when_needed[r]))
32131a9c
RK
5273 reload_override_in[r] = 0;
5274 }
5275 }
5276
5277 /* Now that reload_override_in is known valid,
5278 actually override reload_in. */
5279 for (j = 0; j < n_reloads; j++)
5280 if (reload_override_in[j])
5281 reload_in[j] = reload_override_in[j];
5282
5283 /* If this reload won't be done because it has been cancelled or is
5284 optional and not inherited, clear reload_reg_rtx so other
5285 routines (such as subst_reloads) don't get confused. */
5286 for (j = 0; j < n_reloads; j++)
be7ae2a4
RK
5287 if (reload_reg_rtx[j] != 0
5288 && ((reload_optional[j] && ! reload_inherited[j])
5289 || (reload_in[j] == 0 && reload_out[j] == 0
5290 && ! reload_secondary_p[j])))
5291 {
5292 int regno = true_regnum (reload_reg_rtx[j]);
5293
5294 if (spill_reg_order[regno] >= 0)
5295 clear_reload_reg_in_use (regno, reload_opnum[j],
5296 reload_when_needed[j], reload_mode[j]);
5297 reload_reg_rtx[j] = 0;
5298 }
32131a9c
RK
5299
5300 /* Record which pseudos and which spill regs have output reloads. */
5301 for (j = 0; j < n_reloads; j++)
5302 {
5303 register int r = reload_order[j];
5304
5305 i = reload_spill_index[r];
5306
5307 /* I is nonneg if this reload used one of the spill regs.
5308 If reload_reg_rtx[r] is 0, this is an optional reload
5309 that we opted to ignore. */
5310 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5311 && reload_reg_rtx[r] != 0)
5312 {
5313 register int nregno = REGNO (reload_out[r]);
372e033b
RS
5314 int nr = 1;
5315
5316 if (nregno < FIRST_PSEUDO_REGISTER)
5317 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
32131a9c
RK
5318
5319 while (--nr >= 0)
372e033b
RS
5320 reg_has_output_reload[nregno + nr] = 1;
5321
5322 if (i >= 0)
32131a9c 5323 {
372e033b
RS
5324 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5325 while (--nr >= 0)
32131a9c
RK
5326 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5327 }
5328
5329 if (reload_when_needed[r] != RELOAD_OTHER
546b63fb
RK
5330 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5331 && reload_when_needed[r] != RELOAD_FOR_INSN)
32131a9c
RK
5332 abort ();
5333 }
5334 }
5335}
5336\f
546b63fb
RK
5337/* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5338 reloads of the same item for fear that we might not have enough reload
5339 registers. However, normally they will get the same reload register
5340 and hence actually need not be loaded twice.
5341
5342 Here we check for the most common case of this phenomenon: when we have
5343 a number of reloads for the same object, each of which were allocated
5344 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5345 reload, and is not modified in the insn itself. If we find such,
5346 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5347 This will not increase the number of spill registers needed and will
5348 prevent redundant code. */
5349
5350#ifdef SMALL_REGISTER_CLASSES
5351
5352static void
5353merge_assigned_reloads (insn)
5354 rtx insn;
5355{
5356 int i, j;
5357
5358 /* Scan all the reloads looking for ones that only load values and
5359 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5360 assigned and not modified by INSN. */
5361
5362 for (i = 0; i < n_reloads; i++)
5363 {
5364 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5365 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5366 || reg_set_p (reload_reg_rtx[i], insn))
5367 continue;
5368
5369 /* Look at all other reloads. Ensure that the only use of this
5370 reload_reg_rtx is in a reload that just loads the same value
5371 as we do. Note that any secondary reloads must be of the identical
5372 class since the values, modes, and result registers are the
5373 same, so we need not do anything with any secondary reloads. */
5374
5375 for (j = 0; j < n_reloads; j++)
5376 {
5377 if (i == j || reload_reg_rtx[j] == 0
5378 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5379 reload_reg_rtx[i]))
5380 continue;
5381
5382 /* If the reload regs aren't exactly the same (e.g, different modes)
5383 or if the values are different, we can't merge anything with this
5384 reload register. */
5385
5386 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5387 || reload_out[j] != 0 || reload_in[j] == 0
5388 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5389 break;
5390 }
5391
5392 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5393 we, in fact, found any matching reloads. */
5394
5395 if (j == n_reloads)
5396 {
5397 for (j = 0; j < n_reloads; j++)
5398 if (i != j && reload_reg_rtx[j] != 0
5399 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5400 {
5401 reload_when_needed[i] = RELOAD_OTHER;
5402 reload_in[j] = 0;
5403 transfer_replacements (i, j);
5404 }
5405
5406 /* If this is now RELOAD_OTHER, look for any reloads that load
5407 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5408 if they were for inputs, RELOAD_OTHER for outputs. Note that
5409 this test is equivalent to looking for reloads for this operand
5410 number. */
5411
5412 if (reload_when_needed[i] == RELOAD_OTHER)
5413 for (j = 0; j < n_reloads; j++)
5414 if (reload_in[j] != 0
5415 && reload_when_needed[i] != RELOAD_OTHER
5416 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5417 reload_in[i]))
5418 reload_when_needed[j]
5419 = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5420 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5421 }
5422 }
5423}
5424#endif /* SMALL_RELOAD_CLASSES */
5425\f
32131a9c
RK
5426/* Output insns to reload values in and out of the chosen reload regs. */
5427
5428static void
5429emit_reload_insns (insn)
5430 rtx insn;
5431{
5432 register int j;
546b63fb
RK
5433 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5434 rtx other_input_address_reload_insns = 0;
5435 rtx other_input_reload_insns = 0;
5436 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5437 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5438 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5439 rtx operand_reload_insns = 0;
32131a9c 5440 rtx following_insn = NEXT_INSN (insn);
a8efe40d 5441 rtx before_insn = insn;
32131a9c
RK
5442 int special;
5443 /* Values to be put in spill_reg_store are put here first. */
5444 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5445
546b63fb
RK
5446 for (j = 0; j < reload_n_operands; j++)
5447 input_reload_insns[j] = input_address_reload_insns[j]
5448 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5449
d45cf215 5450 /* If this is a CALL_INSN preceded by USE insns, any reload insns
a8efe40d
RK
5451 must go in front of the first USE insn, not in front of INSN. */
5452
5453 if (GET_CODE (insn) == CALL_INSN && GET_CODE (PREV_INSN (insn)) == INSN
5454 && GET_CODE (PATTERN (PREV_INSN (insn))) == USE)
5455 while (GET_CODE (PREV_INSN (before_insn)) == INSN
5456 && GET_CODE (PATTERN (PREV_INSN (before_insn))) == USE)
546b63fb
RK
5457 before_insn = PREV_INSN (before_insn);
5458
a34a369b 5459 /* If INSN is followed by any CLOBBER insns made by find_reloads,
546b63fb
RK
5460 put our reloads after them since they may otherwise be
5461 misinterpreted. */
5462
a34a369b
DE
5463 while (GET_CODE (following_insn) == INSN
5464 && GET_MODE (following_insn) == DImode
5465 && GET_CODE (PATTERN (following_insn)) == CLOBBER
5466 && NEXT_INSN (following_insn) != 0)
546b63fb 5467 following_insn = NEXT_INSN (following_insn);
a8efe40d 5468
32131a9c
RK
5469 /* Now output the instructions to copy the data into and out of the
5470 reload registers. Do these in the order that the reloads were reported,
5471 since reloads of base and index registers precede reloads of operands
5472 and the operands may need the base and index registers reloaded. */
5473
5474 for (j = 0; j < n_reloads; j++)
5475 {
5476 register rtx old;
5477 rtx oldequiv_reg = 0;
32131a9c
RK
5478 rtx store_insn = 0;
5479
5480 old = reload_in[j];
5481 if (old != 0 && ! reload_inherited[j]
5482 && ! rtx_equal_p (reload_reg_rtx[j], old)
5483 && reload_reg_rtx[j] != 0)
5484 {
5485 register rtx reloadreg = reload_reg_rtx[j];
5486 rtx oldequiv = 0;
5487 enum machine_mode mode;
546b63fb 5488 rtx *where;
32131a9c
RK
5489
5490 /* Determine the mode to reload in.
5491 This is very tricky because we have three to choose from.
5492 There is the mode the insn operand wants (reload_inmode[J]).
5493 There is the mode of the reload register RELOADREG.
5494 There is the intrinsic mode of the operand, which we could find
5495 by stripping some SUBREGs.
5496 It turns out that RELOADREG's mode is irrelevant:
5497 we can change that arbitrarily.
5498
5499 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5500 then the reload reg may not support QImode moves, so use SImode.
5501 If foo is in memory due to spilling a pseudo reg, this is safe,
5502 because the QImode value is in the least significant part of a
5503 slot big enough for a SImode. If foo is some other sort of
5504 memory reference, then it is impossible to reload this case,
5505 so previous passes had better make sure this never happens.
5506
5507 Then consider a one-word union which has SImode and one of its
5508 members is a float, being fetched as (SUBREG:SF union:SI).
5509 We must fetch that as SFmode because we could be loading into
5510 a float-only register. In this case OLD's mode is correct.
5511
5512 Consider an immediate integer: it has VOIDmode. Here we need
5513 to get a mode from something else.
5514
5515 In some cases, there is a fourth mode, the operand's
5516 containing mode. If the insn specifies a containing mode for
5517 this operand, it overrides all others.
5518
5519 I am not sure whether the algorithm here is always right,
5520 but it does the right things in those cases. */
5521
5522 mode = GET_MODE (old);
5523 if (mode == VOIDmode)
5524 mode = reload_inmode[j];
32131a9c
RK
5525
5526#ifdef SECONDARY_INPUT_RELOAD_CLASS
5527 /* If we need a secondary register for this operation, see if
5528 the value is already in a register in that class. Don't
5529 do this if the secondary register will be used as a scratch
5530 register. */
5531
b80bba27
RK
5532 if (reload_secondary_in_reload[j] >= 0
5533 && reload_secondary_in_icode[j] == CODE_FOR_nothing
58b1581b 5534 && optimize)
32131a9c
RK
5535 oldequiv
5536 = find_equiv_reg (old, insn,
b80bba27 5537 reload_reg_class[reload_secondary_in_reload[j]],
fb3821f7 5538 -1, NULL_PTR, 0, mode);
32131a9c
RK
5539#endif
5540
5541 /* If reloading from memory, see if there is a register
5542 that already holds the same value. If so, reload from there.
5543 We can pass 0 as the reload_reg_p argument because
5544 any other reload has either already been emitted,
5545 in which case find_equiv_reg will see the reload-insn,
5546 or has yet to be emitted, in which case it doesn't matter
5547 because we will use this equiv reg right away. */
5548
58b1581b 5549 if (oldequiv == 0 && optimize
32131a9c
RK
5550 && (GET_CODE (old) == MEM
5551 || (GET_CODE (old) == REG
5552 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5553 && reg_renumber[REGNO (old)] < 0)))
546b63fb 5554 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
fb3821f7 5555 -1, NULL_PTR, 0, mode);
32131a9c
RK
5556
5557 if (oldequiv)
5558 {
5559 int regno = true_regnum (oldequiv);
5560
5561 /* If OLDEQUIV is a spill register, don't use it for this
5562 if any other reload needs it at an earlier stage of this insn
a8fdc208 5563 or at this stage. */
32131a9c 5564 if (spill_reg_order[regno] >= 0
546b63fb
RK
5565 && (! reload_reg_free_p (regno, reload_opnum[j],
5566 reload_when_needed[j])
5567 || ! reload_reg_free_before_p (regno, reload_opnum[j],
32131a9c
RK
5568 reload_when_needed[j])))
5569 oldequiv = 0;
5570
5571 /* If OLDEQUIV is not a spill register,
5572 don't use it if any other reload wants it. */
5573 if (spill_reg_order[regno] < 0)
5574 {
5575 int k;
5576 for (k = 0; k < n_reloads; k++)
5577 if (reload_reg_rtx[k] != 0 && k != j
bfa30b22
RK
5578 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5579 oldequiv))
32131a9c
RK
5580 {
5581 oldequiv = 0;
5582 break;
5583 }
5584 }
546b63fb
RK
5585
5586 /* If it is no cheaper to copy from OLDEQUIV into the
5587 reload register than it would be to move from memory,
5588 don't use it. Likewise, if we need a secondary register
5589 or memory. */
5590
5591 if (oldequiv != 0
5592 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5593 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5594 reload_reg_class[j])
5595 >= MEMORY_MOVE_COST (mode)))
5596#ifdef SECONDARY_INPUT_RELOAD_CLASS
5597 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5598 mode, oldequiv)
5599 != NO_REGS)
5600#endif
5601#ifdef SECONDARY_MEMORY_NEEDED
5602 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5603 REGNO_REG_CLASS (regno),
5604 mode)
5605#endif
5606 ))
5607 oldequiv = 0;
32131a9c
RK
5608 }
5609
5610 if (oldequiv == 0)
5611 oldequiv = old;
5612 else if (GET_CODE (oldequiv) == REG)
5613 oldequiv_reg = oldequiv;
5614 else if (GET_CODE (oldequiv) == SUBREG)
5615 oldequiv_reg = SUBREG_REG (oldequiv);
5616
5617 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
3abe6f90
RK
5618 then load RELOADREG from OLDEQUIV. Note that we cannot use
5619 gen_lowpart_common since it can do the wrong thing when
5620 RELOADREG has a multi-word mode. Note that RELOADREG
5621 must always be a REG here. */
32131a9c
RK
5622
5623 if (GET_MODE (reloadreg) != mode)
3abe6f90 5624 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
32131a9c
RK
5625 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5626 oldequiv = SUBREG_REG (oldequiv);
5627 if (GET_MODE (oldequiv) != VOIDmode
5628 && mode != GET_MODE (oldequiv))
5629 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5630
546b63fb 5631 /* Switch to the right place to emit the reload insns. */
32131a9c
RK
5632 switch (reload_when_needed[j])
5633 {
32131a9c 5634 case RELOAD_OTHER:
546b63fb
RK
5635 where = &other_input_reload_insns;
5636 break;
5637 case RELOAD_FOR_INPUT:
5638 where = &input_reload_insns[reload_opnum[j]];
32131a9c 5639 break;
546b63fb
RK
5640 case RELOAD_FOR_INPUT_ADDRESS:
5641 where = &input_address_reload_insns[reload_opnum[j]];
32131a9c 5642 break;
546b63fb
RK
5643 case RELOAD_FOR_OUTPUT_ADDRESS:
5644 where = &output_address_reload_insns[reload_opnum[j]];
32131a9c
RK
5645 break;
5646 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
5647 where = &operand_reload_insns;
5648 break;
5649 case RELOAD_FOR_OTHER_ADDRESS:
5650 where = &other_input_address_reload_insns;
5651 break;
5652 default:
5653 abort ();
32131a9c
RK
5654 }
5655
546b63fb 5656 push_to_sequence (*where);
32131a9c
RK
5657 special = 0;
5658
5659 /* Auto-increment addresses must be reloaded in a special way. */
5660 if (GET_CODE (oldequiv) == POST_INC
5661 || GET_CODE (oldequiv) == POST_DEC
5662 || GET_CODE (oldequiv) == PRE_INC
5663 || GET_CODE (oldequiv) == PRE_DEC)
5664 {
5665 /* We are not going to bother supporting the case where a
5666 incremented register can't be copied directly from
5667 OLDEQUIV since this seems highly unlikely. */
b80bba27 5668 if (reload_secondary_in_reload[j] >= 0)
32131a9c
RK
5669 abort ();
5670 /* Prevent normal processing of this reload. */
5671 special = 1;
5672 /* Output a special code sequence for this case. */
546b63fb 5673 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
32131a9c
RK
5674 }
5675
5676 /* If we are reloading a pseudo-register that was set by the previous
5677 insn, see if we can get rid of that pseudo-register entirely
5678 by redirecting the previous insn into our reload register. */
5679
5680 else if (optimize && GET_CODE (old) == REG
5681 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5682 && dead_or_set_p (insn, old)
5683 /* This is unsafe if some other reload
5684 uses the same reg first. */
546b63fb
RK
5685 && reload_reg_free_before_p (REGNO (reloadreg),
5686 reload_opnum[j],
5687 reload_when_needed[j]))
32131a9c
RK
5688 {
5689 rtx temp = PREV_INSN (insn);
5690 while (temp && GET_CODE (temp) == NOTE)
5691 temp = PREV_INSN (temp);
5692 if (temp
5693 && GET_CODE (temp) == INSN
5694 && GET_CODE (PATTERN (temp)) == SET
5695 && SET_DEST (PATTERN (temp)) == old
5696 /* Make sure we can access insn_operand_constraint. */
5697 && asm_noperands (PATTERN (temp)) < 0
5698 /* This is unsafe if prev insn rejects our reload reg. */
5699 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5700 reloadreg)
5701 /* This is unsafe if operand occurs more than once in current
5702 insn. Perhaps some occurrences aren't reloaded. */
5703 && count_occurrences (PATTERN (insn), old) == 1
5704 /* Don't risk splitting a matching pair of operands. */
5705 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5706 {
5707 /* Store into the reload register instead of the pseudo. */
5708 SET_DEST (PATTERN (temp)) = reloadreg;
5709 /* If these are the only uses of the pseudo reg,
5710 pretend for GDB it lives in the reload reg we used. */
5711 if (reg_n_deaths[REGNO (old)] == 1
5712 && reg_n_sets[REGNO (old)] == 1)
5713 {
5714 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5715 alter_reg (REGNO (old), -1);
5716 }
5717 special = 1;
5718 }
5719 }
5720
546b63fb
RK
5721 /* We can't do that, so output an insn to load RELOADREG. */
5722
32131a9c
RK
5723 if (! special)
5724 {
5725#ifdef SECONDARY_INPUT_RELOAD_CLASS
5726 rtx second_reload_reg = 0;
5727 enum insn_code icode;
5728
5729 /* If we have a secondary reload, pick up the secondary register
d445b551
RK
5730 and icode, if any. If OLDEQUIV and OLD are different or
5731 if this is an in-out reload, recompute whether or not we
5732 still need a secondary register and what the icode should
5733 be. If we still need a secondary register and the class or
5734 icode is different, go back to reloading from OLD if using
5735 OLDEQUIV means that we got the wrong type of register. We
5736 cannot have different class or icode due to an in-out reload
5737 because we don't make such reloads when both the input and
5738 output need secondary reload registers. */
32131a9c 5739
b80bba27 5740 if (reload_secondary_in_reload[j] >= 0)
32131a9c 5741 {
b80bba27 5742 int secondary_reload = reload_secondary_in_reload[j];
1554c2c6
RK
5743 rtx real_oldequiv = oldequiv;
5744 rtx real_old = old;
5745
5746 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5747 and similarly for OLD.
b80bba27 5748 See comments in get_secondary_reload in reload.c. */
1554c2c6
RK
5749 if (GET_CODE (oldequiv) == REG
5750 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5751 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5752 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5753
5754 if (GET_CODE (old) == REG
5755 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5756 && reg_equiv_mem[REGNO (old)] != 0)
5757 real_old = reg_equiv_mem[REGNO (old)];
5758
32131a9c 5759 second_reload_reg = reload_reg_rtx[secondary_reload];
b80bba27 5760 icode = reload_secondary_in_icode[j];
32131a9c 5761
d445b551
RK
5762 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5763 || (reload_in[j] != 0 && reload_out[j] != 0))
32131a9c
RK
5764 {
5765 enum reg_class new_class
5766 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
1554c2c6 5767 mode, real_oldequiv);
32131a9c
RK
5768
5769 if (new_class == NO_REGS)
5770 second_reload_reg = 0;
5771 else
5772 {
5773 enum insn_code new_icode;
5774 enum machine_mode new_mode;
5775
5776 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5777 REGNO (second_reload_reg)))
1554c2c6 5778 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
5779 else
5780 {
5781 new_icode = reload_in_optab[(int) mode];
5782 if (new_icode != CODE_FOR_nothing
5783 && ((insn_operand_predicate[(int) new_icode][0]
a8fdc208 5784 && ! ((*insn_operand_predicate[(int) new_icode][0])
32131a9c 5785 (reloadreg, mode)))
a8fdc208
RS
5786 || (insn_operand_predicate[(int) new_icode][1]
5787 && ! ((*insn_operand_predicate[(int) new_icode][1])
1554c2c6 5788 (real_oldequiv, mode)))))
32131a9c
RK
5789 new_icode = CODE_FOR_nothing;
5790
5791 if (new_icode == CODE_FOR_nothing)
5792 new_mode = mode;
5793 else
196ddf8a 5794 new_mode = insn_operand_mode[(int) new_icode][2];
32131a9c
RK
5795
5796 if (GET_MODE (second_reload_reg) != new_mode)
5797 {
5798 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5799 new_mode))
1554c2c6 5800 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
5801 else
5802 second_reload_reg
3aaa90c7
MM
5803 = gen_rtx (REG, new_mode,
5804 REGNO (second_reload_reg));
32131a9c
RK
5805 }
5806 }
5807 }
5808 }
5809
5810 /* If we still need a secondary reload register, check
5811 to see if it is being used as a scratch or intermediate
1554c2c6
RK
5812 register and generate code appropriately. If we need
5813 a scratch register, use REAL_OLDEQUIV since the form of
5814 the insn may depend on the actual address if it is
5815 a MEM. */
32131a9c
RK
5816
5817 if (second_reload_reg)
5818 {
5819 if (icode != CODE_FOR_nothing)
5820 {
546b63fb
RK
5821 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
5822 second_reload_reg));
32131a9c
RK
5823 special = 1;
5824 }
5825 else
5826 {
5827 /* See if we need a scratch register to load the
5828 intermediate register (a tertiary reload). */
5829 enum insn_code tertiary_icode
b80bba27 5830 = reload_secondary_in_icode[secondary_reload];
32131a9c
RK
5831
5832 if (tertiary_icode != CODE_FOR_nothing)
5833 {
5834 rtx third_reload_reg
b80bba27 5835 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
32131a9c 5836
546b63fb
RK
5837 emit_insn ((GEN_FCN (tertiary_icode)
5838 (second_reload_reg, real_oldequiv,
5839 third_reload_reg)));
32131a9c
RK
5840 }
5841 else
546b63fb
RK
5842 gen_input_reload (second_reload_reg, oldequiv,
5843 reload_opnum[j],
5844 reload_when_needed[j]);
5845
5846 oldequiv = second_reload_reg;
32131a9c
RK
5847 }
5848 }
5849 }
5850#endif
5851
2d182c6f 5852 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
546b63fb
RK
5853 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
5854 reload_when_needed[j]);
32131a9c
RK
5855
5856#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5857 /* We may have to make a REG_DEAD note for the secondary reload
5858 register in the insns we just made. Find the last insn that
5859 mentioned the register. */
5860 if (! special && second_reload_reg
5861 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5862 {
5863 rtx prev;
5864
546b63fb 5865 for (prev = get_last_insn (); prev;
32131a9c
RK
5866 prev = PREV_INSN (prev))
5867 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
bfa30b22
RK
5868 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5869 PATTERN (prev)))
32131a9c
RK
5870 {
5871 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5872 second_reload_reg,
5873 REG_NOTES (prev));
5874 break;
5875 }
5876 }
5877#endif
5878 }
5879
546b63fb
RK
5880 /* End this sequence. */
5881 *where = get_insns ();
5882 end_sequence ();
32131a9c
RK
5883 }
5884
5885 /* Add a note saying the input reload reg
5886 dies in this insn, if anyone cares. */
5887#ifdef PRESERVE_DEATH_INFO_REGNO_P
5888 if (old != 0
5889 && reload_reg_rtx[j] != old
5890 && reload_reg_rtx[j] != 0
5891 && reload_out[j] == 0
5892 && ! reload_inherited[j]
5893 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
5894 {
5895 register rtx reloadreg = reload_reg_rtx[j];
5896
a8fdc208 5897#if 0
32131a9c
RK
5898 /* We can't abort here because we need to support this for sched.c.
5899 It's not terrible to miss a REG_DEAD note, but we should try
5900 to figure out how to do this correctly. */
5901 /* The code below is incorrect for address-only reloads. */
5902 if (reload_when_needed[j] != RELOAD_OTHER
5903 && reload_when_needed[j] != RELOAD_FOR_INPUT)
5904 abort ();
5905#endif
5906
5907 /* Add a death note to this insn, for an input reload. */
5908
5909 if ((reload_when_needed[j] == RELOAD_OTHER
5910 || reload_when_needed[j] == RELOAD_FOR_INPUT)
5911 && ! dead_or_set_p (insn, reloadreg))
5912 REG_NOTES (insn)
5913 = gen_rtx (EXPR_LIST, REG_DEAD,
5914 reloadreg, REG_NOTES (insn));
5915 }
5916
5917 /* When we inherit a reload, the last marked death of the reload reg
5918 may no longer really be a death. */
5919 if (reload_reg_rtx[j] != 0
5920 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
5921 && reload_inherited[j])
5922 {
5923 /* Handle inheriting an output reload.
5924 Remove the death note from the output reload insn. */
5925 if (reload_spill_index[j] >= 0
5926 && GET_CODE (reload_in[j]) == REG
5927 && spill_reg_store[reload_spill_index[j]] != 0
5928 && find_regno_note (spill_reg_store[reload_spill_index[j]],
5929 REG_DEAD, REGNO (reload_reg_rtx[j])))
5930 remove_death (REGNO (reload_reg_rtx[j]),
5931 spill_reg_store[reload_spill_index[j]]);
5932 /* Likewise for input reloads that were inherited. */
5933 else if (reload_spill_index[j] >= 0
5934 && GET_CODE (reload_in[j]) == REG
5935 && spill_reg_store[reload_spill_index[j]] == 0
5936 && reload_inheritance_insn[j] != 0
a8fdc208 5937 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
32131a9c
RK
5938 REGNO (reload_reg_rtx[j])))
5939 remove_death (REGNO (reload_reg_rtx[j]),
5940 reload_inheritance_insn[j]);
5941 else
5942 {
5943 rtx prev;
5944
5945 /* We got this register from find_equiv_reg.
5946 Search back for its last death note and get rid of it.
5947 But don't search back too far.
5948 Don't go past a place where this reg is set,
5949 since a death note before that remains valid. */
5950 for (prev = PREV_INSN (insn);
5951 prev && GET_CODE (prev) != CODE_LABEL;
5952 prev = PREV_INSN (prev))
5953 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5954 && dead_or_set_p (prev, reload_reg_rtx[j]))
5955 {
5956 if (find_regno_note (prev, REG_DEAD,
5957 REGNO (reload_reg_rtx[j])))
5958 remove_death (REGNO (reload_reg_rtx[j]), prev);
5959 break;
5960 }
5961 }
5962 }
5963
5964 /* We might have used find_equiv_reg above to choose an alternate
5965 place from which to reload. If so, and it died, we need to remove
5966 that death and move it to one of the insns we just made. */
5967
5968 if (oldequiv_reg != 0
5969 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
5970 {
5971 rtx prev, prev1;
5972
5973 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
5974 prev = PREV_INSN (prev))
5975 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5976 && dead_or_set_p (prev, oldequiv_reg))
5977 {
5978 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
5979 {
5980 for (prev1 = this_reload_insn;
5981 prev1; prev1 = PREV_INSN (prev1))
5982 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
bfa30b22
RK
5983 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
5984 PATTERN (prev1)))
32131a9c
RK
5985 {
5986 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
5987 oldequiv_reg,
5988 REG_NOTES (prev1));
5989 break;
5990 }
5991 remove_death (REGNO (oldequiv_reg), prev);
5992 }
5993 break;
5994 }
5995 }
5996#endif
5997
5998 /* If we are reloading a register that was recently stored in with an
5999 output-reload, see if we can prove there was
6000 actually no need to store the old value in it. */
6001
6002 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
546b63fb 6003 && reload_in[j] != 0
32131a9c
RK
6004 && GET_CODE (reload_in[j]) == REG
6005#if 0
6006 /* There doesn't seem to be any reason to restrict this to pseudos
6007 and doing so loses in the case where we are copying from a
6008 register of the wrong class. */
6009 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6010#endif
6011 && spill_reg_store[reload_spill_index[j]] != 0
546b63fb
RK
6012 /* This is unsafe if some other reload uses the same reg first. */
6013 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6014 reload_opnum[j], reload_when_needed[j])
32131a9c
RK
6015 && dead_or_set_p (insn, reload_in[j])
6016 /* This is unsafe if operand occurs more than once in current
6017 insn. Perhaps some occurrences weren't reloaded. */
6018 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6019 delete_output_reload (insn, j,
6020 spill_reg_store[reload_spill_index[j]]);
6021
6022 /* Input-reloading is done. Now do output-reloading,
6023 storing the value from the reload-register after the main insn
6024 if reload_out[j] is nonzero.
6025
6026 ??? At some point we need to support handling output reloads of
6027 JUMP_INSNs or insns that set cc0. */
6028 old = reload_out[j];
6029 if (old != 0
6030 && reload_reg_rtx[j] != old
6031 && reload_reg_rtx[j] != 0)
6032 {
6033 register rtx reloadreg = reload_reg_rtx[j];
6034 register rtx second_reloadreg = 0;
32131a9c
RK
6035 rtx note, p;
6036 enum machine_mode mode;
6037 int special = 0;
6038
6039 /* An output operand that dies right away does need a reload,
6040 but need not be copied from it. Show the new location in the
6041 REG_UNUSED note. */
6042 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6043 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6044 {
6045 XEXP (note, 0) = reload_reg_rtx[j];
6046 continue;
6047 }
6048 else if (GET_CODE (old) == SCRATCH)
6049 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6050 but we don't want to make an output reload. */
6051 continue;
6052
6053#if 0
6054 /* Strip off of OLD any size-increasing SUBREGs such as
6055 (SUBREG:SI foo:QI 0). */
6056
6057 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6058 && (GET_MODE_SIZE (GET_MODE (old))
6059 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6060 old = SUBREG_REG (old);
6061#endif
6062
6063 /* If is a JUMP_INSN, we can't support output reloads yet. */
6064 if (GET_CODE (insn) == JUMP_INSN)
6065 abort ();
6066
546b63fb
RK
6067 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6068
32131a9c
RK
6069 /* Determine the mode to reload in.
6070 See comments above (for input reloading). */
6071
6072 mode = GET_MODE (old);
6073 if (mode == VOIDmode)
79a365a7
RS
6074 {
6075 /* VOIDmode should never happen for an output. */
6076 if (asm_noperands (PATTERN (insn)) < 0)
6077 /* It's the compiler's fault. */
6078 abort ();
6079 error_for_asm (insn, "output operand is constant in `asm'");
6080 /* Prevent crash--use something we know is valid. */
6081 mode = word_mode;
6082 old = gen_rtx (REG, mode, REGNO (reloadreg));
6083 }
32131a9c 6084
32131a9c 6085 if (GET_MODE (reloadreg) != mode)
3abe6f90 6086 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
32131a9c
RK
6087
6088#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6089
6090 /* If we need two reload regs, set RELOADREG to the intermediate
6091 one, since it will be stored into OUT. We might need a secondary
6092 register only for an input reload, so check again here. */
6093
b80bba27 6094 if (reload_secondary_out_reload[j] >= 0)
32131a9c 6095 {
1554c2c6 6096 rtx real_old = old;
32131a9c 6097
1554c2c6
RK
6098 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6099 && reg_equiv_mem[REGNO (old)] != 0)
6100 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 6101
1554c2c6
RK
6102 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6103 mode, real_old)
6104 != NO_REGS))
6105 {
6106 second_reloadreg = reloadreg;
b80bba27 6107 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
32131a9c 6108
1554c2c6
RK
6109 /* See if RELOADREG is to be used as a scratch register
6110 or as an intermediate register. */
b80bba27 6111 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
32131a9c 6112 {
b80bba27 6113 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
546b63fb 6114 (real_old, second_reloadreg, reloadreg)));
1554c2c6 6115 special = 1;
32131a9c
RK
6116 }
6117 else
1554c2c6
RK
6118 {
6119 /* See if we need both a scratch and intermediate reload
6120 register. */
b80bba27 6121 int secondary_reload = reload_secondary_out_reload[j];
1554c2c6 6122 enum insn_code tertiary_icode
b80bba27 6123 = reload_secondary_out_icode[secondary_reload];
1554c2c6 6124 rtx pat;
32131a9c 6125
1554c2c6
RK
6126 if (GET_MODE (reloadreg) != mode)
6127 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6128
6129 if (tertiary_icode != CODE_FOR_nothing)
6130 {
6131 rtx third_reloadreg
b80bba27 6132 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
1554c2c6
RK
6133 pat = (GEN_FCN (tertiary_icode)
6134 (reloadreg, second_reloadreg, third_reloadreg));
6135 }
9ad5f9f6
JW
6136#ifdef SECONDARY_MEMORY_NEEDED
6137 /* If we need a memory location to do the move, do it that way. */
6138 else if (GET_CODE (reloadreg) == REG
6139 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
6140 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
6141 REGNO_REG_CLASS (REGNO (second_reloadreg)),
6142 GET_MODE (second_reloadreg)))
6143 {
6144 /* Get the memory to use and rewrite both registers
6145 to its mode. */
546b63fb
RK
6146 rtx loc
6147 = get_secondary_mem (reloadreg,
6148 GET_MODE (second_reloadreg),
6149 reload_opnum[j],
6150 reload_when_needed[j]);
9ad5f9f6
JW
6151 rtx tmp_reloadreg;
6152
6153 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6154 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6155 REGNO (second_reloadreg));
6156
6157 if (GET_MODE (loc) != GET_MODE (reloadreg))
6158 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6159 REGNO (reloadreg));
6160 else
6161 tmp_reloadreg = reloadreg;
6162
546b63fb 6163 emit_move_insn (loc, second_reloadreg);
9ad5f9f6
JW
6164 pat = gen_move_insn (tmp_reloadreg, loc);
6165 }
6166#endif
1554c2c6
RK
6167 else
6168 pat = gen_move_insn (reloadreg, second_reloadreg);
6169
546b63fb 6170 emit_insn (pat);
1554c2c6 6171 }
32131a9c
RK
6172 }
6173 }
6174#endif
6175
6176 /* Output the last reload insn. */
6177 if (! special)
0dadecf6
RK
6178 {
6179#ifdef SECONDARY_MEMORY_NEEDED
6180 /* If we need a memory location to do the move, do it that way. */
6181 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6182 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6183 REGNO_REG_CLASS (REGNO (reloadreg)),
6184 GET_MODE (reloadreg)))
6185 {
6186 /* Get the memory to use and rewrite both registers to
6187 its mode. */
546b63fb
RK
6188 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6189 reload_opnum[j],
6190 reload_when_needed[j]);
0dadecf6
RK
6191
6192 if (GET_MODE (loc) != GET_MODE (reloadreg))
6193 reloadreg = gen_rtx (REG, GET_MODE (loc),
6194 REGNO (reloadreg));
6195
6196 if (GET_MODE (loc) != GET_MODE (old))
6197 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6198
546b63fb
RK
6199 emit_insn (gen_move_insn (loc, reloadreg));
6200 emit_insn (gen_move_insn (old, loc));
0dadecf6
RK
6201 }
6202 else
6203#endif
546b63fb 6204 emit_insn (gen_move_insn (old, reloadreg));
0dadecf6 6205 }
32131a9c
RK
6206
6207#ifdef PRESERVE_DEATH_INFO_REGNO_P
6208 /* If final will look at death notes for this reg,
6209 put one on the last output-reload insn to use it. Similarly
6210 for any secondary register. */
6211 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
546b63fb 6212 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6213 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
6214 && reg_overlap_mentioned_for_reload_p (reloadreg,
6215 PATTERN (p)))
32131a9c
RK
6216 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6217 reloadreg, REG_NOTES (p));
6218
6219#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6220 if (! special
6221 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
546b63fb 6222 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6223 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
6224 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6225 PATTERN (p)))
32131a9c
RK
6226 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6227 second_reloadreg, REG_NOTES (p));
6228#endif
6229#endif
6230 /* Look at all insns we emitted, just to be safe. */
546b63fb 6231 for (p = get_insns (); p; p = NEXT_INSN (p))
32131a9c
RK
6232 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6233 {
6234 /* If this output reload doesn't come from a spill reg,
6235 clear any memory of reloaded copies of the pseudo reg.
6236 If this output reload comes from a spill reg,
6237 reg_has_output_reload will make this do nothing. */
6238 note_stores (PATTERN (p), forget_old_reloads_1);
6239
6240 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6241 store_insn = p;
6242 }
6243
546b63fb
RK
6244 output_reload_insns[reload_opnum[j]] = get_insns ();
6245 end_sequence ();
6246
32131a9c
RK
6247 }
6248
6249 if (reload_spill_index[j] >= 0)
6250 new_spill_reg_store[reload_spill_index[j]] = store_insn;
6251 }
6252
546b63fb
RK
6253 /* Now write all the insns we made for reloads in the order expected by
6254 the allocation functions. Prior to the insn being reloaded, we write
6255 the following reloads:
6256
6257 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6258
6259 RELOAD_OTHER reloads.
6260
6261 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6262 the RELOAD_FOR_INPUT reload for the operand.
6263
6264 RELOAD_FOR_OPERAND_ADDRESS reloads.
6265
6266 After the insn being reloaded, we write the following:
6267
6268 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6269 the RELOAD_FOR_OUTPUT reload for that operand. */
6270
6271 emit_insns_before (other_input_address_reload_insns, before_insn);
6272 emit_insns_before (other_input_reload_insns, before_insn);
6273
6274 for (j = 0; j < reload_n_operands; j++)
6275 {
6276 emit_insns_before (input_address_reload_insns[j], before_insn);
6277 emit_insns_before (input_reload_insns[j], before_insn);
6278 }
6279
6280 emit_insns_before (operand_reload_insns, before_insn);
6281
6282 for (j = 0; j < reload_n_operands; j++)
6283 {
6284 emit_insns_before (output_address_reload_insns[j], following_insn);
6285 emit_insns_before (output_reload_insns[j], following_insn);
6286 }
6287
32131a9c
RK
6288 /* Move death notes from INSN
6289 to output-operand-address and output reload insns. */
6290#ifdef PRESERVE_DEATH_INFO_REGNO_P
6291 {
6292 rtx insn1;
6293 /* Loop over those insns, last ones first. */
6294 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6295 insn1 = PREV_INSN (insn1))
6296 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6297 {
6298 rtx source = SET_SRC (PATTERN (insn1));
6299 rtx dest = SET_DEST (PATTERN (insn1));
6300
6301 /* The note we will examine next. */
6302 rtx reg_notes = REG_NOTES (insn);
6303 /* The place that pointed to this note. */
6304 rtx *prev_reg_note = &REG_NOTES (insn);
6305
6306 /* If the note is for something used in the source of this
6307 reload insn, or in the output address, move the note. */
6308 while (reg_notes)
6309 {
6310 rtx next_reg_notes = XEXP (reg_notes, 1);
6311 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6312 && GET_CODE (XEXP (reg_notes, 0)) == REG
6313 && ((GET_CODE (dest) != REG
bfa30b22
RK
6314 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6315 dest))
6316 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6317 source)))
32131a9c
RK
6318 {
6319 *prev_reg_note = next_reg_notes;
6320 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6321 REG_NOTES (insn1) = reg_notes;
6322 }
6323 else
6324 prev_reg_note = &XEXP (reg_notes, 1);
6325
6326 reg_notes = next_reg_notes;
6327 }
6328 }
6329 }
6330#endif
6331
6332 /* For all the spill regs newly reloaded in this instruction,
6333 record what they were reloaded from, so subsequent instructions
d445b551
RK
6334 can inherit the reloads.
6335
6336 Update spill_reg_store for the reloads of this insn.
e9e79d69 6337 Copy the elements that were updated in the loop above. */
32131a9c
RK
6338
6339 for (j = 0; j < n_reloads; j++)
6340 {
6341 register int r = reload_order[j];
6342 register int i = reload_spill_index[r];
6343
6344 /* I is nonneg if this reload used one of the spill regs.
6345 If reload_reg_rtx[r] is 0, this is an optional reload
546b63fb
RK
6346 that we opted to ignore.
6347
6348 Also ignore reloads that don't reach the end of the insn,
6349 since we will eventually see the one that does. */
d445b551 6350
546b63fb
RK
6351 if (i >= 0 && reload_reg_rtx[r] != 0
6352 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6353 reload_when_needed[r]))
32131a9c
RK
6354 {
6355 /* First, clear out memory of what used to be in this spill reg.
6356 If consecutive registers are used, clear them all. */
6357 int nr
6358 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6359 int k;
6360
6361 for (k = 0; k < nr; k++)
6362 {
6363 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6364 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6365 }
6366
6367 /* Maybe the spill reg contains a copy of reload_out. */
6368 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6369 {
6370 register int nregno = REGNO (reload_out[r]);
d08ea79f
RK
6371 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6372 : HARD_REGNO_NREGS (nregno,
6373 GET_MODE (reload_reg_rtx[r])));
d445b551
RK
6374
6375 spill_reg_store[i] = new_spill_reg_store[i];
32131a9c 6376 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 6377
d08ea79f
RK
6378 /* If NREGNO is a hard register, it may occupy more than
6379 one register. If it does, say what is in the
6380 rest of the registers assuming that both registers
6381 agree on how many words the object takes. If not,
6382 invalidate the subsequent registers. */
6383
6384 if (nregno < FIRST_PSEUDO_REGISTER)
6385 for (k = 1; k < nnr; k++)
6386 reg_last_reload_reg[nregno + k]
74eb5c52
DE
6387 = (nr == nnr ? gen_rtx (REG,
6388 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
d08ea79f
RK
6389 REGNO (reload_reg_rtx[r]) + k)
6390 : 0);
6391
6392 /* Now do the inverse operation. */
32131a9c
RK
6393 for (k = 0; k < nr; k++)
6394 {
6395 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
d08ea79f
RK
6396 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6397 : nregno + k);
32131a9c
RK
6398 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6399 }
6400 }
d445b551 6401
2c9ce2ef
RK
6402 /* Maybe the spill reg contains a copy of reload_in. Only do
6403 something if there will not be an output reload for
6404 the register being reloaded. */
32131a9c
RK
6405 else if (reload_out[r] == 0
6406 && reload_in[r] != 0
2c9ce2ef
RK
6407 && ((GET_CODE (reload_in[r]) == REG
6408 && ! reg_has_output_reload[REGNO (reload_in[r])]
6409 || (GET_CODE (reload_in_reg[r]) == REG
6410 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
32131a9c
RK
6411 {
6412 register int nregno;
d08ea79f
RK
6413 int nnr;
6414
32131a9c
RK
6415 if (GET_CODE (reload_in[r]) == REG)
6416 nregno = REGNO (reload_in[r]);
6417 else
6418 nregno = REGNO (reload_in_reg[r]);
6419
d08ea79f
RK
6420 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6421 : HARD_REGNO_NREGS (nregno,
6422 GET_MODE (reload_reg_rtx[r])));
6423
546b63fb 6424 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 6425
d08ea79f
RK
6426 if (nregno < FIRST_PSEUDO_REGISTER)
6427 for (k = 1; k < nnr; k++)
6428 reg_last_reload_reg[nregno + k]
74eb5c52
DE
6429 = (nr == nnr ? gen_rtx (REG,
6430 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
d08ea79f
RK
6431 REGNO (reload_reg_rtx[r]) + k)
6432 : 0);
6433
546b63fb
RK
6434 /* Unless we inherited this reload, show we haven't
6435 recently done a store. */
6436 if (! reload_inherited[r])
6437 spill_reg_store[i] = 0;
d445b551 6438
546b63fb
RK
6439 for (k = 0; k < nr; k++)
6440 {
6441 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
d08ea79f
RK
6442 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6443 : nregno + k);
546b63fb
RK
6444 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6445 = insn;
32131a9c
RK
6446 }
6447 }
6448 }
6449
6450 /* The following if-statement was #if 0'd in 1.34 (or before...).
6451 It's reenabled in 1.35 because supposedly nothing else
6452 deals with this problem. */
6453
6454 /* If a register gets output-reloaded from a non-spill register,
6455 that invalidates any previous reloaded copy of it.
6456 But forget_old_reloads_1 won't get to see it, because
6457 it thinks only about the original insn. So invalidate it here. */
6458 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6459 {
6460 register int nregno = REGNO (reload_out[r]);
36281332
RK
6461 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6462
6463 while (num_regs-- > 0)
6464 reg_last_reload_reg[nregno + num_regs] = 0;
32131a9c
RK
6465 }
6466 }
6467}
6468\f
546b63fb
RK
6469/* Emit code to perform an input reload of IN to RELOADREG. IN is from
6470 operand OPNUM with reload type TYPE.
6471
3c3eeea6 6472 Returns first insn emitted. */
32131a9c
RK
6473
6474rtx
546b63fb 6475gen_input_reload (reloadreg, in, opnum, type)
32131a9c
RK
6476 rtx reloadreg;
6477 rtx in;
546b63fb
RK
6478 int opnum;
6479 enum reload_type type;
32131a9c 6480{
546b63fb 6481 rtx last = get_last_insn ();
32131a9c 6482
a8fdc208 6483 /* How to do this reload can get quite tricky. Normally, we are being
32131a9c
RK
6484 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6485 register that didn't get a hard register. In that case we can just
6486 call emit_move_insn.
6487
a7fd196c
JW
6488 We can also be asked to reload a PLUS that adds a register or a MEM to
6489 another register, constant or MEM. This can occur during frame pointer
6490 elimination and while reloading addresses. This case is handled by
6491 trying to emit a single insn to perform the add. If it is not valid,
6492 we use a two insn sequence.
32131a9c
RK
6493
6494 Finally, we could be called to handle an 'o' constraint by putting
6495 an address into a register. In that case, we first try to do this
6496 with a named pattern of "reload_load_address". If no such pattern
6497 exists, we just emit a SET insn and hope for the best (it will normally
6498 be valid on machines that use 'o').
6499
6500 This entire process is made complex because reload will never
6501 process the insns we generate here and so we must ensure that
6502 they will fit their constraints and also by the fact that parts of
6503 IN might be being reloaded separately and replaced with spill registers.
6504 Because of this, we are, in some sense, just guessing the right approach
6505 here. The one listed above seems to work.
6506
6507 ??? At some point, this whole thing needs to be rethought. */
6508
6509 if (GET_CODE (in) == PLUS
a7fd196c
JW
6510 && (GET_CODE (XEXP (in, 0)) == REG
6511 || GET_CODE (XEXP (in, 0)) == MEM)
6512 && (GET_CODE (XEXP (in, 1)) == REG
6513 || CONSTANT_P (XEXP (in, 1))
6514 || GET_CODE (XEXP (in, 1)) == MEM))
32131a9c 6515 {
a7fd196c
JW
6516 /* We need to compute the sum of a register or a MEM and another
6517 register, constant, or MEM, and put it into the reload
3002e160
JW
6518 register. The best possible way of doing this is if the machine
6519 has a three-operand ADD insn that accepts the required operands.
32131a9c
RK
6520
6521 The simplest approach is to try to generate such an insn and see if it
6522 is recognized and matches its constraints. If so, it can be used.
6523
6524 It might be better not to actually emit the insn unless it is valid,
0009eff2 6525 but we need to pass the insn as an operand to `recog' and
b36d7dd7 6526 `insn_extract' and it is simpler to emit and then delete the insn if
0009eff2 6527 not valid than to dummy things up. */
a8fdc208 6528
af929c62 6529 rtx op0, op1, tem, insn;
32131a9c 6530 int code;
a8fdc208 6531
af929c62
RK
6532 op0 = find_replacement (&XEXP (in, 0));
6533 op1 = find_replacement (&XEXP (in, 1));
6534
32131a9c
RK
6535 /* Since constraint checking is strict, commutativity won't be
6536 checked, so we need to do that here to avoid spurious failure
6537 if the add instruction is two-address and the second operand
6538 of the add is the same as the reload reg, which is frequently
6539 the case. If the insn would be A = B + A, rearrange it so
6540 it will be A = A + B as constrain_operands expects. */
a8fdc208 6541
32131a9c
RK
6542 if (GET_CODE (XEXP (in, 1)) == REG
6543 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
af929c62
RK
6544 tem = op0, op0 = op1, op1 = tem;
6545
6546 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6547 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
32131a9c 6548
546b63fb 6549 insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
32131a9c
RK
6550 code = recog_memoized (insn);
6551
6552 if (code >= 0)
6553 {
6554 insn_extract (insn);
6555 /* We want constrain operands to treat this insn strictly in
6556 its validity determination, i.e., the way it would after reload
6557 has completed. */
6558 if (constrain_operands (code, 1))
6559 return insn;
6560 }
6561
546b63fb 6562 delete_insns_since (last);
32131a9c
RK
6563
6564 /* If that failed, we must use a conservative two-insn sequence.
6565 use move to copy constant, MEM, or pseudo register to the reload
af929c62
RK
6566 register since "move" will be able to handle an arbitrary operand,
6567 unlike add which can't, in general. Then add the registers.
32131a9c
RK
6568
6569 If there is another way to do this for a specific machine, a
6570 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6571 we emit below. */
6572
af929c62
RK
6573 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6574 || (GET_CODE (op1) == REG
6575 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6576 tem = op0, op0 = op1, op1 = tem;
32131a9c 6577
546b63fb 6578 emit_insn (gen_move_insn (reloadreg, op0));
39b56c2a
RK
6579
6580 /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
6581 This fixes a problem on the 32K where the stack pointer cannot
6582 be used as an operand of an add insn. */
6583
6584 if (rtx_equal_p (op0, op1))
6585 op1 = reloadreg;
6586
c77c9766
RK
6587 insn = emit_insn (gen_add2_insn (reloadreg, op1));
6588
6589 /* If that failed, copy the address register to the reload register.
6590 Then add the constant to the reload register. */
6591
6592 code = recog_memoized (insn);
6593
6594 if (code >= 0)
6595 {
6596 insn_extract (insn);
6597 /* We want constrain operands to treat this insn strictly in
6598 its validity determination, i.e., the way it would after reload
6599 has completed. */
6600 if (constrain_operands (code, 1))
6601 return insn;
6602 }
6603
6604 delete_insns_since (last);
6605
6606 emit_insn (gen_move_insn (reloadreg, op1));
6607 emit_insn (gen_add2_insn (reloadreg, op0));
32131a9c
RK
6608 }
6609
0dadecf6
RK
6610#ifdef SECONDARY_MEMORY_NEEDED
6611 /* If we need a memory location to do the move, do it that way. */
6612 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6613 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6614 REGNO_REG_CLASS (REGNO (reloadreg)),
6615 GET_MODE (reloadreg)))
6616 {
6617 /* Get the memory to use and rewrite both registers to its mode. */
546b63fb 6618 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);
0dadecf6
RK
6619
6620 if (GET_MODE (loc) != GET_MODE (reloadreg))
6621 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
6622
6623 if (GET_MODE (loc) != GET_MODE (in))
6624 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6625
546b63fb
RK
6626 emit_insn (gen_move_insn (loc, in));
6627 emit_insn (gen_move_insn (reloadreg, loc));
0dadecf6
RK
6628 }
6629#endif
6630
32131a9c
RK
6631 /* If IN is a simple operand, use gen_move_insn. */
6632 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
546b63fb 6633 emit_insn (gen_move_insn (reloadreg, in));
32131a9c
RK
6634
6635#ifdef HAVE_reload_load_address
6636 else if (HAVE_reload_load_address)
546b63fb 6637 emit_insn (gen_reload_load_address (reloadreg, in));
32131a9c
RK
6638#endif
6639
6640 /* Otherwise, just write (set REGLOADREG IN) and hope for the best. */
6641 else
546b63fb 6642 emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
32131a9c
RK
6643
6644 /* Return the first insn emitted.
546b63fb 6645 We can not just return get_last_insn, because there may have
32131a9c
RK
6646 been multiple instructions emitted. Also note that gen_move_insn may
6647 emit more than one insn itself, so we can not assume that there is one
6648 insn emitted per emit_insn_before call. */
6649
546b63fb 6650 return last ? NEXT_INSN (last) : get_insns ();
32131a9c
RK
6651}
6652\f
6653/* Delete a previously made output-reload
6654 whose result we now believe is not needed.
6655 First we double-check.
6656
6657 INSN is the insn now being processed.
6658 OUTPUT_RELOAD_INSN is the insn of the output reload.
6659 J is the reload-number for this insn. */
6660
6661static void
6662delete_output_reload (insn, j, output_reload_insn)
6663 rtx insn;
6664 int j;
6665 rtx output_reload_insn;
6666{
6667 register rtx i1;
6668
6669 /* Get the raw pseudo-register referred to. */
6670
6671 rtx reg = reload_in[j];
6672 while (GET_CODE (reg) == SUBREG)
6673 reg = SUBREG_REG (reg);
6674
6675 /* If the pseudo-reg we are reloading is no longer referenced
6676 anywhere between the store into it and here,
6677 and no jumps or labels intervene, then the value can get
6678 here through the reload reg alone.
6679 Otherwise, give up--return. */
6680 for (i1 = NEXT_INSN (output_reload_insn);
6681 i1 != insn; i1 = NEXT_INSN (i1))
6682 {
6683 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6684 return;
6685 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6686 && reg_mentioned_p (reg, PATTERN (i1)))
6687 return;
6688 }
6689
208dffa5
RS
6690 if (cannot_omit_stores[REGNO (reg)])
6691 return;
6692
32131a9c
RK
6693 /* If this insn will store in the pseudo again,
6694 the previous store can be removed. */
6695 if (reload_out[j] == reload_in[j])
6696 delete_insn (output_reload_insn);
6697
6698 /* See if the pseudo reg has been completely replaced
6699 with reload regs. If so, delete the store insn
6700 and forget we had a stack slot for the pseudo. */
6701 else if (reg_n_deaths[REGNO (reg)] == 1
6702 && reg_basic_block[REGNO (reg)] >= 0
6703 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6704 {
6705 rtx i2;
6706
6707 /* We know that it was used only between here
6708 and the beginning of the current basic block.
6709 (We also know that the last use before INSN was
6710 the output reload we are thinking of deleting, but never mind that.)
6711 Search that range; see if any ref remains. */
6712 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6713 {
d445b551
RK
6714 rtx set = single_set (i2);
6715
32131a9c
RK
6716 /* Uses which just store in the pseudo don't count,
6717 since if they are the only uses, they are dead. */
d445b551 6718 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
6719 continue;
6720 if (GET_CODE (i2) == CODE_LABEL
6721 || GET_CODE (i2) == JUMP_INSN)
6722 break;
6723 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6724 && reg_mentioned_p (reg, PATTERN (i2)))
6725 /* Some other ref remains;
6726 we can't do anything. */
6727 return;
6728 }
6729
6730 /* Delete the now-dead stores into this pseudo. */
6731 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6732 {
d445b551
RK
6733 rtx set = single_set (i2);
6734
6735 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
6736 delete_insn (i2);
6737 if (GET_CODE (i2) == CODE_LABEL
6738 || GET_CODE (i2) == JUMP_INSN)
6739 break;
6740 }
6741
6742 /* For the debugging info,
6743 say the pseudo lives in this reload reg. */
6744 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6745 alter_reg (REGNO (reg), -1);
6746 }
6747}
32131a9c 6748\f
a8fdc208 6749/* Output reload-insns to reload VALUE into RELOADREG.
858a47b1 6750 VALUE is an autoincrement or autodecrement RTX whose operand
32131a9c
RK
6751 is a register or memory location;
6752 so reloading involves incrementing that location.
6753
6754 INC_AMOUNT is the number to increment or decrement by (always positive).
546b63fb 6755 This cannot be deduced from VALUE. */
32131a9c 6756
546b63fb
RK
6757static void
6758inc_for_reload (reloadreg, value, inc_amount)
32131a9c
RK
6759 rtx reloadreg;
6760 rtx value;
6761 int inc_amount;
32131a9c
RK
6762{
6763 /* REG or MEM to be copied and incremented. */
6764 rtx incloc = XEXP (value, 0);
6765 /* Nonzero if increment after copying. */
6766 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
546b63fb 6767 rtx last;
0009eff2
RK
6768 rtx inc;
6769 rtx add_insn;
6770 int code;
32131a9c
RK
6771
6772 /* No hard register is equivalent to this register after
6773 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
6774 we could inc/dec that register as well (maybe even using it for
6775 the source), but I'm not sure it's worth worrying about. */
6776 if (GET_CODE (incloc) == REG)
6777 reg_last_reload_reg[REGNO (incloc)] = 0;
6778
6779 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6780 inc_amount = - inc_amount;
6781
fb3821f7 6782 inc = GEN_INT (inc_amount);
0009eff2
RK
6783
6784 /* If this is post-increment, first copy the location to the reload reg. */
6785 if (post)
546b63fb 6786 emit_insn (gen_move_insn (reloadreg, incloc));
0009eff2
RK
6787
6788 /* See if we can directly increment INCLOC. Use a method similar to that
6789 in gen_input_reload. */
6790
546b63fb
RK
6791 last = get_last_insn ();
6792 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
6793 gen_rtx (PLUS, GET_MODE (incloc),
6794 incloc, inc)));
0009eff2
RK
6795
6796 code = recog_memoized (add_insn);
6797 if (code >= 0)
32131a9c 6798 {
0009eff2
RK
6799 insn_extract (add_insn);
6800 if (constrain_operands (code, 1))
32131a9c 6801 {
0009eff2
RK
6802 /* If this is a pre-increment and we have incremented the value
6803 where it lives, copy the incremented value to RELOADREG to
6804 be used as an address. */
6805
6806 if (! post)
546b63fb
RK
6807 emit_insn (gen_move_insn (reloadreg, incloc));
6808
6809 return;
32131a9c
RK
6810 }
6811 }
0009eff2 6812
546b63fb 6813 delete_insns_since (last);
0009eff2
RK
6814
6815 /* If couldn't do the increment directly, must increment in RELOADREG.
6816 The way we do this depends on whether this is pre- or post-increment.
6817 For pre-increment, copy INCLOC to the reload register, increment it
6818 there, then save back. */
6819
6820 if (! post)
6821 {
546b63fb
RK
6822 emit_insn (gen_move_insn (reloadreg, incloc));
6823 emit_insn (gen_add2_insn (reloadreg, inc));
6824 emit_insn (gen_move_insn (incloc, reloadreg));
0009eff2 6825 }
32131a9c
RK
6826 else
6827 {
0009eff2
RK
6828 /* Postincrement.
6829 Because this might be a jump insn or a compare, and because RELOADREG
6830 may not be available after the insn in an input reload, we must do
6831 the incrementation before the insn being reloaded for.
6832
6833 We have already copied INCLOC to RELOADREG. Increment the copy in
6834 RELOADREG, save that back, then decrement RELOADREG so it has
6835 the original value. */
6836
546b63fb
RK
6837 emit_insn (gen_add2_insn (reloadreg, inc));
6838 emit_insn (gen_move_insn (incloc, reloadreg));
6839 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
32131a9c 6840 }
0009eff2 6841
546b63fb 6842 return;
32131a9c
RK
6843}
6844\f
6845/* Return 1 if we are certain that the constraint-string STRING allows
6846 the hard register REG. Return 0 if we can't be sure of this. */
6847
6848static int
6849constraint_accepts_reg_p (string, reg)
6850 char *string;
6851 rtx reg;
6852{
6853 int value = 0;
6854 int regno = true_regnum (reg);
6855 int c;
6856
6857 /* Initialize for first alternative. */
6858 value = 0;
6859 /* Check that each alternative contains `g' or `r'. */
6860 while (1)
6861 switch (c = *string++)
6862 {
6863 case 0:
6864 /* If an alternative lacks `g' or `r', we lose. */
6865 return value;
6866 case ',':
6867 /* If an alternative lacks `g' or `r', we lose. */
6868 if (value == 0)
6869 return 0;
6870 /* Initialize for next alternative. */
6871 value = 0;
6872 break;
6873 case 'g':
6874 case 'r':
6875 /* Any general reg wins for this alternative. */
6876 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
6877 value = 1;
6878 break;
6879 default:
6880 /* Any reg in specified class wins for this alternative. */
6881 {
0009eff2 6882 enum reg_class class = REG_CLASS_FROM_LETTER (c);
32131a9c 6883
0009eff2 6884 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
32131a9c
RK
6885 value = 1;
6886 }
6887 }
6888}
6889\f
d445b551
RK
6890/* Return the number of places FIND appears within X, but don't count
6891 an occurrence if some SET_DEST is FIND. */
32131a9c
RK
6892
6893static int
6894count_occurrences (x, find)
6895 register rtx x, find;
6896{
6897 register int i, j;
6898 register enum rtx_code code;
6899 register char *format_ptr;
6900 int count;
6901
6902 if (x == find)
6903 return 1;
6904 if (x == 0)
6905 return 0;
6906
6907 code = GET_CODE (x);
6908
6909 switch (code)
6910 {
6911 case REG:
6912 case QUEUED:
6913 case CONST_INT:
6914 case CONST_DOUBLE:
6915 case SYMBOL_REF:
6916 case CODE_LABEL:
6917 case PC:
6918 case CC0:
6919 return 0;
d445b551
RK
6920
6921 case SET:
6922 if (SET_DEST (x) == find)
6923 return count_occurrences (SET_SRC (x), find);
6924 break;
32131a9c
RK
6925 }
6926
6927 format_ptr = GET_RTX_FORMAT (code);
6928 count = 0;
6929
6930 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6931 {
6932 switch (*format_ptr++)
6933 {
6934 case 'e':
6935 count += count_occurrences (XEXP (x, i), find);
6936 break;
6937
6938 case 'E':
6939 if (XVEC (x, i) != NULL)
6940 {
6941 for (j = 0; j < XVECLEN (x, i); j++)
6942 count += count_occurrences (XVECEXP (x, i, j), find);
6943 }
6944 break;
6945 }
6946 }
6947 return count;
6948}
This page took 1.237872 seconds and 5 git commands to generate.