]> gcc.gnu.org Git - gcc.git/blame - gcc/reload1.c
(local_alloc): Fix typo in last change.
[gcc.git] / gcc / reload1.c
CommitLineData
32131a9c 1/* Reload pseudo regs into hard regs for insns that require hard regs.
8c15858f 2 Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.
32131a9c
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
ff2da9fc 21#include <stdio.h>
32131a9c
RK
22#include "config.h"
23#include "rtl.h"
24#include "obstack.h"
25#include "insn-config.h"
26#include "insn-flags.h"
27#include "insn-codes.h"
28#include "flags.h"
29#include "expr.h"
30#include "regs.h"
31#include "hard-reg-set.h"
32#include "reload.h"
33#include "recog.h"
34#include "basic-block.h"
35#include "output.h"
32131a9c
RK
36
37/* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
546b63fb
RK
69
70
/* Default cost of moving a value between two registers, used when the
   target machine description does not define REGISTER_MOVE_COST.  */
#ifndef REGISTER_MOVE_COST
#define REGISTER_MOVE_COST(x, y) 2
#endif

/* Default cost of a move between a register and memory, used when the
   target machine description does not define MEMORY_MOVE_COST.  */
#ifndef MEMORY_MOVE_COST
#define MEMORY_MOVE_COST(x) 4
#endif
32131a9c
RK
78\f
79/* During reload_as_needed, element N contains a REG rtx for the hard reg
d08ea79f 80 into which reg N has been reloaded (perhaps for a previous insn). */
32131a9c
RK
81static rtx *reg_last_reload_reg;
82
83/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85static char *reg_has_output_reload;
86
87/* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89static HARD_REG_SET reg_is_output_reload;
90
91/* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95rtx *reg_equiv_constant;
96
97/* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
4803a34a 101rtx *reg_equiv_memory_loc;
32131a9c
RK
102
103/* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105 (because its displacement is too big for the machine.) */
106rtx *reg_equiv_address;
107
108/* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
110rtx *reg_equiv_mem;
111
112/* Widest width in which each pseudo reg is referred to (via subreg). */
113static int *reg_max_ref_width;
114
115/* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117static rtx *reg_equiv_init;
118
119/* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
125
126/* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
131
132/* Number of spill-regs so far; number of valid elements of spill_regs. */
133static int n_spills;
134
135/* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet. This rtx is reused, provided it has
138 the proper mode. */
139static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
140
141/* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
145
146/* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150static short spill_reg_order[FIRST_PSEUDO_REGISTER];
151
152/* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155HARD_REG_SET forbidden_regs;
156
157/* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
546b63fb
RK
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
32131a9c
RK
161
162 (spill_reg_order prevents these registers from being used to start a
163 group.) */
164static HARD_REG_SET bad_spill_regs;
165
166/* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169static short spill_regs[FIRST_PSEUDO_REGISTER];
170
171/* Describes order of preference for putting regs into spill_regs.
172 Contains the numbers of all the hard regs, in order most preferred first.
173 This order is different for each function.
174 It is set up by order_regs_for_reload.
175 Empty elements at the end contain -1. */
176static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
177
178/* 1 for a hard register that appears explicitly in the rtl
179 (for example, function value registers, special registers
180 used by insns, structure value pointer registers). */
181static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
182
183/* Indicates if a register was counted against the need for
184 groups. 0 means it can count against max_nongroup instead. */
185static HARD_REG_SET counted_for_groups;
186
187/* Indicates if a register was counted against the need for
188 non-groups. 0 means it can become part of a new group.
189 During choose_reload_regs, 1 here means don't use this reg
190 as part of a group, even if it seems to be otherwise ok. */
191static HARD_REG_SET counted_for_nongroups;
192
208dffa5
RS
193/* Indexed by pseudo reg number N,
194 says may not delete stores into the real (memory) home of pseudo N.
195 This is set if we already substituted a memory equivalent in some uses,
196 which happens when we have to eliminate the fp from it. */
197static char *cannot_omit_stores;
198
32131a9c
RK
199/* Nonzero if indirect addressing is supported on the machine; this means
200 that spilling (REG n) does not require reloading it into a register in
201 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
202 value indicates the level of indirect addressing supported, e.g., two
203 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
204 a hard register. */
205
206static char spill_indirect_levels;
207
208/* Nonzero if indirect addressing is supported when the innermost MEM is
209 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
210 which these are valid is the same as spill_indirect_levels, above. */
211
212char indirect_symref_ok;
213
214/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
215
216char double_reg_address_ok;
217
218/* Record the stack slot for each spilled hard register. */
219
220static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
221
222/* Width allocated so far for that stack slot. */
223
224static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
225
226/* Indexed by register class and basic block number, nonzero if there is
227 any need for a spill register of that class in that basic block.
228 The pointer is 0 if we did stupid allocation and don't know
229 the structure of basic blocks. */
230
231char *basic_block_needs[N_REG_CLASSES];
232
233/* First uid used by insns created by reload in this function.
234 Used in find_equiv_reg. */
235int reload_first_uid;
236
237/* Flag set by local-alloc or global-alloc if anything is live in
238 a call-clobbered reg across calls. */
239
240int caller_save_needed;
241
242/* Set to 1 while reload_as_needed is operating.
243 Required by some machines to handle any generated moves differently. */
244
245int reload_in_progress = 0;
246
247/* These arrays record the insn_code of insns that may be needed to
248 perform input and output reloads of special objects. They provide a
249 place to pass a scratch register. */
250
251enum insn_code reload_in_optab[NUM_MACHINE_MODES];
252enum insn_code reload_out_optab[NUM_MACHINE_MODES];
253
d45cf215 254/* This obstack is used for allocation of rtl during register elimination.
32131a9c
RK
255 The allocated storage can be freed once find_reloads has processed the
256 insn. */
257
258struct obstack reload_obstack;
259char *reload_firstobj;
260
261#define obstack_chunk_alloc xmalloc
262#define obstack_chunk_free free
263
32131a9c
RK
264/* List of labels that must never be deleted. */
265extern rtx forced_labels;
266\f
267/* This structure is used to record information about register eliminations.
268 Each array entry describes one possible way of eliminating a register
269 in favor of another. If there is more than one way of eliminating a
270 particular register, the most preferred should be specified first. */
271
272static struct elim_table
273{
274 int from; /* Register number to be eliminated. */
275 int to; /* Register number used as replacement. */
276 int initial_offset; /* Initial difference between values. */
277 int can_eliminate; /* Non-zero if this elimination can be done. */
278 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
279 insns made by reload. */
280 int offset; /* Current offset between the two regs. */
a8efe40d 281 int max_offset; /* Maximum offset between the two regs. */
32131a9c
RK
282 int previous_offset; /* Offset at end of previous insn. */
283 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
284 rtx from_rtx; /* REG rtx for the register to be eliminated.
285 We cannot simply compare the number since
286 we might then spuriously replace a hard
287 register corresponding to a pseudo
288 assigned to the reg to be eliminated. */
289 rtx to_rtx; /* REG rtx for the replacement. */
290} reg_eliminate[] =
291
292/* If a set of eliminable registers was specified, define the table from it.
293 Otherwise, default to the normal case of the frame pointer being
294 replaced by the stack pointer. */
295
296#ifdef ELIMINABLE_REGS
297 ELIMINABLE_REGS;
298#else
299 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
300#endif
301
302#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
303
304/* Record the number of pending eliminations that have an offset not equal
305 to their initial offset. If non-zero, we use a new copy of each
306 replacement result in any insns encountered. */
307static int num_not_at_initial_offset;
308
309/* Count the number of registers that we may be able to eliminate. */
310static int num_eliminable;
311
312/* For each label, we record the offset of each elimination. If we reach
313 a label by more than one path and an offset differs, we cannot do the
314 elimination. This information is indexed by the number of the label.
315 The first table is an array of flags that records whether we have yet
316 encountered a label and the second table is an array of arrays, one
317 entry in the latter array for each elimination. */
318
319static char *offsets_known_at;
320static int (*offsets_at)[NUM_ELIMINABLE_REGS];
321
322/* Number of labels in the current function. */
323
324static int num_labels;
546b63fb
RK
325
326struct hard_reg_n_uses { int regno; int uses; };
32131a9c 327\f
546b63fb
RK
328static int possible_group_p PROTO((int, int *));
329static void count_possible_groups PROTO((int *, enum machine_mode *,
330 int *));
331static int modes_equiv_for_class_p PROTO((enum machine_mode,
332 enum machine_mode,
333 enum reg_class));
334static void spill_failure PROTO((rtx));
335static int new_spill_reg PROTO((int, int, int *, int *, int,
336 FILE *));
337static void delete_dead_insn PROTO((rtx));
338static void alter_reg PROTO((int, int));
c307c237 339static void mark_scratch_live PROTO((rtx));
546b63fb
RK
340static void set_label_offsets PROTO((rtx, rtx, int));
341static int eliminate_regs_in_insn PROTO((rtx, int));
342static void mark_not_eliminable PROTO((rtx, rtx));
343static int spill_hard_reg PROTO((int, int, FILE *, int));
344static void scan_paradoxical_subregs PROTO((rtx));
345static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
346 struct hard_reg_n_uses *));
347static void order_regs_for_reload PROTO((void));
1d1a832c 348static int compare_spill_regs PROTO((short *, short *));
546b63fb 349static void reload_as_needed PROTO((rtx, int));
9a881562 350static void forget_old_reloads_1 PROTO((rtx, rtx));
546b63fb
RK
351static int reload_reg_class_lower PROTO((short *, short *));
352static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
353 enum machine_mode));
be7ae2a4
RK
354static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
355 enum machine_mode));
546b63fb
RK
356static int reload_reg_free_p PROTO((int, int, enum reload_type));
357static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
358static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
351aa1c1 359static int reloads_conflict PROTO((int, int));
546b63fb
RK
360static int allocate_reload_reg PROTO((int, rtx, int, int));
361static void choose_reload_regs PROTO((rtx, rtx));
362static void merge_assigned_reloads PROTO((rtx));
363static void emit_reload_insns PROTO((rtx));
364static void delete_output_reload PROTO((rtx, int, rtx));
365static void inc_for_reload PROTO((rtx, rtx, int));
366static int constraint_accepts_reg_p PROTO((char *, rtx));
367static int count_occurrences PROTO((rtx, rtx));
32131a9c 368\f
546b63fb
RK
369/* Initialize the reload pass once per compilation. */
370
32131a9c
RK
371void
372init_reload ()
373{
374 register int i;
375
376 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
377 Set spill_indirect_levels to the number of levels such addressing is
378 permitted, zero if it is not permitted at all. */
379
380 register rtx tem
381 = gen_rtx (MEM, Pmode,
382 gen_rtx (PLUS, Pmode,
383 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
fb3821f7 384 GEN_INT (4)));
32131a9c
RK
385 spill_indirect_levels = 0;
386
387 while (memory_address_p (QImode, tem))
388 {
389 spill_indirect_levels++;
390 tem = gen_rtx (MEM, Pmode, tem);
391 }
392
393 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
394
395 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
396 indirect_symref_ok = memory_address_p (QImode, tem);
397
398 /* See if reg+reg is a valid (and offsettable) address. */
399
65701fd2 400 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
57caa638
RS
401 {
402 tem = gen_rtx (PLUS, Pmode,
3ec2ea3e 403 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
57caa638
RS
404 gen_rtx (REG, Pmode, i));
405 /* This way, we make sure that reg+reg is an offsettable address. */
406 tem = plus_constant (tem, 4);
407
408 if (memory_address_p (QImode, tem))
409 {
410 double_reg_address_ok = 1;
411 break;
412 }
413 }
32131a9c
RK
414
415 /* Initialize obstack for our rtl allocation. */
416 gcc_obstack_init (&reload_obstack);
417 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
32131a9c
RK
418}
419
546b63fb 420/* Main entry point for the reload pass.
32131a9c
RK
421
422 FIRST is the first insn of the function being compiled.
423
424 GLOBAL nonzero means we were called from global_alloc
425 and should attempt to reallocate any pseudoregs that we
426 displace from hard regs we will use for reloads.
427 If GLOBAL is zero, we do not have enough information to do that,
428 so any pseudo reg that is spilled must go to the stack.
429
430 DUMPFILE is the global-reg debugging dump file stream, or 0.
431 If it is nonzero, messages are written to it to describe
432 which registers are seized as reload regs, which pseudo regs
5352b11a 433 are spilled from them, and where the pseudo regs are reallocated to.
32131a9c 434
5352b11a
RS
435 Return value is nonzero if reload failed
436 and we must not do any more for this function. */
437
438int
32131a9c
RK
439reload (first, global, dumpfile)
440 rtx first;
441 int global;
442 FILE *dumpfile;
443{
444 register int class;
8b3e912b 445 register int i, j, k;
32131a9c
RK
446 register rtx insn;
447 register struct elim_table *ep;
448
449 int something_changed;
450 int something_needs_reloads;
451 int something_needs_elimination;
452 int new_basic_block_needs;
a8efe40d
RK
453 enum reg_class caller_save_spill_class = NO_REGS;
454 int caller_save_group_size = 1;
32131a9c 455
5352b11a
RS
456 /* Nonzero means we couldn't get enough spill regs. */
457 int failure = 0;
458
32131a9c
RK
459 /* The basic block number currently being processed for INSN. */
460 int this_block;
461
462 /* Make sure even insns with volatile mem refs are recognizable. */
463 init_recog ();
464
465 /* Enable find_equiv_reg to distinguish insns made by reload. */
466 reload_first_uid = get_max_uid ();
467
468 for (i = 0; i < N_REG_CLASSES; i++)
469 basic_block_needs[i] = 0;
470
0dadecf6
RK
471#ifdef SECONDARY_MEMORY_NEEDED
472 /* Initialize the secondary memory table. */
473 clear_secondary_mem ();
474#endif
475
32131a9c
RK
476 /* Remember which hard regs appear explicitly
477 before we merge into `regs_ever_live' the ones in which
478 pseudo regs have been allocated. */
479 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
480
481 /* We don't have a stack slot for any spill reg yet. */
482 bzero (spill_stack_slot, sizeof spill_stack_slot);
483 bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);
484
a8efe40d
RK
485 /* Initialize the save area information for caller-save, in case some
486 are needed. */
487 init_save_areas ();
a8fdc208 488
32131a9c
RK
489 /* Compute which hard registers are now in use
490 as homes for pseudo registers.
491 This is done here rather than (eg) in global_alloc
492 because this point is reached even if not optimizing. */
493
494 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
495 mark_home_live (i);
496
c307c237
RK
497 for (i = 0; i < scratch_list_length; i++)
498 if (scratch_list[i])
499 mark_scratch_live (scratch_list[i]);
500
32131a9c
RK
501 /* Make sure that the last insn in the chain
502 is not something that needs reloading. */
fb3821f7 503 emit_note (NULL_PTR, NOTE_INSN_DELETED);
32131a9c
RK
504
505 /* Find all the pseudo registers that didn't get hard regs
506 but do have known equivalent constants or memory slots.
507 These include parameters (known equivalent to parameter slots)
508 and cse'd or loop-moved constant memory addresses.
509
510 Record constant equivalents in reg_equiv_constant
511 so they will be substituted by find_reloads.
512 Record memory equivalents in reg_mem_equiv so they can
513 be substituted eventually by altering the REG-rtx's. */
514
515 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
516 bzero (reg_equiv_constant, max_regno * sizeof (rtx));
517 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
518 bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
519 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
520 bzero (reg_equiv_mem, max_regno * sizeof (rtx));
521 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
522 bzero (reg_equiv_init, max_regno * sizeof (rtx));
523 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
524 bzero (reg_equiv_address, max_regno * sizeof (rtx));
525 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
526 bzero (reg_max_ref_width, max_regno * sizeof (int));
208dffa5
RS
527 cannot_omit_stores = (char *) alloca (max_regno);
528 bzero (cannot_omit_stores, max_regno);
32131a9c 529
56f58d3a
RK
530#ifdef SMALL_REGISTER_CLASSES
531 CLEAR_HARD_REG_SET (forbidden_regs);
532#endif
533
32131a9c 534 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
56f58d3a
RK
535 Also find all paradoxical subregs and find largest such for each pseudo.
536 On machines with small register classes, record hard registers that
537 are used for user variables. These can never be used for spills. */
32131a9c
RK
538
539 for (insn = first; insn; insn = NEXT_INSN (insn))
540 {
541 rtx set = single_set (insn);
542
543 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
544 {
fb3821f7 545 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
a8efe40d
RK
546 if (note
547#ifdef LEGITIMATE_PIC_OPERAND_P
a8fdc208 548 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
a8efe40d
RK
549 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
550#endif
551 )
32131a9c
RK
552 {
553 rtx x = XEXP (note, 0);
554 i = REGNO (SET_DEST (set));
555 if (i > LAST_VIRTUAL_REGISTER)
556 {
557 if (GET_CODE (x) == MEM)
558 reg_equiv_memory_loc[i] = x;
559 else if (CONSTANT_P (x))
560 {
561 if (LEGITIMATE_CONSTANT_P (x))
562 reg_equiv_constant[i] = x;
563 else
564 reg_equiv_memory_loc[i]
d445b551 565 = force_const_mem (GET_MODE (SET_DEST (set)), x);
32131a9c
RK
566 }
567 else
568 continue;
569
570 /* If this register is being made equivalent to a MEM
571 and the MEM is not SET_SRC, the equivalencing insn
572 is one with the MEM as a SET_DEST and it occurs later.
573 So don't mark this insn now. */
574 if (GET_CODE (x) != MEM
575 || rtx_equal_p (SET_SRC (set), x))
576 reg_equiv_init[i] = insn;
577 }
578 }
579 }
580
581 /* If this insn is setting a MEM from a register equivalent to it,
582 this is the equivalencing insn. */
583 else if (set && GET_CODE (SET_DEST (set)) == MEM
584 && GET_CODE (SET_SRC (set)) == REG
585 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
586 && rtx_equal_p (SET_DEST (set),
587 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
588 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
589
590 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
591 scan_paradoxical_subregs (PATTERN (insn));
592 }
593
594 /* Does this function require a frame pointer? */
595
596 frame_pointer_needed = (! flag_omit_frame_pointer
597#ifdef EXIT_IGNORE_STACK
598 /* ?? If EXIT_IGNORE_STACK is set, we will not save
599 and restore sp for alloca. So we can't eliminate
600 the frame pointer in that case. At some point,
601 we should improve this by emitting the
602 sp-adjusting insns for this case. */
603 || (current_function_calls_alloca
604 && EXIT_IGNORE_STACK)
605#endif
606 || FRAME_POINTER_REQUIRED);
607
608 num_eliminable = 0;
609
610 /* Initialize the table of registers to eliminate. The way we do this
611 depends on how the eliminable registers were defined. */
612#ifdef ELIMINABLE_REGS
613 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
614 {
615 ep->can_eliminate = ep->can_eliminate_previous
616 = (CAN_ELIMINATE (ep->from, ep->to)
3ec2ea3e
DE
617 && (ep->from != HARD_FRAME_POINTER_REGNUM
618 || ! frame_pointer_needed));
32131a9c
RK
619 }
620#else
621 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
622 = ! frame_pointer_needed;
623#endif
624
625 /* Count the number of eliminable registers and build the FROM and TO
a8fdc208 626 REG rtx's. Note that code in gen_rtx will cause, e.g.,
32131a9c
RK
627 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
628 We depend on this. */
629 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
630 {
631 num_eliminable += ep->can_eliminate;
632 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
633 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
634 }
635
636 num_labels = max_label_num () - get_first_label_num ();
637
638 /* Allocate the tables used to store offset information at labels. */
639 offsets_known_at = (char *) alloca (num_labels);
640 offsets_at
641 = (int (*)[NUM_ELIMINABLE_REGS])
642 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
643
644 offsets_known_at -= get_first_label_num ();
645 offsets_at -= get_first_label_num ();
646
647 /* Alter each pseudo-reg rtx to contain its hard reg number.
648 Assign stack slots to the pseudos that lack hard regs or equivalents.
649 Do not touch virtual registers. */
650
651 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
652 alter_reg (i, -1);
653
654 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
655 because the stack size may be a part of the offset computation for
656 register elimination. */
657 assign_stack_local (BLKmode, 0, 0);
658
659 /* If we have some registers we think can be eliminated, scan all insns to
660 see if there is an insn that sets one of these registers to something
661 other than itself plus a constant. If so, the register cannot be
662 eliminated. Doing this scan here eliminates an extra pass through the
663 main reload loop in the most common case where register elimination
664 cannot be done. */
665 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
666 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
667 || GET_CODE (insn) == CALL_INSN)
668 note_stores (PATTERN (insn), mark_not_eliminable);
669
670#ifndef REGISTER_CONSTRAINTS
671 /* If all the pseudo regs have hard regs,
672 except for those that are never referenced,
673 we know that no reloads are needed. */
674 /* But that is not true if there are register constraints, since
675 in that case some pseudos might be in the wrong kind of hard reg. */
676
677 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
678 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
679 break;
680
b8093d02 681 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
32131a9c
RK
682 return;
683#endif
684
685 /* Compute the order of preference for hard registers to spill.
686 Store them by decreasing preference in potential_reload_regs. */
687
688 order_regs_for_reload ();
689
690 /* So far, no hard regs have been spilled. */
691 n_spills = 0;
692 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
693 spill_reg_order[i] = -1;
694
695 /* On most machines, we can't use any register explicitly used in the
696 rtl as a spill register. But on some, we have to. Those will have
697 taken care to keep the life of hard regs as short as possible. */
698
56f58d3a 699#ifndef SMALL_REGISTER_CLASSES
32131a9c
RK
700 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
701#endif
702
703 /* Spill any hard regs that we know we can't eliminate. */
704 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
705 if (! ep->can_eliminate)
706 {
707 spill_hard_reg (ep->from, global, dumpfile, 1);
708 regs_ever_live[ep->from] = 1;
709 }
710
711 if (global)
712 for (i = 0; i < N_REG_CLASSES; i++)
713 {
714 basic_block_needs[i] = (char *)alloca (n_basic_blocks);
715 bzero (basic_block_needs[i], n_basic_blocks);
716 }
717
b2f15f94
RK
718 /* From now on, we need to emit any moves without making new pseudos. */
719 reload_in_progress = 1;
720
32131a9c
RK
721 /* This loop scans the entire function each go-round
722 and repeats until one repetition spills no additional hard regs. */
723
d45cf215 724 /* This flag is set when a pseudo reg is spilled,
32131a9c
RK
725 to require another pass. Note that getting an additional reload
726 reg does not necessarily imply any pseudo reg was spilled;
727 sometimes we find a reload reg that no pseudo reg was allocated in. */
728 something_changed = 1;
729 /* This flag is set if there are any insns that require reloading. */
730 something_needs_reloads = 0;
731 /* This flag is set if there are any insns that require register
732 eliminations. */
733 something_needs_elimination = 0;
734 while (something_changed)
735 {
736 rtx after_call = 0;
737
738 /* For each class, number of reload regs needed in that class.
739 This is the maximum over all insns of the needs in that class
740 of the individual insn. */
741 int max_needs[N_REG_CLASSES];
742 /* For each class, size of group of consecutive regs
743 that is needed for the reloads of this class. */
744 int group_size[N_REG_CLASSES];
745 /* For each class, max number of consecutive groups needed.
746 (Each group contains group_size[CLASS] consecutive registers.) */
747 int max_groups[N_REG_CLASSES];
748 /* For each class, max number needed of regs that don't belong
749 to any of the groups. */
750 int max_nongroups[N_REG_CLASSES];
751 /* For each class, the machine mode which requires consecutive
752 groups of regs of that class.
753 If two different modes ever require groups of one class,
754 they must be the same size and equally restrictive for that class,
755 otherwise we can't handle the complexity. */
756 enum machine_mode group_mode[N_REG_CLASSES];
5352b11a
RS
757 /* Record the insn where each maximum need is first found. */
758 rtx max_needs_insn[N_REG_CLASSES];
759 rtx max_groups_insn[N_REG_CLASSES];
760 rtx max_nongroups_insn[N_REG_CLASSES];
32131a9c 761 rtx x;
0dadecf6 762 int starting_frame_size = get_frame_size ();
e404a39a 763 static char *reg_class_names[] = REG_CLASS_NAMES;
32131a9c
RK
764
765 something_changed = 0;
766 bzero (max_needs, sizeof max_needs);
767 bzero (max_groups, sizeof max_groups);
768 bzero (max_nongroups, sizeof max_nongroups);
5352b11a
RS
769 bzero (max_needs_insn, sizeof max_needs_insn);
770 bzero (max_groups_insn, sizeof max_groups_insn);
771 bzero (max_nongroups_insn, sizeof max_nongroups_insn);
32131a9c
RK
772 bzero (group_size, sizeof group_size);
773 for (i = 0; i < N_REG_CLASSES; i++)
774 group_mode[i] = VOIDmode;
775
776 /* Keep track of which basic blocks are needing the reloads. */
777 this_block = 0;
778
779 /* Remember whether any element of basic_block_needs
780 changes from 0 to 1 in this pass. */
781 new_basic_block_needs = 0;
782
783 /* Reset all offsets on eliminable registers to their initial values. */
784#ifdef ELIMINABLE_REGS
785 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
786 {
787 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
a8efe40d
RK
788 ep->previous_offset = ep->offset
789 = ep->max_offset = ep->initial_offset;
32131a9c
RK
790 }
791#else
792#ifdef INITIAL_FRAME_POINTER_OFFSET
793 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
794#else
795 if (!FRAME_POINTER_REQUIRED)
796 abort ();
797 reg_eliminate[0].initial_offset = 0;
798#endif
a8efe40d 799 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
32131a9c
RK
800 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
801#endif
802
803 num_not_at_initial_offset = 0;
804
805 bzero (&offsets_known_at[get_first_label_num ()], num_labels);
806
807 /* Set a known offset for each forced label to be at the initial offset
808 of each elimination. We do this because we assume that all
809 computed jumps occur from a location where each elimination is
810 at its initial offset. */
811
812 for (x = forced_labels; x; x = XEXP (x, 1))
813 if (XEXP (x, 0))
fb3821f7 814 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
32131a9c
RK
815
816 /* For each pseudo register that has an equivalent location defined,
817 try to eliminate any eliminable registers (such as the frame pointer)
818 assuming initial offsets for the replacement register, which
819 is the normal case.
820
821 If the resulting location is directly addressable, substitute
822 the MEM we just got directly for the old REG.
823
824 If it is not addressable but is a constant or the sum of a hard reg
825 and constant, it is probably not addressable because the constant is
826 out of range, in that case record the address; we will generate
827 hairy code to compute the address in a register each time it is
6491dbbb
RK
828 needed. Similarly if it is a hard register, but one that is not
829 valid as an address register.
32131a9c
RK
830
831 If the location is not addressable, but does not have one of the
832 above forms, assign a stack slot. We have to do this to avoid the
833 potential of producing lots of reloads if, e.g., a location involves
834 a pseudo that didn't get a hard register and has an equivalent memory
835 location that also involves a pseudo that didn't get a hard register.
836
837 Perhaps at some point we will improve reload_when_needed handling
838 so this problem goes away. But that's very hairy. */
839
840 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
841 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
842 {
fb3821f7 843 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
32131a9c
RK
844
845 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
846 XEXP (x, 0)))
847 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
848 else if (CONSTANT_P (XEXP (x, 0))
6491dbbb
RK
849 || (GET_CODE (XEXP (x, 0)) == REG
850 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
32131a9c
RK
851 || (GET_CODE (XEXP (x, 0)) == PLUS
852 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
853 && (REGNO (XEXP (XEXP (x, 0), 0))
854 < FIRST_PSEUDO_REGISTER)
855 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
856 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
857 else
858 {
859 /* Make a new stack slot. Then indicate that something
a8fdc208 860 changed so we go back and recompute offsets for
32131a9c
RK
861 eliminable registers because the allocation of memory
862 below might change some offset. reg_equiv_{mem,address}
863 will be set up for this pseudo on the next pass around
864 the loop. */
865 reg_equiv_memory_loc[i] = 0;
866 reg_equiv_init[i] = 0;
867 alter_reg (i, -1);
868 something_changed = 1;
869 }
870 }
a8fdc208 871
d45cf215 872 /* If we allocated another pseudo to the stack, redo elimination
32131a9c
RK
873 bookkeeping. */
874 if (something_changed)
875 continue;
876
a8efe40d
RK
877 /* If caller-saves needs a group, initialize the group to include
878 the size and mode required for caller-saves. */
879
880 if (caller_save_group_size > 1)
881 {
882 group_mode[(int) caller_save_spill_class] = Pmode;
883 group_size[(int) caller_save_spill_class] = caller_save_group_size;
884 }
885
32131a9c
RK
886 /* Compute the most additional registers needed by any instruction.
887 Collect information separately for each class of regs. */
888
889 for (insn = first; insn; insn = NEXT_INSN (insn))
890 {
891 if (global && this_block + 1 < n_basic_blocks
892 && insn == basic_block_head[this_block+1])
893 ++this_block;
894
895 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
896 might include REG_LABEL), we need to see what effects this
897 has on the known offsets at labels. */
898
899 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
900 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
901 && REG_NOTES (insn) != 0))
902 set_label_offsets (insn, insn, 0);
903
904 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
905 {
906 /* Nonzero means don't use a reload reg that overlaps
907 the place where a function value can be returned. */
908 rtx avoid_return_reg = 0;
909
910 rtx old_body = PATTERN (insn);
911 int old_code = INSN_CODE (insn);
912 rtx old_notes = REG_NOTES (insn);
913 int did_elimination = 0;
546b63fb
RK
914
915 /* To compute the number of reload registers of each class
 916		       needed for an insn, we must simulate what choose_reload_regs
917 can do. We do this by splitting an insn into an "input" and
918 an "output" part. RELOAD_OTHER reloads are used in both.
919 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
920 which must be live over the entire input section of reloads,
921 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
922 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
923 inputs.
924
925 The registers needed for output are RELOAD_OTHER and
926 RELOAD_FOR_OUTPUT, which are live for the entire output
927 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
928 reloads for each operand.
929
930 The total number of registers needed is the maximum of the
931 inputs and outputs. */
932
8b3e912b 933 struct needs
32131a9c 934 {
8b3e912b
RK
935 /* [0] is normal, [1] is nongroup. */
936 int regs[2][N_REG_CLASSES];
937 int groups[N_REG_CLASSES];
938 };
939
940 /* Each `struct needs' corresponds to one RELOAD_... type. */
941 struct {
942 struct needs other;
943 struct needs input;
944 struct needs output;
945 struct needs insn;
946 struct needs other_addr;
947 struct needs op_addr;
948 struct needs in_addr[MAX_RECOG_OPERANDS];
949 struct needs out_addr[MAX_RECOG_OPERANDS];
950 } insn_needs;
32131a9c
RK
951
952 /* If needed, eliminate any eliminable registers. */
953 if (num_eliminable)
954 did_elimination = eliminate_regs_in_insn (insn, 0);
955
956#ifdef SMALL_REGISTER_CLASSES
957 /* Set avoid_return_reg if this is an insn
958 that might use the value of a function call. */
959 if (GET_CODE (insn) == CALL_INSN)
960 {
961 if (GET_CODE (PATTERN (insn)) == SET)
962 after_call = SET_DEST (PATTERN (insn));
963 else if (GET_CODE (PATTERN (insn)) == PARALLEL
964 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
965 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
966 else
967 after_call = 0;
968 }
969 else if (after_call != 0
970 && !(GET_CODE (PATTERN (insn)) == SET
971 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
972 {
2b979c57 973 if (reg_referenced_p (after_call, PATTERN (insn)))
32131a9c
RK
974 avoid_return_reg = after_call;
975 after_call = 0;
976 }
977#endif /* SMALL_REGISTER_CLASSES */
978
979 /* Analyze the instruction. */
980 find_reloads (insn, 0, spill_indirect_levels, global,
981 spill_reg_order);
982
983 /* Remember for later shortcuts which insns had any reloads or
984 register eliminations.
985
986 One might think that it would be worthwhile to mark insns
987 that need register replacements but not reloads, but this is
988 not safe because find_reloads may do some manipulation of
989 the insn (such as swapping commutative operands), which would
990 be lost when we restore the old pattern after register
991 replacement. So the actions of find_reloads must be redone in
992 subsequent passes or in reload_as_needed.
993
994 However, it is safe to mark insns that need reloads
995 but not register replacement. */
996
997 PUT_MODE (insn, (did_elimination ? QImode
998 : n_reloads ? HImode
546b63fb 999 : GET_MODE (insn) == DImode ? DImode
32131a9c
RK
1000 : VOIDmode));
1001
1002 /* Discard any register replacements done. */
1003 if (did_elimination)
1004 {
1005 obstack_free (&reload_obstack, reload_firstobj);
1006 PATTERN (insn) = old_body;
1007 INSN_CODE (insn) = old_code;
1008 REG_NOTES (insn) = old_notes;
1009 something_needs_elimination = 1;
1010 }
1011
a8efe40d 1012 /* If this insn has no reloads, we need not do anything except
a8fdc208 1013 in the case of a CALL_INSN when we have caller-saves and
a8efe40d
RK
1014 caller-save needs reloads. */
1015
1016 if (n_reloads == 0
1017 && ! (GET_CODE (insn) == CALL_INSN
1018 && caller_save_spill_class != NO_REGS))
32131a9c
RK
1019 continue;
1020
1021 something_needs_reloads = 1;
8b3e912b 1022 bzero (&insn_needs, sizeof insn_needs);
32131a9c
RK
1023
1024 /* Count each reload once in every class
1025 containing the reload's own class. */
1026
1027 for (i = 0; i < n_reloads; i++)
1028 {
1029 register enum reg_class *p;
e85ddd99 1030 enum reg_class class = reload_reg_class[i];
32131a9c
RK
1031 int size;
1032 enum machine_mode mode;
ce0e109b 1033 int nongroup_need;
8b3e912b 1034 struct needs *this_needs;
32131a9c
RK
1035
1036 /* Don't count the dummy reloads, for which one of the
1037 regs mentioned in the insn can be used for reloading.
1038 Don't count optional reloads.
1039 Don't count reloads that got combined with others. */
1040 if (reload_reg_rtx[i] != 0
1041 || reload_optional[i] != 0
1042 || (reload_out[i] == 0 && reload_in[i] == 0
1043 && ! reload_secondary_p[i]))
1044 continue;
1045
e85ddd99
RK
1046 /* Show that a reload register of this class is needed
1047 in this basic block. We do not use insn_needs and
1048 insn_groups because they are overly conservative for
1049 this purpose. */
1050 if (global && ! basic_block_needs[(int) class][this_block])
1051 {
1052 basic_block_needs[(int) class][this_block] = 1;
1053 new_basic_block_needs = 1;
1054 }
1055
ee249c09
RK
1056
1057 mode = reload_inmode[i];
1058 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1059 mode = reload_outmode[i];
1060 size = CLASS_MAX_NREGS (class, mode);
1061
8b3e912b
RK
1062 /* If this class doesn't want a group, determine if we have
1063 a nongroup need or a regular need. We have a nongroup
1064 need if this reload conflicts with a group reload whose
1065 class intersects with this reload's class. */
ce0e109b
RK
1066
1067 nongroup_need = 0;
ee249c09 1068 if (size == 1)
b8f4c738
RK
1069 for (j = 0; j < n_reloads; j++)
1070 if ((CLASS_MAX_NREGS (reload_reg_class[j],
255cf280
RK
1071 (GET_MODE_SIZE (reload_outmode[j])
1072 > GET_MODE_SIZE (reload_inmode[j]))
1073 ? reload_outmode[j]
1074 : reload_inmode[j])
b8f4c738
RK
1075 > 1)
1076 && reloads_conflict (i, j)
ce0e109b
RK
1077 && reg_classes_intersect_p (class,
1078 reload_reg_class[j]))
1079 {
1080 nongroup_need = 1;
1081 break;
1082 }
1083
32131a9c
RK
1084 /* Decide which time-of-use to count this reload for. */
1085 switch (reload_when_needed[i])
1086 {
1087 case RELOAD_OTHER:
8b3e912b 1088 this_needs = &insn_needs.other;
32131a9c 1089 break;
546b63fb 1090 case RELOAD_FOR_INPUT:
8b3e912b 1091 this_needs = &insn_needs.input;
32131a9c 1092 break;
546b63fb 1093 case RELOAD_FOR_OUTPUT:
8b3e912b 1094 this_needs = &insn_needs.output;
32131a9c 1095 break;
546b63fb 1096 case RELOAD_FOR_INSN:
8b3e912b 1097 this_needs = &insn_needs.insn;
546b63fb 1098 break;
546b63fb 1099 case RELOAD_FOR_OTHER_ADDRESS:
8b3e912b 1100 this_needs = &insn_needs.other_addr;
546b63fb 1101 break;
546b63fb 1102 case RELOAD_FOR_INPUT_ADDRESS:
8b3e912b 1103 this_needs = &insn_needs.in_addr[reload_opnum[i]];
546b63fb 1104 break;
546b63fb 1105 case RELOAD_FOR_OUTPUT_ADDRESS:
8b3e912b 1106 this_needs = &insn_needs.out_addr[reload_opnum[i]];
546b63fb 1107 break;
32131a9c 1108 case RELOAD_FOR_OPERAND_ADDRESS:
8b3e912b 1109 this_needs = &insn_needs.op_addr;
32131a9c
RK
1110 break;
1111 }
1112
32131a9c
RK
1113 if (size > 1)
1114 {
1115 enum machine_mode other_mode, allocate_mode;
1116
1117 /* Count number of groups needed separately from
1118 number of individual regs needed. */
8b3e912b 1119 this_needs->groups[(int) class]++;
e85ddd99 1120 p = reg_class_superclasses[(int) class];
32131a9c 1121 while (*p != LIM_REG_CLASSES)
8b3e912b 1122 this_needs->groups[(int) *p++]++;
32131a9c
RK
1123
1124 /* Record size and mode of a group of this class. */
1125 /* If more than one size group is needed,
1126 make all groups the largest needed size. */
e85ddd99 1127 if (group_size[(int) class] < size)
32131a9c 1128 {
e85ddd99 1129 other_mode = group_mode[(int) class];
32131a9c
RK
1130 allocate_mode = mode;
1131
e85ddd99
RK
1132 group_size[(int) class] = size;
1133 group_mode[(int) class] = mode;
32131a9c
RK
1134 }
1135 else
1136 {
1137 other_mode = mode;
e85ddd99 1138 allocate_mode = group_mode[(int) class];
32131a9c
RK
1139 }
1140
1141 /* Crash if two dissimilar machine modes both need
1142 groups of consecutive regs of the same class. */
1143
8b3e912b 1144 if (other_mode != VOIDmode && other_mode != allocate_mode
32131a9c 1145 && ! modes_equiv_for_class_p (allocate_mode,
8b3e912b 1146 other_mode, class))
32131a9c
RK
1147 abort ();
1148 }
1149 else if (size == 1)
1150 {
8b3e912b 1151 this_needs->regs[nongroup_need][(int) class] += 1;
e85ddd99 1152 p = reg_class_superclasses[(int) class];
32131a9c 1153 while (*p != LIM_REG_CLASSES)
8b3e912b 1154 this_needs->regs[nongroup_need][(int) *p++] += 1;
32131a9c
RK
1155 }
1156 else
1157 abort ();
1158 }
1159
1160 /* All reloads have been counted for this insn;
1161 now merge the various times of use.
1162 This sets insn_needs, etc., to the maximum total number
1163 of registers needed at any point in this insn. */
1164
1165 for (i = 0; i < N_REG_CLASSES; i++)
1166 {
546b63fb
RK
1167 int in_max, out_max;
1168
8b3e912b
RK
1169 /* Compute normal and nongroup needs. */
1170 for (j = 0; j <= 1; j++)
546b63fb 1171 {
8b3e912b
RK
1172 for (in_max = 0, out_max = 0, k = 0;
1173 k < reload_n_operands; k++)
1174 {
1175 in_max
1176 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1177 out_max
1178 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1179 }
546b63fb 1180
8b3e912b
RK
1181 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1182 and operand addresses but not things used to reload
1183 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1184 don't conflict with things needed to reload inputs or
1185 outputs. */
546b63fb 1186
8b3e912b
RK
1187 in_max = MAX (in_max, insn_needs.op_addr.regs[j][i]);
1188 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
546b63fb 1189
8b3e912b
RK
1190 insn_needs.input.regs[j][i]
1191 = MAX (insn_needs.input.regs[j][i]
1192 + insn_needs.op_addr.regs[j][i]
1193 + insn_needs.insn.regs[j][i],
1194 in_max + insn_needs.input.regs[j][i]);
546b63fb 1195
8b3e912b
RK
1196 insn_needs.output.regs[j][i] += out_max;
1197 insn_needs.other.regs[j][i]
1198 += MAX (MAX (insn_needs.input.regs[j][i],
1199 insn_needs.output.regs[j][i]),
1200 insn_needs.other_addr.regs[j][i]);
546b63fb 1201
ce0e109b
RK
1202 }
1203
8b3e912b 1204 /* Now compute group needs. */
546b63fb
RK
1205 for (in_max = 0, out_max = 0, j = 0;
1206 j < reload_n_operands; j++)
1207 {
8b3e912b
RK
1208 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1209 out_max
1210 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
546b63fb
RK
1211 }
1212
8b3e912b
RK
1213 in_max = MAX (in_max, insn_needs.op_addr.groups[i]);
1214 out_max = MAX (out_max, insn_needs.insn.groups[i]);
546b63fb 1215
8b3e912b
RK
1216 insn_needs.input.groups[i]
1217 = MAX (insn_needs.input.groups[i]
1218 + insn_needs.op_addr.groups[i]
1219 + insn_needs.insn.groups[i],
1220 in_max + insn_needs.input.groups[i]);
546b63fb 1221
8b3e912b
RK
1222 insn_needs.output.groups[i] += out_max;
1223 insn_needs.other.groups[i]
1224 += MAX (MAX (insn_needs.input.groups[i],
1225 insn_needs.output.groups[i]),
1226 insn_needs.other_addr.groups[i]);
546b63fb
RK
1227 }
1228
a8efe40d
RK
1229 /* If this is a CALL_INSN and caller-saves will need
1230 a spill register, act as if the spill register is
1231 needed for this insn. However, the spill register
1232 can be used by any reload of this insn, so we only
1233 need do something if no need for that class has
a8fdc208 1234 been recorded.
a8efe40d
RK
1235
1236 The assumption that every CALL_INSN will trigger a
1237 caller-save is highly conservative, however, the number
1238 of cases where caller-saves will need a spill register but
1239 a block containing a CALL_INSN won't need a spill register
1240 of that class should be quite rare.
1241
1242 If a group is needed, the size and mode of the group will
d45cf215 1243 have been set up at the beginning of this loop. */
a8efe40d
RK
1244
1245 if (GET_CODE (insn) == CALL_INSN
1246 && caller_save_spill_class != NO_REGS)
1247 {
8b3e912b
RK
1248 /* See if this register would conflict with any reload
1249 that needs a group. */
1250 int nongroup_need = 0;
1251 int *caller_save_needs;
1252
1253 for (j = 0; j < n_reloads; j++)
1254 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1255 (GET_MODE_SIZE (reload_outmode[j])
1256 > GET_MODE_SIZE (reload_inmode[j]))
1257 ? reload_outmode[j]
1258 : reload_inmode[j])
1259 > 1)
1260 && reg_classes_intersect_p (caller_save_spill_class,
1261 reload_reg_class[j]))
1262 {
1263 nongroup_need = 1;
1264 break;
1265 }
1266
1267 caller_save_needs
1268 = (caller_save_group_size > 1
1269 ? insn_needs.other.groups
1270 : insn_needs.other.regs[nongroup_need]);
a8efe40d
RK
1271
1272 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1273 {
1274 register enum reg_class *p
1275 = reg_class_superclasses[(int) caller_save_spill_class];
1276
1277 caller_save_needs[(int) caller_save_spill_class]++;
1278
1279 while (*p != LIM_REG_CLASSES)
0aaa6af8 1280 caller_save_needs[(int) *p++] += 1;
a8efe40d
RK
1281 }
1282
8b3e912b 1283 /* Show that this basic block will need a register of
d1c1397e
RS
1284 this class. */
1285
8b3e912b
RK
1286 if (global
1287 && ! (basic_block_needs[(int) caller_save_spill_class]
1288 [this_block]))
1289 {
1290 basic_block_needs[(int) caller_save_spill_class]
1291 [this_block] = 1;
1292 new_basic_block_needs = 1;
1293 }
a8efe40d
RK
1294 }
1295
32131a9c
RK
1296#ifdef SMALL_REGISTER_CLASSES
1297 /* If this insn stores the value of a function call,
1298 and that value is in a register that has been spilled,
1299 and if the insn needs a reload in a class
1300 that might use that register as the reload register,
 1301		     then add an extra need in that class.
1302 This makes sure we have a register available that does
1303 not overlap the return value. */
8b3e912b 1304
32131a9c
RK
1305 if (avoid_return_reg)
1306 {
1307 int regno = REGNO (avoid_return_reg);
1308 int nregs
1309 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1310 int r;
546b63fb
RK
1311 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1312
1313 /* First compute the "basic needs", which counts a
1314 need only in the smallest class in which it
1315 is required. */
1316
8b3e912b
RK
1317 bcopy (insn_needs.other.regs[0], basic_needs,
1318 sizeof basic_needs);
1319 bcopy (insn_needs.other.groups, basic_groups,
1320 sizeof basic_groups);
546b63fb
RK
1321
1322 for (i = 0; i < N_REG_CLASSES; i++)
1323 {
1324 enum reg_class *p;
1325
1326 if (basic_needs[i] >= 0)
1327 for (p = reg_class_superclasses[i];
1328 *p != LIM_REG_CLASSES; p++)
1329 basic_needs[(int) *p] -= basic_needs[i];
1330
1331 if (basic_groups[i] >= 0)
1332 for (p = reg_class_superclasses[i];
1333 *p != LIM_REG_CLASSES; p++)
1334 basic_groups[(int) *p] -= basic_groups[i];
1335 }
1336
1337 /* Now count extra regs if there might be a conflict with
1338 the return value register.
1339
1340 ??? This is not quite correct because we don't properly
1341 handle the case of groups, but if we end up doing
1342 something wrong, it either will end up not mattering or
1343 we will abort elsewhere. */
1344
32131a9c
RK
1345 for (r = regno; r < regno + nregs; r++)
1346 if (spill_reg_order[r] >= 0)
1347 for (i = 0; i < N_REG_CLASSES; i++)
1348 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1349 {
546b63fb
RK
1350 if (basic_needs[i] > 0 || basic_groups[i] > 0)
1351 {
1352 enum reg_class *p;
1353
8b3e912b 1354 insn_needs.other.regs[0][i]++;
546b63fb
RK
1355 p = reg_class_superclasses[i];
1356 while (*p != LIM_REG_CLASSES)
8b3e912b 1357 insn_needs.other.regs[0][(int) *p++]++;
546b63fb 1358 }
32131a9c 1359 }
32131a9c
RK
1360 }
1361#endif /* SMALL_REGISTER_CLASSES */
1362
1363 /* For each class, collect maximum need of any insn. */
1364
1365 for (i = 0; i < N_REG_CLASSES; i++)
1366 {
8b3e912b 1367 if (max_needs[i] < insn_needs.other.regs[0][i])
5352b11a 1368 {
8b3e912b 1369 max_needs[i] = insn_needs.other.regs[0][i];
5352b11a
RS
1370 max_needs_insn[i] = insn;
1371 }
8b3e912b 1372 if (max_groups[i] < insn_needs.other.groups[i])
5352b11a 1373 {
8b3e912b 1374 max_groups[i] = insn_needs.other.groups[i];
5352b11a
RS
1375 max_groups_insn[i] = insn;
1376 }
8b3e912b 1377 if (max_nongroups[i] < insn_needs.other.regs[1][i])
ce0e109b 1378 {
8b3e912b 1379 max_nongroups[i] = insn_needs.other.regs[1][i];
ce0e109b
RK
1380 max_nongroups_insn[i] = insn;
1381 }
32131a9c
RK
1382 }
1383 }
1384 /* Note that there is a continue statement above. */
1385 }
1386
0dadecf6
RK
1387 /* If we allocated any new memory locations, make another pass
1388 since it might have changed elimination offsets. */
1389 if (starting_frame_size != get_frame_size ())
1390 something_changed = 1;
1391
e404a39a
RK
1392 if (dumpfile)
1393 for (i = 0; i < N_REG_CLASSES; i++)
1394 {
1395 if (max_needs[i] > 0)
1396 fprintf (dumpfile,
1397 ";; Need %d reg%s of class %s (for insn %d).\n",
1398 max_needs[i], max_needs[i] == 1 ? "" : "s",
1399 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1400 if (max_nongroups[i] > 0)
1401 fprintf (dumpfile,
1402 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1403 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1404 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1405 if (max_groups[i] > 0)
1406 fprintf (dumpfile,
1407 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1408 max_groups[i], max_groups[i] == 1 ? "" : "s",
1409 mode_name[(int) group_mode[i]],
1410 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1411 }
1412
d445b551 1413 /* If we have caller-saves, set up the save areas and see if caller-save
a8efe40d 1414 will need a spill register. */
32131a9c 1415
d445b551 1416 if (caller_save_needed
a8efe40d
RK
1417 && ! setup_save_areas (&something_changed)
1418 && caller_save_spill_class == NO_REGS)
32131a9c 1419 {
a8efe40d
RK
1420 /* The class we will need depends on whether the machine
1421 supports the sum of two registers for an address; see
1422 find_address_reloads for details. */
1423
a8fdc208 1424 caller_save_spill_class
a8efe40d
RK
1425 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1426 caller_save_group_size
1427 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1428 something_changed = 1;
32131a9c
RK
1429 }
1430
5c23c401
RK
1431 /* See if anything that happened changes which eliminations are valid.
1432 For example, on the Sparc, whether or not the frame pointer can
1433 be eliminated can depend on what registers have been used. We need
1434 not check some conditions again (such as flag_omit_frame_pointer)
1435 since they can't have changed. */
1436
1437 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3ec2ea3e 1438 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
5c23c401
RK
1439#ifdef ELIMINABLE_REGS
1440 || ! CAN_ELIMINATE (ep->from, ep->to)
1441#endif
1442 )
1443 ep->can_eliminate = 0;
1444
32131a9c
RK
1445 /* Look for the case where we have discovered that we can't replace
1446 register A with register B and that means that we will now be
1447 trying to replace register A with register C. This means we can
1448 no longer replace register C with register B and we need to disable
1449 such an elimination, if it exists. This occurs often with A == ap,
1450 B == sp, and C == fp. */
a8fdc208 1451
32131a9c
RK
1452 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1453 {
1454 struct elim_table *op;
1455 register int new_to = -1;
1456
1457 if (! ep->can_eliminate && ep->can_eliminate_previous)
1458 {
1459 /* Find the current elimination for ep->from, if there is a
1460 new one. */
1461 for (op = reg_eliminate;
1462 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1463 if (op->from == ep->from && op->can_eliminate)
1464 {
1465 new_to = op->to;
1466 break;
1467 }
1468
1469 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1470 disable it. */
1471 for (op = reg_eliminate;
1472 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1473 if (op->from == new_to && op->to == ep->to)
1474 op->can_eliminate = 0;
1475 }
1476 }
1477
1478 /* See if any registers that we thought we could eliminate the previous
1479 time are no longer eliminable. If so, something has changed and we
1480 must spill the register. Also, recompute the number of eliminable
1481 registers and see if the frame pointer is needed; it is if there is
1482 no elimination of the frame pointer that we can perform. */
1483
1484 frame_pointer_needed = 1;
1485 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1486 {
3ec2ea3e
DE
1487 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1488 && ep->to != HARD_FRAME_POINTER_REGNUM)
32131a9c
RK
1489 frame_pointer_needed = 0;
1490
1491 if (! ep->can_eliminate && ep->can_eliminate_previous)
1492 {
1493 ep->can_eliminate_previous = 0;
1494 spill_hard_reg (ep->from, global, dumpfile, 1);
1495 regs_ever_live[ep->from] = 1;
1496 something_changed = 1;
1497 num_eliminable--;
1498 }
1499 }
1500
1501 /* If all needs are met, we win. */
1502
1503 for (i = 0; i < N_REG_CLASSES; i++)
1504 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1505 break;
1506 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1507 break;
1508
546b63fb
RK
1509 /* Not all needs are met; must spill some hard regs. */
1510
1511 /* Put all registers spilled so far back in potential_reload_regs, but
1512 put them at the front, since we've already spilled most of the
 1513	 pseudos in them (we might have left some pseudos unspilled if they
 1514	 were in a block that didn't need any spill registers of a conflicting
 1515	 class).  We used to try to mark off the need for those registers,
1516 but doing so properly is very complex and reallocating them is the
1517 simpler approach. First, "pack" potential_reload_regs by pushing
1518 any nonnegative entries towards the end. That will leave room
1519 for the registers we already spilled.
1520
1521 Also, undo the marking of the spill registers from the last time
 1522	 around in FORBIDDEN_REGS since we will probably be allocating
1523 them again below.
1524
1525 ??? It is theoretically possible that we might end up not using one
1526 of our previously-spilled registers in this allocation, even though
1527 they are at the head of the list. It's not clear what to do about
1528 this, but it was no better before, when we marked off the needs met
1529 by the previously-spilled registers. With the current code, globals
1530 can be allocated into these registers, but locals cannot. */
1531
1532 if (n_spills)
1533 {
1534 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1535 if (potential_reload_regs[i] != -1)
1536 potential_reload_regs[j--] = potential_reload_regs[i];
32131a9c 1537
546b63fb
RK
1538 for (i = 0; i < n_spills; i++)
1539 {
1540 potential_reload_regs[i] = spill_regs[i];
1541 spill_reg_order[spill_regs[i]] = -1;
1542 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1543 }
32131a9c 1544
546b63fb
RK
1545 n_spills = 0;
1546 }
32131a9c
RK
1547
1548 /* Now find more reload regs to satisfy the remaining need
1549 Do it by ascending class number, since otherwise a reg
1550 might be spilled for a big class and might fail to count
1551 for a smaller class even though it belongs to that class.
1552
1553 Count spilled regs in `spills', and add entries to
1554 `spill_regs' and `spill_reg_order'.
1555
1556 ??? Note there is a problem here.
1557 When there is a need for a group in a high-numbered class,
1558 and also need for non-group regs that come from a lower class,
1559 the non-group regs are chosen first. If there aren't many regs,
1560 they might leave no room for a group.
1561
1562 This was happening on the 386. To fix it, we added the code
1563 that calls possible_group_p, so that the lower class won't
1564 break up the last possible group.
1565
1566 Really fixing the problem would require changes above
1567 in counting the regs already spilled, and in choose_reload_regs.
1568 It might be hard to avoid introducing bugs there. */
1569
546b63fb
RK
1570 CLEAR_HARD_REG_SET (counted_for_groups);
1571 CLEAR_HARD_REG_SET (counted_for_nongroups);
1572
32131a9c
RK
1573 for (class = 0; class < N_REG_CLASSES; class++)
1574 {
1575 /* First get the groups of registers.
1576 If we got single registers first, we might fragment
1577 possible groups. */
1578 while (max_groups[class] > 0)
1579 {
1580 /* If any single spilled regs happen to form groups,
1581 count them now. Maybe we don't really need
1582 to spill another group. */
1583 count_possible_groups (group_size, group_mode, max_groups);
1584
93193ab5
RK
1585 if (max_groups[class] <= 0)
1586 break;
1587
32131a9c
RK
1588 /* Groups of size 2 (the only groups used on most machines)
1589 are treated specially. */
1590 if (group_size[class] == 2)
1591 {
1592 /* First, look for a register that will complete a group. */
1593 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1594 {
32131a9c 1595 int other;
546b63fb
RK
1596
1597 j = potential_reload_regs[i];
32131a9c
RK
1598 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1599 &&
1600 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1601 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1602 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1603 && HARD_REGNO_MODE_OK (other, group_mode[class])
1604 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1605 other)
1606 /* We don't want one part of another group.
1607 We could get "two groups" that overlap! */
1608 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1609 ||
1610 (j < FIRST_PSEUDO_REGISTER - 1
1611 && (other = j + 1, spill_reg_order[other] >= 0)
1612 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1613 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1614 && HARD_REGNO_MODE_OK (j, group_mode[class])
1615 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1616 other)
1617 && ! TEST_HARD_REG_BIT (counted_for_groups,
1618 other))))
1619 {
1620 register enum reg_class *p;
1621
1622 /* We have found one that will complete a group,
1623 so count off one group as provided. */
1624 max_groups[class]--;
1625 p = reg_class_superclasses[class];
1626 while (*p != LIM_REG_CLASSES)
1627 max_groups[(int) *p++]--;
1628
1629 /* Indicate both these regs are part of a group. */
1630 SET_HARD_REG_BIT (counted_for_groups, j);
1631 SET_HARD_REG_BIT (counted_for_groups, other);
1632 break;
1633 }
1634 }
1635 /* We can't complete a group, so start one. */
92b0556d
RS
1636#ifdef SMALL_REGISTER_CLASSES
1637 /* Look for a pair neither of which is explicitly used. */
1638 if (i == FIRST_PSEUDO_REGISTER)
1639 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1640 {
1641 int k;
1642 j = potential_reload_regs[i];
1643 /* Verify that J+1 is a potential reload reg. */
1644 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1645 if (potential_reload_regs[k] == j + 1)
1646 break;
1647 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1648 && k < FIRST_PSEUDO_REGISTER
1649 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1650 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1651 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1652 && HARD_REGNO_MODE_OK (j, group_mode[class])
1653 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1654 j + 1)
1655 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1656 /* Reject J at this stage
1657 if J+1 was explicitly used. */
1658 && ! regs_explicitly_used[j + 1])
1659 break;
1660 }
1661#endif
1662 /* Now try any group at all
1663 whose registers are not in bad_spill_regs. */
32131a9c
RK
1664 if (i == FIRST_PSEUDO_REGISTER)
1665 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1666 {
57697575 1667 int k;
546b63fb 1668 j = potential_reload_regs[i];
57697575
RS
1669 /* Verify that J+1 is a potential reload reg. */
1670 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1671 if (potential_reload_regs[k] == j + 1)
1672 break;
32131a9c 1673 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
57697575 1674 && k < FIRST_PSEUDO_REGISTER
32131a9c
RK
1675 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1676 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1677 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1678 && HARD_REGNO_MODE_OK (j, group_mode[class])
1679 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
ee9f63c6
RS
1680 j + 1)
1681 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
32131a9c
RK
1682 break;
1683 }
1684
1685 /* I should be the index in potential_reload_regs
1686 of the new reload reg we have found. */
1687
5352b11a
RS
1688 if (i >= FIRST_PSEUDO_REGISTER)
1689 {
1690 /* There are no groups left to spill. */
1691 spill_failure (max_groups_insn[class]);
1692 failure = 1;
1693 goto failed;
1694 }
1695 else
1696 something_changed
fb3821f7 1697 |= new_spill_reg (i, class, max_needs, NULL_PTR,
5352b11a 1698 global, dumpfile);
32131a9c
RK
1699 }
1700 else
1701 {
1702 /* For groups of more than 2 registers,
1703 look for a sufficient sequence of unspilled registers,
1704 and spill them all at once. */
1705 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1706 {
32131a9c 1707 int k;
546b63fb
RK
1708
1709 j = potential_reload_regs[i];
9d1a4667
RS
1710 if (j >= 0
1711 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
32131a9c
RK
1712 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1713 {
1714 /* Check each reg in the sequence. */
1715 for (k = 0; k < group_size[class]; k++)
1716 if (! (spill_reg_order[j + k] < 0
1717 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1718 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1719 break;
1720 /* We got a full sequence, so spill them all. */
1721 if (k == group_size[class])
1722 {
1723 register enum reg_class *p;
1724 for (k = 0; k < group_size[class]; k++)
1725 {
1726 int idx;
1727 SET_HARD_REG_BIT (counted_for_groups, j + k);
1728 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1729 if (potential_reload_regs[idx] == j + k)
1730 break;
9d1a4667
RS
1731 something_changed
1732 |= new_spill_reg (idx, class,
1733 max_needs, NULL_PTR,
1734 global, dumpfile);
32131a9c
RK
1735 }
1736
1737 /* We have found one that will complete a group,
1738 so count off one group as provided. */
1739 max_groups[class]--;
1740 p = reg_class_superclasses[class];
1741 while (*p != LIM_REG_CLASSES)
1742 max_groups[(int) *p++]--;
1743
1744 break;
1745 }
1746 }
1747 }
fa52261e 1748 /* We couldn't find any registers for this reload.
9d1a4667
RS
1749 Avoid going into an infinite loop. */
1750 if (i >= FIRST_PSEUDO_REGISTER)
1751 {
1752 /* There are no groups left. */
1753 spill_failure (max_groups_insn[class]);
1754 failure = 1;
1755 goto failed;
1756 }
32131a9c
RK
1757 }
1758 }
1759
1760 /* Now similarly satisfy all need for single registers. */
1761
1762 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1763 {
9a6cde3a
RS
1764#ifdef SMALL_REGISTER_CLASSES
1765 /* This should be right for all machines, but only the 386
1766 is known to need it, so this conditional plays safe.
1767 ??? For 2.5, try making this unconditional. */
1768 /* If we spilled enough regs, but they weren't counted
1769 against the non-group need, see if we can count them now.
1770 If so, we can avoid some actual spilling. */
1771 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1772 for (i = 0; i < n_spills; i++)
1773 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1774 spill_regs[i])
1775 && !TEST_HARD_REG_BIT (counted_for_groups,
1776 spill_regs[i])
1777 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1778 spill_regs[i])
1779 && max_nongroups[class] > 0)
1780 {
1781 register enum reg_class *p;
1782
1783 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1784 max_nongroups[class]--;
1785 p = reg_class_superclasses[class];
1786 while (*p != LIM_REG_CLASSES)
1787 max_nongroups[(int) *p++]--;
1788 }
1789 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1790 break;
1791#endif
1792
32131a9c
RK
1793 /* Consider the potential reload regs that aren't
1794 yet in use as reload regs, in order of preference.
1795 Find the most preferred one that's in this class. */
1796
1797 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1798 if (potential_reload_regs[i] >= 0
1799 && TEST_HARD_REG_BIT (reg_class_contents[class],
1800 potential_reload_regs[i])
1801 /* If this reg will not be available for groups,
1802 pick one that does not foreclose possible groups.
1803 This is a kludge, and not very general,
1804 but it should be sufficient to make the 386 work,
1805 and the problem should not occur on machines with
1806 more registers. */
1807 && (max_nongroups[class] == 0
1808 || possible_group_p (potential_reload_regs[i], max_groups)))
1809 break;
1810
e404a39a
RK
1811 /* If we couldn't get a register, try to get one even if we
1812 might foreclose possible groups. This may cause problems
1813 later, but that's better than aborting now, since it is
1814 possible that we will, in fact, be able to form the needed
1815 group even with this allocation. */
1816
1817 if (i >= FIRST_PSEUDO_REGISTER
1818 && (asm_noperands (max_needs[class] > 0
1819 ? max_needs_insn[class]
1820 : max_nongroups_insn[class])
1821 < 0))
1822 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1823 if (potential_reload_regs[i] >= 0
1824 && TEST_HARD_REG_BIT (reg_class_contents[class],
1825 potential_reload_regs[i]))
1826 break;
1827
32131a9c
RK
1828 /* I should be the index in potential_reload_regs
1829 of the new reload reg we have found. */
1830
5352b11a
RS
1831 if (i >= FIRST_PSEUDO_REGISTER)
1832 {
1833 /* There are no possible registers left to spill. */
1834 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1835 : max_nongroups_insn[class]);
1836 failure = 1;
1837 goto failed;
1838 }
1839 else
1840 something_changed
1841 |= new_spill_reg (i, class, max_needs, max_nongroups,
1842 global, dumpfile);
32131a9c
RK
1843 }
1844 }
1845 }
1846
1847 /* If global-alloc was run, notify it of any register eliminations we have
1848 done. */
1849 if (global)
1850 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1851 if (ep->can_eliminate)
1852 mark_elimination (ep->from, ep->to);
1853
32131a9c 1854 /* Insert code to save and restore call-clobbered hard regs
a8efe40d
RK
1855 around calls. Tell if what mode to use so that we will process
1856 those insns in reload_as_needed if we have to. */
32131a9c
RK
1857
1858 if (caller_save_needed)
a8efe40d
RK
1859 save_call_clobbered_regs (num_eliminable ? QImode
1860 : caller_save_spill_class != NO_REGS ? HImode
1861 : VOIDmode);
32131a9c
RK
1862
1863 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1864 If that insn didn't set the register (i.e., it copied the register to
1865 memory), just delete that insn instead of the equivalencing insn plus
1866 anything now dead. If we call delete_dead_insn on that insn, we may
1867 delete the insn that actually sets the register if the register die
1868 there and that is incorrect. */
1869
1870 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1871 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1872 && GET_CODE (reg_equiv_init[i]) != NOTE)
1873 {
1874 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1875 delete_dead_insn (reg_equiv_init[i]);
1876 else
1877 {
1878 PUT_CODE (reg_equiv_init[i], NOTE);
1879 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1880 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1881 }
1882 }
1883
1884 /* Use the reload registers where necessary
1885 by generating move instructions to move the must-be-register
1886 values into or out of the reload registers. */
1887
a8efe40d
RK
1888 if (something_needs_reloads || something_needs_elimination
1889 || (caller_save_needed && num_eliminable)
1890 || caller_save_spill_class != NO_REGS)
32131a9c
RK
1891 reload_as_needed (first, global);
1892
2a1f8b6b 1893 /* If we were able to eliminate the frame pointer, show that it is no
546b63fb 1894 longer live at the start of any basic block. If it ls live by
2a1f8b6b
RK
1895 virtue of being in a pseudo, that pseudo will be marked live
1896 and hence the frame pointer will be known to be live via that
1897 pseudo. */
1898
1899 if (! frame_pointer_needed)
1900 for (i = 0; i < n_basic_blocks; i++)
3ec2ea3e
DE
1901 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1902 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1903 % REGSET_ELT_BITS));
2a1f8b6b 1904
5352b11a
RS
1905 /* Come here (with failure set nonzero) if we can't get enough spill regs
1906 and we decide not to abort about it. */
1907 failed:
1908
a3ec87a8
RS
1909 reload_in_progress = 0;
1910
32131a9c
RK
1911 /* Now eliminate all pseudo regs by modifying them into
1912 their equivalent memory references.
1913 The REG-rtx's for the pseudos are modified in place,
1914 so all insns that used to refer to them now refer to memory.
1915
1916 For a reg that has a reg_equiv_address, all those insns
1917 were changed by reloading so that no insns refer to it any longer;
1918 but the DECL_RTL of a variable decl may refer to it,
1919 and if so this causes the debugging info to mention the variable. */
1920
1921 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1922 {
1923 rtx addr = 0;
ab1fd483 1924 int in_struct = 0;
32131a9c 1925 if (reg_equiv_mem[i])
ab1fd483
RS
1926 {
1927 addr = XEXP (reg_equiv_mem[i], 0);
1928 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1929 }
32131a9c
RK
1930 if (reg_equiv_address[i])
1931 addr = reg_equiv_address[i];
1932 if (addr)
1933 {
1934 if (reg_renumber[i] < 0)
1935 {
1936 rtx reg = regno_reg_rtx[i];
1937 XEXP (reg, 0) = addr;
1938 REG_USERVAR_P (reg) = 0;
ab1fd483 1939 MEM_IN_STRUCT_P (reg) = in_struct;
32131a9c
RK
1940 PUT_CODE (reg, MEM);
1941 }
1942 else if (reg_equiv_mem[i])
1943 XEXP (reg_equiv_mem[i], 0) = addr;
1944 }
1945 }
1946
1947#ifdef PRESERVE_DEATH_INFO_REGNO_P
1948 /* Make a pass over all the insns and remove death notes for things that
1949 are no longer registers or no longer die in the insn (e.g., an input
1950 and output pseudo being tied). */
1951
1952 for (insn = first; insn; insn = NEXT_INSN (insn))
1953 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1954 {
1955 rtx note, next;
1956
1957 for (note = REG_NOTES (insn); note; note = next)
1958 {
1959 next = XEXP (note, 1);
1960 if (REG_NOTE_KIND (note) == REG_DEAD
1961 && (GET_CODE (XEXP (note, 0)) != REG
1962 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1963 remove_note (insn, note);
1964 }
1965 }
1966#endif
1967
1968 /* Indicate that we no longer have known memory locations or constants. */
1969 reg_equiv_constant = 0;
1970 reg_equiv_memory_loc = 0;
5352b11a 1971
c8ab4464
RS
1972 if (scratch_list)
1973 free (scratch_list);
c307c237 1974 scratch_list = 0;
c8ab4464
RS
1975 if (scratch_block)
1976 free (scratch_block);
c307c237
RK
1977 scratch_block = 0;
1978
5352b11a 1979 return failure;
32131a9c
RK
1980}
1981\f
1982/* Nonzero if, after spilling reg REGNO for non-groups,
1983 it will still be possible to find a group if we still need one. */
1984
1985static int
1986possible_group_p (regno, max_groups)
1987 int regno;
1988 int *max_groups;
1989{
1990 int i;
1991 int class = (int) NO_REGS;
1992
1993 for (i = 0; i < (int) N_REG_CLASSES; i++)
1994 if (max_groups[i] > 0)
1995 {
1996 class = i;
1997 break;
1998 }
1999
2000 if (class == (int) NO_REGS)
2001 return 1;
2002
2003 /* Consider each pair of consecutive registers. */
2004 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2005 {
2006 /* Ignore pairs that include reg REGNO. */
2007 if (i == regno || i + 1 == regno)
2008 continue;
2009
2010 /* Ignore pairs that are outside the class that needs the group.
2011 ??? Here we fail to handle the case where two different classes
2012 independently need groups. But this never happens with our
2013 current machine descriptions. */
2014 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2015 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2016 continue;
2017
2018 /* A pair of consecutive regs we can still spill does the trick. */
2019 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2020 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2021 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2022 return 1;
2023
2024 /* A pair of one already spilled and one we can spill does it
2025 provided the one already spilled is not otherwise reserved. */
2026 if (spill_reg_order[i] < 0
2027 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2028 && spill_reg_order[i + 1] >= 0
2029 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2030 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2031 return 1;
2032 if (spill_reg_order[i + 1] < 0
2033 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2034 && spill_reg_order[i] >= 0
2035 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2036 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2037 return 1;
2038 }
2039
2040 return 0;
2041}
2042\f
2043/* Count any groups that can be formed from the registers recently spilled.
2044 This is done class by class, in order of ascending class number. */
2045
2046static void
2047count_possible_groups (group_size, group_mode, max_groups)
546b63fb 2048 int *group_size;
32131a9c 2049 enum machine_mode *group_mode;
546b63fb 2050 int *max_groups;
32131a9c
RK
2051{
2052 int i;
2053 /* Now find all consecutive groups of spilled registers
2054 and mark each group off against the need for such groups.
2055 But don't count them against ordinary need, yet. */
2056
2057 for (i = 0; i < N_REG_CLASSES; i++)
2058 if (group_size[i] > 1)
2059 {
93193ab5 2060 HARD_REG_SET new;
32131a9c
RK
2061 int j;
2062
93193ab5
RK
2063 CLEAR_HARD_REG_SET (new);
2064
32131a9c
RK
2065 /* Make a mask of all the regs that are spill regs in class I. */
2066 for (j = 0; j < n_spills; j++)
2067 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
2068 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
2069 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
2070 spill_regs[j]))
93193ab5
RK
2071 SET_HARD_REG_BIT (new, spill_regs[j]);
2072
32131a9c
RK
2073 /* Find each consecutive group of them. */
2074 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
93193ab5
RK
2075 if (TEST_HARD_REG_BIT (new, j)
2076 && j + group_size[i] <= FIRST_PSEUDO_REGISTER
32131a9c
RK
2077 /* Next line in case group-mode for this class
2078 demands an even-odd pair. */
2079 && HARD_REGNO_MODE_OK (j, group_mode[i]))
2080 {
2081 int k;
2082 for (k = 1; k < group_size[i]; k++)
93193ab5 2083 if (! TEST_HARD_REG_BIT (new, j + k))
32131a9c
RK
2084 break;
2085 if (k == group_size[i])
2086 {
2087 /* We found a group. Mark it off against this class's
2088 need for groups, and against each superclass too. */
2089 register enum reg_class *p;
2090 max_groups[i]--;
2091 p = reg_class_superclasses[i];
2092 while (*p != LIM_REG_CLASSES)
2093 max_groups[(int) *p++]--;
a8fdc208 2094 /* Don't count these registers again. */
32131a9c
RK
2095 for (k = 0; k < group_size[i]; k++)
2096 SET_HARD_REG_BIT (counted_for_groups, j + k);
2097 }
fa52261e
RS
2098 /* Skip to the last reg in this group. When j is incremented
2099 above, it will then point to the first reg of the next
2100 possible group. */
2101 j += k - 1;
32131a9c
RK
2102 }
2103 }
2104
2105}
2106\f
2107/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2108 another mode that needs to be reloaded for the same register class CLASS.
2109 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2110 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2111
2112 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2113 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2114 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2115 causes unnecessary failures on machines requiring alignment of register
2116 groups when the two modes are different sizes, because the larger mode has
2117 more strict alignment rules than the smaller mode. */
2118
2119static int
2120modes_equiv_for_class_p (allocate_mode, other_mode, class)
2121 enum machine_mode allocate_mode, other_mode;
2122 enum reg_class class;
2123{
2124 register int regno;
2125 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2126 {
2127 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2128 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2129 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2130 return 0;
2131 }
2132 return 1;
2133}
2134
5352b11a
RS
2135/* Handle the failure to find a register to spill.
2136 INSN should be one of the insns which needed this particular spill reg. */
2137
2138static void
2139spill_failure (insn)
2140 rtx insn;
2141{
2142 if (asm_noperands (PATTERN (insn)) >= 0)
2143 error_for_asm (insn, "`asm' needs too many reloads");
2144 else
2145 abort ();
2146}
2147
32131a9c
RK
2148/* Add a new register to the tables of available spill-registers
2149 (as well as spilling all pseudos allocated to the register).
2150 I is the index of this register in potential_reload_regs.
2151 CLASS is the regclass whose need is being satisfied.
2152 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2153 so that this register can count off against them.
2154 MAX_NONGROUPS is 0 if this register is part of a group.
2155 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2156
2157static int
2158new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2159 int i;
2160 int class;
2161 int *max_needs;
2162 int *max_nongroups;
2163 int global;
2164 FILE *dumpfile;
2165{
2166 register enum reg_class *p;
2167 int val;
2168 int regno = potential_reload_regs[i];
2169
2170 if (i >= FIRST_PSEUDO_REGISTER)
2171 abort (); /* Caller failed to find any register. */
2172
2173 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2174 fatal ("fixed or forbidden register was spilled.\n\
56f58d3a
RK
2175This may be due to a compiler bug or to impossible asm\n\
2176statements or clauses.");
32131a9c
RK
2177
2178 /* Make reg REGNO an additional reload reg. */
2179
2180 potential_reload_regs[i] = -1;
2181 spill_regs[n_spills] = regno;
2182 spill_reg_order[regno] = n_spills;
2183 if (dumpfile)
2184 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2185
2186 /* Clear off the needs we just satisfied. */
2187
2188 max_needs[class]--;
2189 p = reg_class_superclasses[class];
2190 while (*p != LIM_REG_CLASSES)
2191 max_needs[(int) *p++]--;
2192
2193 if (max_nongroups && max_nongroups[class] > 0)
2194 {
2195 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2196 max_nongroups[class]--;
2197 p = reg_class_superclasses[class];
2198 while (*p != LIM_REG_CLASSES)
2199 max_nongroups[(int) *p++]--;
2200 }
2201
2202 /* Spill every pseudo reg that was allocated to this reg
2203 or to something that overlaps this reg. */
2204
2205 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2206
2207 /* If there are some registers still to eliminate and this register
2208 wasn't ever used before, additional stack space may have to be
2209 allocated to store this register. Thus, we may have changed the offset
2210 between the stack and frame pointers, so mark that something has changed.
2211 (If new pseudos were spilled, thus requiring more space, VAL would have
2212 been set non-zero by the call to spill_hard_reg above since additional
2213 reloads may be needed in that case.
2214
2215 One might think that we need only set VAL to 1 if this is a call-used
2216 register. However, the set of registers that must be saved by the
2217 prologue is not identical to the call-used set. For example, the
2218 register used by the call insn for the return PC is a call-used register,
2219 but must be saved by the prologue. */
2220 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2221 val = 1;
2222
2223 regs_ever_live[spill_regs[n_spills]] = 1;
2224 n_spills++;
2225
2226 return val;
2227}
2228\f
2229/* Delete an unneeded INSN and any previous insns who sole purpose is loading
2230 data that is dead in INSN. */
2231
2232static void
2233delete_dead_insn (insn)
2234 rtx insn;
2235{
2236 rtx prev = prev_real_insn (insn);
2237 rtx prev_dest;
2238
2239 /* If the previous insn sets a register that dies in our insn, delete it
2240 too. */
2241 if (prev && GET_CODE (PATTERN (prev)) == SET
2242 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2243 && reg_mentioned_p (prev_dest, PATTERN (insn))
2244 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2245 delete_dead_insn (prev);
2246
2247 PUT_CODE (insn, NOTE);
2248 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2249 NOTE_SOURCE_FILE (insn) = 0;
2250}
2251
2252/* Modify the home of pseudo-reg I.
2253 The new home is present in reg_renumber[I].
2254
2255 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2256 or it may be -1, meaning there is none or it is not relevant.
2257 This is used so that all pseudos spilled from a given hard reg
2258 can share one stack slot. */
2259
2260static void
2261alter_reg (i, from_reg)
2262 register int i;
2263 int from_reg;
2264{
2265 /* When outputting an inline function, this can happen
2266 for a reg that isn't actually used. */
2267 if (regno_reg_rtx[i] == 0)
2268 return;
2269
2270 /* If the reg got changed to a MEM at rtl-generation time,
2271 ignore it. */
2272 if (GET_CODE (regno_reg_rtx[i]) != REG)
2273 return;
2274
2275 /* Modify the reg-rtx to contain the new hard reg
2276 number or else to contain its pseudo reg number. */
2277 REGNO (regno_reg_rtx[i])
2278 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2279
2280 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2281 allocate a stack slot for it. */
2282
2283 if (reg_renumber[i] < 0
2284 && reg_n_refs[i] > 0
2285 && reg_equiv_constant[i] == 0
2286 && reg_equiv_memory_loc[i] == 0)
2287 {
2288 register rtx x;
2289 int inherent_size = PSEUDO_REGNO_BYTES (i);
2290 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2291 int adjust = 0;
2292
2293 /* Each pseudo reg has an inherent size which comes from its own mode,
2294 and a total size which provides room for paradoxical subregs
2295 which refer to the pseudo reg in wider modes.
2296
2297 We can use a slot already allocated if it provides both
2298 enough inherent space and enough total space.
2299 Otherwise, we allocate a new slot, making sure that it has no less
2300 inherent space, and no less total space, then the previous slot. */
2301 if (from_reg == -1)
2302 {
2303 /* No known place to spill from => no slot to reuse. */
2304 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2305#if BYTES_BIG_ENDIAN
2306 /* Cancel the big-endian correction done in assign_stack_local.
2307 Get the address of the beginning of the slot.
2308 This is so we can do a big-endian correction unconditionally
2309 below. */
2310 adjust = inherent_size - total_size;
2311#endif
2312 }
2313 /* Reuse a stack slot if possible. */
2314 else if (spill_stack_slot[from_reg] != 0
2315 && spill_stack_slot_width[from_reg] >= total_size
2316 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2317 >= inherent_size))
2318 x = spill_stack_slot[from_reg];
2319 /* Allocate a bigger slot. */
2320 else
2321 {
2322 /* Compute maximum size needed, both for inherent size
2323 and for total size. */
2324 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2325 if (spill_stack_slot[from_reg])
2326 {
2327 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2328 > inherent_size)
2329 mode = GET_MODE (spill_stack_slot[from_reg]);
2330 if (spill_stack_slot_width[from_reg] > total_size)
2331 total_size = spill_stack_slot_width[from_reg];
2332 }
2333 /* Make a slot with that size. */
2334 x = assign_stack_local (mode, total_size, -1);
2335#if BYTES_BIG_ENDIAN
2336 /* Cancel the big-endian correction done in assign_stack_local.
2337 Get the address of the beginning of the slot.
2338 This is so we can do a big-endian correction unconditionally
2339 below. */
2340 adjust = GET_MODE_SIZE (mode) - total_size;
2341#endif
2342 spill_stack_slot[from_reg] = x;
2343 spill_stack_slot_width[from_reg] = total_size;
2344 }
2345
2346#if BYTES_BIG_ENDIAN
2347 /* On a big endian machine, the "address" of the slot
2348 is the address of the low part that fits its inherent mode. */
2349 if (inherent_size < total_size)
2350 adjust += (total_size - inherent_size);
2351#endif /* BYTES_BIG_ENDIAN */
2352
2353 /* If we have any adjustment to make, or if the stack slot is the
2354 wrong mode, make a new stack slot. */
2355 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2356 {
2357 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2358 plus_constant (XEXP (x, 0), adjust));
2359 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2360 }
2361
2362 /* Save the stack slot for later. */
2363 reg_equiv_memory_loc[i] = x;
2364 }
2365}
2366
2367/* Mark the slots in regs_ever_live for the hard regs
2368 used by pseudo-reg number REGNO. */
2369
2370void
2371mark_home_live (regno)
2372 int regno;
2373{
2374 register int i, lim;
2375 i = reg_renumber[regno];
2376 if (i < 0)
2377 return;
2378 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2379 while (i < lim)
2380 regs_ever_live[i++] = 1;
2381}
c307c237
RK
2382
2383/* Mark the registers used in SCRATCH as being live. */
2384
2385static void
2386mark_scratch_live (scratch)
2387 rtx scratch;
2388{
2389 register int i;
2390 int regno = REGNO (scratch);
2391 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2392
2393 for (i = regno; i < lim; i++)
2394 regs_ever_live[i] = 1;
2395}
32131a9c
RK
2396\f
2397/* This function handles the tracking of elimination offsets around branches.
2398
2399 X is a piece of RTL being scanned.
2400
2401 INSN is the insn that it came from, if any.
2402
2403 INITIAL_P is non-zero if we are to set the offset to be the initial
2404 offset and zero if we are setting the offset of the label to be the
2405 current offset. */
2406
2407static void
2408set_label_offsets (x, insn, initial_p)
2409 rtx x;
2410 rtx insn;
2411 int initial_p;
2412{
2413 enum rtx_code code = GET_CODE (x);
2414 rtx tem;
2415 int i;
2416 struct elim_table *p;
2417
2418 switch (code)
2419 {
2420 case LABEL_REF:
8be386d9
RS
2421 if (LABEL_REF_NONLOCAL_P (x))
2422 return;
2423
32131a9c
RK
2424 x = XEXP (x, 0);
2425
2426 /* ... fall through ... */
2427
2428 case CODE_LABEL:
2429 /* If we know nothing about this label, set the desired offsets. Note
2430 that this sets the offset at a label to be the offset before a label
2431 if we don't know anything about the label. This is not correct for
2432 the label after a BARRIER, but is the best guess we can make. If
2433 we guessed wrong, we will suppress an elimination that might have
2434 been possible had we been able to guess correctly. */
2435
2436 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2437 {
2438 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2439 offsets_at[CODE_LABEL_NUMBER (x)][i]
2440 = (initial_p ? reg_eliminate[i].initial_offset
2441 : reg_eliminate[i].offset);
2442 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2443 }
2444
2445 /* Otherwise, if this is the definition of a label and it is
d45cf215 2446 preceded by a BARRIER, set our offsets to the known offset of
32131a9c
RK
2447 that label. */
2448
2449 else if (x == insn
2450 && (tem = prev_nonnote_insn (insn)) != 0
2451 && GET_CODE (tem) == BARRIER)
2a4b5f3b
RK
2452 {
2453 num_not_at_initial_offset = 0;
2454 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2455 {
2456 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2457 = offsets_at[CODE_LABEL_NUMBER (x)][i];
1d0d98f3
RK
2458 if (reg_eliminate[i].can_eliminate
2459 && (reg_eliminate[i].offset
2460 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
2461 num_not_at_initial_offset++;
2462 }
2463 }
32131a9c
RK
2464
2465 else
2466 /* If neither of the above cases is true, compare each offset
2467 with those previously recorded and suppress any eliminations
2468 where the offsets disagree. */
a8fdc208 2469
32131a9c
RK
2470 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2471 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2472 != (initial_p ? reg_eliminate[i].initial_offset
2473 : reg_eliminate[i].offset))
2474 reg_eliminate[i].can_eliminate = 0;
2475
2476 return;
2477
2478 case JUMP_INSN:
2479 set_label_offsets (PATTERN (insn), insn, initial_p);
2480
2481 /* ... fall through ... */
2482
2483 case INSN:
2484 case CALL_INSN:
2485 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2486 and hence must have all eliminations at their initial offsets. */
2487 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2488 if (REG_NOTE_KIND (tem) == REG_LABEL)
2489 set_label_offsets (XEXP (tem, 0), insn, 1);
2490 return;
2491
2492 case ADDR_VEC:
2493 case ADDR_DIFF_VEC:
2494 /* Each of the labels in the address vector must be at their initial
2495 offsets. We want the first first for ADDR_VEC and the second
2496 field for ADDR_DIFF_VEC. */
2497
2498 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2499 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2500 insn, initial_p);
2501 return;
2502
2503 case SET:
2504 /* We only care about setting PC. If the source is not RETURN,
2505 IF_THEN_ELSE, or a label, disable any eliminations not at
2506 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2507 isn't one of those possibilities. For branches to a label,
2508 call ourselves recursively.
2509
2510 Note that this can disable elimination unnecessarily when we have
2511 a non-local goto since it will look like a non-constant jump to
2512 someplace in the current function. This isn't a significant
2513 problem since such jumps will normally be when all elimination
2514 pairs are back to their initial offsets. */
2515
2516 if (SET_DEST (x) != pc_rtx)
2517 return;
2518
2519 switch (GET_CODE (SET_SRC (x)))
2520 {
2521 case PC:
2522 case RETURN:
2523 return;
2524
2525 case LABEL_REF:
2526 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2527 return;
2528
2529 case IF_THEN_ELSE:
2530 tem = XEXP (SET_SRC (x), 1);
2531 if (GET_CODE (tem) == LABEL_REF)
2532 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2533 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2534 break;
2535
2536 tem = XEXP (SET_SRC (x), 2);
2537 if (GET_CODE (tem) == LABEL_REF)
2538 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2539 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2540 break;
2541 return;
2542 }
2543
2544 /* If we reach here, all eliminations must be at their initial
2545 offset because we are doing a jump to a variable address. */
2546 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2547 if (p->offset != p->initial_offset)
2548 p->can_eliminate = 0;
2549 }
2550}
2551\f
2552/* Used for communication between the next two function to properly share
2553 the vector for an ASM_OPERANDS. */
2554
2555static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2556
a8fdc208 2557/* Scan X and replace any eliminable registers (such as fp) with a
32131a9c
RK
2558 replacement (such as sp), plus an offset.
2559
2560 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2561 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2562 MEM, we are allowed to replace a sum of a register and the constant zero
2563 with the register, which we cannot do outside a MEM. In addition, we need
2564 to record the fact that a register is referenced outside a MEM.
2565
ff32812a 2566 If INSN is an insn, it is the insn containing X. If we replace a REG
32131a9c
RK
2567 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2568 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2569 that the REG is being modified.
2570
ff32812a
RS
2571 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2572 That's used when we eliminate in expressions stored in notes.
2573 This means, do not set ref_outside_mem even if the reference
2574 is outside of MEMs.
2575
32131a9c
RK
2576 If we see a modification to a register we know about, take the
2577 appropriate action (see case SET, below).
2578
2579 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2580 replacements done assuming all offsets are at their initial values. If
2581 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2582 encounter, return the actual location so that find_reloads will do
2583 the proper thing. */
2584
2585rtx
2586eliminate_regs (x, mem_mode, insn)
2587 rtx x;
2588 enum machine_mode mem_mode;
2589 rtx insn;
2590{
2591 enum rtx_code code = GET_CODE (x);
2592 struct elim_table *ep;
2593 int regno;
2594 rtx new;
2595 int i, j;
2596 char *fmt;
2597 int copied = 0;
2598
2599 switch (code)
2600 {
2601 case CONST_INT:
2602 case CONST_DOUBLE:
2603 case CONST:
2604 case SYMBOL_REF:
2605 case CODE_LABEL:
2606 case PC:
2607 case CC0:
2608 case ASM_INPUT:
2609 case ADDR_VEC:
2610 case ADDR_DIFF_VEC:
2611 case RETURN:
2612 return x;
2613
2614 case REG:
2615 regno = REGNO (x);
2616
2617 /* First handle the case where we encounter a bare register that
2618 is eliminable. Replace it with a PLUS. */
2619 if (regno < FIRST_PSEUDO_REGISTER)
2620 {
2621 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2622 ep++)
2623 if (ep->from_rtx == x && ep->can_eliminate)
2624 {
ff32812a
RS
2625 if (! mem_mode
2626 /* Refs inside notes don't count for this purpose. */
fe089a90 2627 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
ff32812a 2628 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2629 ep->ref_outside_mem = 1;
2630 return plus_constant (ep->to_rtx, ep->previous_offset);
2631 }
2632
2633 }
2634 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2635 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2636 {
2637 /* In this case, find_reloads would attempt to either use an
2638 incorrect address (if something is not at its initial offset)
2639 or substitute an replaced address into an insn (which loses
2640 if the offset is changed by some later action). So we simply
2641 return the replaced stack slot (assuming it is changed by
2642 elimination) and ignore the fact that this is actually a
2643 reference to the pseudo. Ensure we make a copy of the
2644 address in case it is shared. */
fb3821f7 2645 new = eliminate_regs (reg_equiv_memory_loc[regno],
e5687447 2646 mem_mode, insn);
32131a9c 2647 if (new != reg_equiv_memory_loc[regno])
208dffa5
RS
2648 {
2649 cannot_omit_stores[regno] = 1;
2650 return copy_rtx (new);
2651 }
32131a9c
RK
2652 }
2653 return x;
2654
2655 case PLUS:
2656 /* If this is the sum of an eliminable register and a constant, rework
2657 the sum. */
2658 if (GET_CODE (XEXP (x, 0)) == REG
2659 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2660 && CONSTANT_P (XEXP (x, 1)))
2661 {
2662 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2663 ep++)
2664 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2665 {
e5687447
JW
2666 if (! mem_mode
2667 /* Refs inside notes don't count for this purpose. */
2668 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2669 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2670 ep->ref_outside_mem = 1;
2671
2672 /* The only time we want to replace a PLUS with a REG (this
2673 occurs when the constant operand of the PLUS is the negative
2674 of the offset) is when we are inside a MEM. We won't want
2675 to do so at other times because that would change the
2676 structure of the insn in a way that reload can't handle.
2677 We special-case the commonest situation in
2678 eliminate_regs_in_insn, so just replace a PLUS with a
2679 PLUS here, unless inside a MEM. */
a23b64d5 2680 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
32131a9c
RK
2681 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2682 return ep->to_rtx;
2683 else
2684 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2685 plus_constant (XEXP (x, 1),
2686 ep->previous_offset));
2687 }
2688
2689 /* If the register is not eliminable, we are done since the other
2690 operand is a constant. */
2691 return x;
2692 }
2693
2694 /* If this is part of an address, we want to bring any constant to the
2695 outermost PLUS. We will do this by doing register replacement in
2696 our operands and seeing if a constant shows up in one of them.
2697
2698 We assume here this is part of an address (or a "load address" insn)
2699 since an eliminable register is not likely to appear in any other
2700 context.
2701
2702 If we have (plus (eliminable) (reg)), we want to produce
2703 (plus (plus (replacement) (reg) (const))). If this was part of a
2704 normal add insn, (plus (replacement) (reg)) will be pushed as a
2705 reload. This is the desired action. */
2706
2707 {
e5687447
JW
2708 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2709 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
32131a9c
RK
2710
2711 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2712 {
2713 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2714 didn't get a hard register but has a reg_equiv_constant,
32131a9c
RK
2715 we must replace the constant here since it may no longer
2716 be in the position of any operand. */
2717 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2718 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2719 && reg_renumber[REGNO (new1)] < 0
2720 && reg_equiv_constant != 0
2721 && reg_equiv_constant[REGNO (new1)] != 0)
2722 new1 = reg_equiv_constant[REGNO (new1)];
2723 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2724 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2725 && reg_renumber[REGNO (new0)] < 0
2726 && reg_equiv_constant[REGNO (new0)] != 0)
2727 new0 = reg_equiv_constant[REGNO (new0)];
2728
2729 new = form_sum (new0, new1);
2730
2731 /* As above, if we are not inside a MEM we do not want to
2732 turn a PLUS into something else. We might try to do so here
2733 for an addition of 0 if we aren't optimizing. */
2734 if (! mem_mode && GET_CODE (new) != PLUS)
2735 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2736 else
2737 return new;
2738 }
2739 }
2740 return x;
2741
981c7390
RK
2742 case MULT:
2743 /* If this is the product of an eliminable register and a
2744 constant, apply the distribute law and move the constant out
2745 so that we have (plus (mult ..) ..). This is needed in order
2746 to keep load-address insns valid. This case is pathalogical.
2747 We ignore the possibility of overflow here. */
2748 if (GET_CODE (XEXP (x, 0)) == REG
2749 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2750 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2751 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2752 ep++)
2753 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2754 {
2755 if (! mem_mode
2756 /* Refs inside notes don't count for this purpose. */
2757 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2758 || GET_CODE (insn) == INSN_LIST)))
2759 ep->ref_outside_mem = 1;
2760
2761 return
2762 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2763 ep->previous_offset * INTVAL (XEXP (x, 1)));
2764 }
32131a9c
RK
2765
2766 /* ... fall through ... */
2767
32131a9c
RK
2768 case CALL:
2769 case COMPARE:
2770 case MINUS:
32131a9c
RK
2771 case DIV: case UDIV:
2772 case MOD: case UMOD:
2773 case AND: case IOR: case XOR:
45620ed4
RK
2774 case ROTATERT: case ROTATE:
2775 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
32131a9c
RK
2776 case NE: case EQ:
2777 case GE: case GT: case GEU: case GTU:
2778 case LE: case LT: case LEU: case LTU:
2779 {
e5687447 2780 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
fb3821f7 2781 rtx new1
e5687447 2782 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
32131a9c
RK
2783
2784 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2785 return gen_rtx (code, GET_MODE (x), new0, new1);
2786 }
2787 return x;
2788
981c7390
RK
2789 case EXPR_LIST:
2790 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2791 if (XEXP (x, 0))
2792 {
2793 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2794 if (new != XEXP (x, 0))
2795 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2796 }
2797
2798 /* ... fall through ... */
2799
2800 case INSN_LIST:
2801 /* Now do eliminations in the rest of the chain. If this was
2802 an EXPR_LIST, this might result in allocating more memory than is
2803 strictly needed, but it simplifies the code. */
2804 if (XEXP (x, 1))
2805 {
2806 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2807 if (new != XEXP (x, 1))
2808 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2809 }
2810 return x;
2811
32131a9c
RK
2812 case PRE_INC:
2813 case POST_INC:
2814 case PRE_DEC:
2815 case POST_DEC:
2816 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2817 if (ep->to_rtx == XEXP (x, 0))
2818 {
4c05b187
RK
2819 int size = GET_MODE_SIZE (mem_mode);
2820
2821 /* If more bytes than MEM_MODE are pushed, account for them. */
2822#ifdef PUSH_ROUNDING
2823 if (ep->to_rtx == stack_pointer_rtx)
2824 size = PUSH_ROUNDING (size);
2825#endif
32131a9c 2826 if (code == PRE_DEC || code == POST_DEC)
4c05b187 2827 ep->offset += size;
32131a9c 2828 else
4c05b187 2829 ep->offset -= size;
32131a9c
RK
2830 }
2831
2832 /* Fall through to generic unary operation case. */
2833 case USE:
2834 case STRICT_LOW_PART:
2835 case NEG: case NOT:
2836 case SIGN_EXTEND: case ZERO_EXTEND:
2837 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2838 case FLOAT: case FIX:
2839 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2840 case ABS:
2841 case SQRT:
2842 case FFS:
e5687447 2843 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
32131a9c
RK
2844 if (new != XEXP (x, 0))
2845 return gen_rtx (code, GET_MODE (x), new);
2846 return x;
2847
2848 case SUBREG:
2849 /* Similar to above processing, but preserve SUBREG_WORD.
2850 Convert (subreg (mem)) to (mem) if not paradoxical.
2851 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2852 pseudo didn't get a hard reg, we must replace this with the
2853 eliminated version of the memory location because push_reloads
2854 may do the replacement in certain circumstances. */
2855 if (GET_CODE (SUBREG_REG (x)) == REG
2856 && (GET_MODE_SIZE (GET_MODE (x))
2857 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2858 && reg_equiv_memory_loc != 0
2859 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2860 {
2861 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
e5687447 2862 mem_mode, insn);
32131a9c
RK
2863
2864 /* If we didn't change anything, we must retain the pseudo. */
2865 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2866 new = XEXP (x, 0);
2867 else
2868 /* Otherwise, ensure NEW isn't shared in case we have to reload
2869 it. */
2870 new = copy_rtx (new);
2871 }
2872 else
e5687447 2873 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
32131a9c
RK
2874
2875 if (new != XEXP (x, 0))
2876 {
2877 if (GET_CODE (new) == MEM
2878 && (GET_MODE_SIZE (GET_MODE (x))
a3b75c07 2879 <= GET_MODE_SIZE (GET_MODE (new)))
e90d3cbb 2880#ifdef LOAD_EXTEND_OP
a3b75c07
RS
2881 /* On these machines we will be reloading what is
2882 inside the SUBREG if it originally was a pseudo and
2883 the inner and outer modes are both a word or
2884 smaller. So leave the SUBREG then. */
2885 && ! (GET_CODE (SUBREG_REG (x)) == REG
2886 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2887 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
2888#endif
2889 )
32131a9c
RK
2890 {
2891 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2892 enum machine_mode mode = GET_MODE (x);
2893
2894#if BYTES_BIG_ENDIAN
2895 offset += (MIN (UNITS_PER_WORD,
2896 GET_MODE_SIZE (GET_MODE (new)))
2897 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2898#endif
2899
2900 PUT_MODE (new, mode);
2901 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2902 return new;
2903 }
2904 else
2905 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2906 }
2907
2908 return x;
2909
2910 case CLOBBER:
2911 /* If clobbering a register that is the replacement register for an
d45cf215 2912 elimination we still think can be performed, note that it cannot
32131a9c
RK
2913 be performed. Otherwise, we need not be concerned about it. */
2914 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2915 if (ep->to_rtx == XEXP (x, 0))
2916 ep->can_eliminate = 0;
2917
e5687447 2918 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2045084c
JVA
2919 if (new != XEXP (x, 0))
2920 return gen_rtx (code, GET_MODE (x), new);
32131a9c
RK
2921 return x;
2922
2923 case ASM_OPERANDS:
2924 {
2925 rtx *temp_vec;
2926 /* Properly handle sharing input and constraint vectors. */
2927 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2928 {
2929 /* When we come to a new vector not seen before,
2930 scan all its elements; keep the old vector if none
2931 of them changes; otherwise, make a copy. */
2932 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2933 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2934 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2935 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
e5687447 2936 mem_mode, insn);
32131a9c
RK
2937
2938 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2939 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2940 break;
2941
2942 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2943 new_asm_operands_vec = old_asm_operands_vec;
2944 else
2945 new_asm_operands_vec
2946 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2947 }
2948
2949 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2950 if (new_asm_operands_vec == old_asm_operands_vec)
2951 return x;
2952
2953 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2954 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2955 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2956 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2957 ASM_OPERANDS_SOURCE_FILE (x),
2958 ASM_OPERANDS_SOURCE_LINE (x));
2959 new->volatil = x->volatil;
2960 return new;
2961 }
2962
2963 case SET:
2964 /* Check for setting a register that we know about. */
2965 if (GET_CODE (SET_DEST (x)) == REG)
2966 {
2967 /* See if this is setting the replacement register for an
a8fdc208 2968 elimination.
32131a9c 2969
3ec2ea3e
DE
2970 If DEST is the hard frame pointer, we do nothing because we
2971 assume that all assignments to the frame pointer are for
2972 non-local gotos and are being done at a time when they are valid
2973 and do not disturb anything else. Some machines want to
2974 eliminate a fake argument pointer (or even a fake frame pointer)
2975 with either the real frame or the stack pointer. Assignments to
2976 the hard frame pointer must not prevent this elimination. */
32131a9c
RK
2977
2978 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2979 ep++)
2980 if (ep->to_rtx == SET_DEST (x)
3ec2ea3e 2981 && SET_DEST (x) != hard_frame_pointer_rtx)
32131a9c 2982 {
6dc42e49 2983 /* If it is being incremented, adjust the offset. Otherwise,
32131a9c
RK
2984 this elimination can't be done. */
2985 rtx src = SET_SRC (x);
2986
2987 if (GET_CODE (src) == PLUS
2988 && XEXP (src, 0) == SET_DEST (x)
2989 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2990 ep->offset -= INTVAL (XEXP (src, 1));
2991 else
2992 ep->can_eliminate = 0;
2993 }
2994
2995 /* Now check to see we are assigning to a register that can be
2996 eliminated. If so, it must be as part of a PARALLEL, since we
2997 will not have been called if this is a single SET. So indicate
2998 that we can no longer eliminate this reg. */
2999 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3000 ep++)
3001 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3002 ep->can_eliminate = 0;
3003 }
3004
3005 /* Now avoid the loop below in this common case. */
3006 {
e5687447
JW
3007 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3008 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
32131a9c 3009
ff32812a 3010 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
32131a9c
RK
3011 write a CLOBBER insn. */
3012 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
572ca60a
RS
3013 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3014 && GET_CODE (insn) != INSN_LIST)
32131a9c
RK
3015 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3016
3017 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3018 return gen_rtx (SET, VOIDmode, new0, new1);
3019 }
3020
3021 return x;
3022
3023 case MEM:
3024 /* Our only special processing is to pass the mode of the MEM to our
3025 recursive call and copy the flags. While we are here, handle this
3026 case more efficiently. */
e5687447 3027 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
32131a9c
RK
3028 if (new != XEXP (x, 0))
3029 {
3030 new = gen_rtx (MEM, GET_MODE (x), new);
3031 new->volatil = x->volatil;
3032 new->unchanging = x->unchanging;
3033 new->in_struct = x->in_struct;
3034 return new;
3035 }
3036 else
3037 return x;
3038 }
3039
3040 /* Process each of our operands recursively. If any have changed, make a
3041 copy of the rtx. */
3042 fmt = GET_RTX_FORMAT (code);
3043 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3044 {
3045 if (*fmt == 'e')
3046 {
e5687447 3047 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
32131a9c
RK
3048 if (new != XEXP (x, i) && ! copied)
3049 {
3050 rtx new_x = rtx_alloc (code);
3051 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3052 + (sizeof (new_x->fld[0])
3053 * GET_RTX_LENGTH (code))));
3054 x = new_x;
3055 copied = 1;
3056 }
3057 XEXP (x, i) = new;
3058 }
3059 else if (*fmt == 'E')
3060 {
3061 int copied_vec = 0;
3062 for (j = 0; j < XVECLEN (x, i); j++)
3063 {
3064 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3065 if (new != XVECEXP (x, i, j) && ! copied_vec)
3066 {
3067 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3068 &XVECEXP (x, i, 0));
3069 if (! copied)
3070 {
3071 rtx new_x = rtx_alloc (code);
3072 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3073 + (sizeof (new_x->fld[0])
3074 * GET_RTX_LENGTH (code))));
3075 x = new_x;
3076 copied = 1;
3077 }
3078 XVEC (x, i) = new_v;
3079 copied_vec = 1;
3080 }
3081 XVECEXP (x, i, j) = new;
3082 }
3083 }
3084 }
3085
3086 return x;
3087}
3088\f
3089/* Scan INSN and eliminate all eliminable registers in it.
3090
3091 If REPLACE is nonzero, do the replacement destructively. Also
3092 delete the insn as dead it if it is setting an eliminable register.
3093
3094 If REPLACE is zero, do all our allocations in reload_obstack.
3095
3096 If no eliminations were done and this insn doesn't require any elimination
3097 processing (these are not identical conditions: it might be updating sp,
3098 but not referencing fp; this needs to be seen during reload_as_needed so
3099 that the offset between fp and sp can be taken into consideration), zero
3100 is returned. Otherwise, 1 is returned. */
3101
3102static int
3103eliminate_regs_in_insn (insn, replace)
3104 rtx insn;
3105 int replace;
3106{
3107 rtx old_body = PATTERN (insn);
3108 rtx new_body;
3109 int val = 0;
3110 struct elim_table *ep;
3111
3112 if (! replace)
3113 push_obstacks (&reload_obstack, &reload_obstack);
3114
3115 if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
3116 && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
3117 {
3118 /* Check for setting an eliminable register. */
3119 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3120 if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
3121 {
3122 /* In this case this insn isn't serving a useful purpose. We
3123 will delete it in reload_as_needed once we know that this
3124 elimination is, in fact, being done.
3125
3126 If REPLACE isn't set, we can't delete this insn, but neededn't
3127 process it since it won't be used unless something changes. */
3128 if (replace)
3129 delete_dead_insn (insn);
3130 val = 1;
3131 goto done;
3132 }
3133
3134 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3135 in the insn is the negative of the offset in FROM. Substitute
3136 (set (reg) (reg to)) for the insn and change its code.
3137
3138 We have to do this here, rather than in eliminate_regs, do that we can
3139 change the insn code. */
3140
3141 if (GET_CODE (SET_SRC (old_body)) == PLUS
3142 && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
3143 && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
3144 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3145 ep++)
3146 if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
922d9d40 3147 && ep->can_eliminate)
32131a9c 3148 {
922d9d40
RK
3149 /* We must stop at the first elimination that will be used.
3150 If this one would replace the PLUS with a REG, do it
3151 now. Otherwise, quit the loop and let eliminate_regs
3152 do its normal replacement. */
3153 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
3154 {
3155 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3156 SET_DEST (old_body), ep->to_rtx);
3157 INSN_CODE (insn) = -1;
3158 val = 1;
3159 goto done;
3160 }
3161
3162 break;
32131a9c
RK
3163 }
3164 }
3165
3166 old_asm_operands_vec = 0;
3167
3168 /* Replace the body of this insn with a substituted form. If we changed
05b4c365 3169 something, return non-zero.
32131a9c
RK
3170
3171 If we are replacing a body that was a (set X (plus Y Z)), try to
3172 re-recognize the insn. We do this in case we had a simple addition
3173 but now can do this as a load-address. This saves an insn in this
3174 common case. */
3175
fb3821f7 3176 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
32131a9c
RK
3177 if (new_body != old_body)
3178 {
7c791b13
RK
3179 /* If we aren't replacing things permanently and we changed something,
3180 make another copy to ensure that all the RTL is new. Otherwise
3181 things can go wrong if find_reload swaps commutative operands
3182 and one is inside RTL that has been copied while the other is not. */
3183
4d411872
RS
3184 /* Don't copy an asm_operands because (1) there's no need and (2)
3185 copy_rtx can't do it properly when there are multiple outputs. */
b84f9d9c 3186 if (! replace && asm_noperands (old_body) < 0)
7c791b13
RK
3187 new_body = copy_rtx (new_body);
3188
4a5d0fb5 3189 /* If we had a move insn but now we don't, rerecognize it. */
0ba846c7
RS
3190 if ((GET_CODE (old_body) == SET && GET_CODE (SET_SRC (old_body)) == REG
3191 && (GET_CODE (new_body) != SET
3192 || GET_CODE (SET_SRC (new_body)) != REG))
51b8cba1
JL
3193 /* If this was a load from or store to memory, compare
3194 the MEM in recog_operand to the one in the insn. If they
3195 are not equal, then rerecognize the insn. */
3196 || (GET_CODE (old_body) == SET
3197 && ((GET_CODE (SET_SRC (old_body)) == MEM
3198 && SET_SRC (old_body) != recog_operand[1])
3199 || (GET_CODE (SET_DEST (old_body)) == MEM
3200 && SET_DEST (old_body) != recog_operand[0])))
0ba846c7
RS
3201 /* If this was an add insn before, rerecognize. */
3202 ||
3203 (GET_CODE (old_body) == SET
3204 && GET_CODE (SET_SRC (old_body)) == PLUS))
4a5d0fb5
RS
3205 {
3206 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
0ba846c7
RS
3207 /* If recognition fails, store the new body anyway.
3208 It's normal to have recognition failures here
3209 due to bizarre memory addresses; reloading will fix them. */
3210 PATTERN (insn) = new_body;
4a5d0fb5 3211 }
0ba846c7 3212 else
32131a9c
RK
3213 PATTERN (insn) = new_body;
3214
32131a9c
RK
3215 val = 1;
3216 }
a8fdc208 3217
32131a9c
RK
3218 /* Loop through all elimination pairs. See if any have changed and
3219 recalculate the number not at initial offset.
3220
a8efe40d
RK
3221 Compute the maximum offset (minimum offset if the stack does not
3222 grow downward) for each elimination pair.
3223
32131a9c
RK
3224 We also detect a cases where register elimination cannot be done,
3225 namely, if a register would be both changed and referenced outside a MEM
3226 in the resulting insn since such an insn is often undefined and, even if
3227 not, we cannot know what meaning will be given to it. Note that it is
3228 valid to have a register used in an address in an insn that changes it
3229 (presumably with a pre- or post-increment or decrement).
3230
3231 If anything changes, return nonzero. */
3232
3233 num_not_at_initial_offset = 0;
3234 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3235 {
3236 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3237 ep->can_eliminate = 0;
3238
3239 ep->ref_outside_mem = 0;
3240
3241 if (ep->previous_offset != ep->offset)
3242 val = 1;
3243
3244 ep->previous_offset = ep->offset;
3245 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3246 num_not_at_initial_offset++;
a8efe40d
RK
3247
3248#ifdef STACK_GROWS_DOWNWARD
3249 ep->max_offset = MAX (ep->max_offset, ep->offset);
3250#else
3251 ep->max_offset = MIN (ep->max_offset, ep->offset);
3252#endif
32131a9c
RK
3253 }
3254
3255 done:
05b4c365
RK
3256 /* If we changed something, perform elmination in REG_NOTES. This is
3257 needed even when REPLACE is zero because a REG_DEAD note might refer
3258 to a register that we eliminate and could cause a different number
3259 of spill registers to be needed in the final reload pass than in
3260 the pre-passes. */
20748cab 3261 if (val && REG_NOTES (insn) != 0)
ff32812a 3262 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
05b4c365 3263
32131a9c
RK
3264 if (! replace)
3265 pop_obstacks ();
3266
3267 return val;
3268}
3269
3270/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3271 replacement we currently believe is valid, mark it as not eliminable if X
3272 modifies DEST in any way other than by adding a constant integer to it.
3273
3274 If DEST is the frame pointer, we do nothing because we assume that
3ec2ea3e
DE
3275 all assignments to the hard frame pointer are nonlocal gotos and are being
3276 done at a time when they are valid and do not disturb anything else.
32131a9c 3277 Some machines want to eliminate a fake argument pointer with either the
3ec2ea3e
DE
3278 frame or stack pointer. Assignments to the hard frame pointer must not
3279 prevent this elimination.
32131a9c
RK
3280
3281 Called via note_stores from reload before starting its passes to scan
3282 the insns of the function. */
3283
3284static void
3285mark_not_eliminable (dest, x)
3286 rtx dest;
3287 rtx x;
3288{
3289 register int i;
3290
3291 /* A SUBREG of a hard register here is just changing its mode. We should
3292 not see a SUBREG of an eliminable hard register, but check just in
3293 case. */
3294 if (GET_CODE (dest) == SUBREG)
3295 dest = SUBREG_REG (dest);
3296
3ec2ea3e 3297 if (dest == hard_frame_pointer_rtx)
32131a9c
RK
3298 return;
3299
3300 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3301 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3302 && (GET_CODE (x) != SET
3303 || GET_CODE (SET_SRC (x)) != PLUS
3304 || XEXP (SET_SRC (x), 0) != dest
3305 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3306 {
3307 reg_eliminate[i].can_eliminate_previous
3308 = reg_eliminate[i].can_eliminate = 0;
3309 num_eliminable--;
3310 }
3311}
3312\f
3313/* Kick all pseudos out of hard register REGNO.
3314 If GLOBAL is nonzero, try to find someplace else to put them.
3315 If DUMPFILE is nonzero, log actions taken on that file.
3316
3317 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3318 because we found we can't eliminate some register. In the case, no pseudos
3319 are allowed to be in the register, even if they are only in a block that
3320 doesn't require spill registers, unlike the case when we are spilling this
3321 hard reg to produce another spill register.
3322
3323 Return nonzero if any pseudos needed to be kicked out. */
3324
3325static int
3326spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3327 register int regno;
3328 int global;
3329 FILE *dumpfile;
3330 int cant_eliminate;
3331{
c307c237 3332 enum reg_class class = REGNO_REG_CLASS (regno);
32131a9c
RK
3333 int something_changed = 0;
3334 register int i;
3335
3336 SET_HARD_REG_BIT (forbidden_regs, regno);
3337
3338 /* Spill every pseudo reg that was allocated to this reg
3339 or to something that overlaps this reg. */
3340
3341 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3342 if (reg_renumber[i] >= 0
3343 && reg_renumber[i] <= regno
a8fdc208 3344 && (reg_renumber[i]
32131a9c
RK
3345 + HARD_REGNO_NREGS (reg_renumber[i],
3346 PSEUDO_REGNO_MODE (i))
3347 > regno))
3348 {
32131a9c
RK
3349 /* If this register belongs solely to a basic block which needed no
3350 spilling of any class that this register is contained in,
3351 leave it be, unless we are spilling this register because
3352 it was a hard register that can't be eliminated. */
3353
3354 if (! cant_eliminate
3355 && basic_block_needs[0]
3356 && reg_basic_block[i] >= 0
3357 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3358 {
3359 enum reg_class *p;
3360
3361 for (p = reg_class_superclasses[(int) class];
3362 *p != LIM_REG_CLASSES; p++)
3363 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3364 break;
a8fdc208 3365
32131a9c
RK
3366 if (*p == LIM_REG_CLASSES)
3367 continue;
3368 }
3369
3370 /* Mark it as no longer having a hard register home. */
3371 reg_renumber[i] = -1;
3372 /* We will need to scan everything again. */
3373 something_changed = 1;
3374 if (global)
3375 retry_global_alloc (i, forbidden_regs);
3376
3377 alter_reg (i, regno);
3378 if (dumpfile)
3379 {
3380 if (reg_renumber[i] == -1)
3381 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3382 else
3383 fprintf (dumpfile, " Register %d now in %d.\n\n",
3384 i, reg_renumber[i]);
3385 }
3386 }
c307c237
RK
3387 for (i = 0; i < scratch_list_length; i++)
3388 {
3389 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3390 {
3391 if (! cant_eliminate && basic_block_needs[0]
3392 && ! basic_block_needs[(int) class][scratch_block[i]])
3393 {
3394 enum reg_class *p;
3395
3396 for (p = reg_class_superclasses[(int) class];
3397 *p != LIM_REG_CLASSES; p++)
3398 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3399 break;
3400
3401 if (*p == LIM_REG_CLASSES)
3402 continue;
3403 }
3404 PUT_CODE (scratch_list[i], SCRATCH);
3405 scratch_list[i] = 0;
3406 something_changed = 1;
3407 continue;
3408 }
3409 }
32131a9c
RK
3410
3411 return something_changed;
3412}
3413\f
56f58d3a
RK
3414/* Find all paradoxical subregs within X and update reg_max_ref_width.
3415 Also mark any hard registers used to store user variables as
3416 forbidden from being used for spill registers. */
32131a9c
RK
3417
3418static void
3419scan_paradoxical_subregs (x)
3420 register rtx x;
3421{
3422 register int i;
3423 register char *fmt;
3424 register enum rtx_code code = GET_CODE (x);
3425
3426 switch (code)
3427 {
56f58d3a
RK
3428 case REG:
3429#ifdef SMALL_REGISTER_CLASSES
3430 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3431 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3432#endif
3433 return;
3434
32131a9c
RK
3435 case CONST_INT:
3436 case CONST:
3437 case SYMBOL_REF:
3438 case LABEL_REF:
3439 case CONST_DOUBLE:
3440 case CC0:
3441 case PC:
32131a9c
RK
3442 case USE:
3443 case CLOBBER:
3444 return;
3445
3446 case SUBREG:
3447 if (GET_CODE (SUBREG_REG (x)) == REG
3448 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3449 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3450 = GET_MODE_SIZE (GET_MODE (x));
3451 return;
3452 }
3453
3454 fmt = GET_RTX_FORMAT (code);
3455 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3456 {
3457 if (fmt[i] == 'e')
3458 scan_paradoxical_subregs (XEXP (x, i));
3459 else if (fmt[i] == 'E')
3460 {
3461 register int j;
3462 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3463 scan_paradoxical_subregs (XVECEXP (x, i, j));
3464 }
3465 }
3466}
3467\f
32131a9c
RK
3468static int
3469hard_reg_use_compare (p1, p2)
3470 struct hard_reg_n_uses *p1, *p2;
3471{
3472 int tem = p1->uses - p2->uses;
3473 if (tem != 0) return tem;
3474 /* If regs are equally good, sort by regno,
3475 so that the results of qsort leave nothing to chance. */
3476 return p1->regno - p2->regno;
3477}
3478
3479/* Choose the order to consider regs for use as reload registers
3480 based on how much trouble would be caused by spilling one.
3481 Store them in order of decreasing preference in potential_reload_regs. */
3482
3483static void
3484order_regs_for_reload ()
3485{
3486 register int i;
3487 register int o = 0;
3488 int large = 0;
3489
3490 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3491
3492 CLEAR_HARD_REG_SET (bad_spill_regs);
3493
3494 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3495 potential_reload_regs[i] = -1;
3496
3497 /* Count number of uses of each hard reg by pseudo regs allocated to it
3498 and then order them by decreasing use. */
3499
3500 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3501 {
3502 hard_reg_n_uses[i].uses = 0;
3503 hard_reg_n_uses[i].regno = i;
3504 }
3505
3506 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3507 {
3508 int regno = reg_renumber[i];
3509 if (regno >= 0)
3510 {
3511 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3512 while (regno < lim)
3513 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3514 }
3515 large += reg_n_refs[i];
3516 }
3517
3518 /* Now fixed registers (which cannot safely be used for reloading)
3519 get a very high use count so they will be considered least desirable.
3520 Registers used explicitly in the rtl code are almost as bad. */
3521
3522 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3523 {
3524 if (fixed_regs[i])
3525 {
3526 hard_reg_n_uses[i].uses += 2 * large + 2;
3527 SET_HARD_REG_BIT (bad_spill_regs, i);
3528 }
3529 else if (regs_explicitly_used[i])
3530 {
3531 hard_reg_n_uses[i].uses += large + 1;
546b63fb 3532#ifndef SMALL_REGISTER_CLASSES
32131a9c
RK
3533 /* ??? We are doing this here because of the potential that
3534 bad code may be generated if a register explicitly used in
3535 an insn was used as a spill register for that insn. But
3536 not using these are spill registers may lose on some machine.
3537 We'll have to see how this works out. */
3538 SET_HARD_REG_BIT (bad_spill_regs, i);
546b63fb 3539#endif
32131a9c
RK
3540 }
3541 }
3ec2ea3e
DE
3542 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3543 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
32131a9c
RK
3544
3545#ifdef ELIMINABLE_REGS
3546 /* If registers other than the frame pointer are eliminable, mark them as
3547 poor choices. */
3548 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3549 {
3550 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3551 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3552 }
3553#endif
3554
3555 /* Prefer registers not so far used, for use in temporary loading.
3556 Among them, if REG_ALLOC_ORDER is defined, use that order.
3557 Otherwise, prefer registers not preserved by calls. */
3558
3559#ifdef REG_ALLOC_ORDER
3560 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3561 {
3562 int regno = reg_alloc_order[i];
3563
3564 if (hard_reg_n_uses[regno].uses == 0)
3565 potential_reload_regs[o++] = regno;
3566 }
3567#else
3568 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3569 {
3570 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3571 potential_reload_regs[o++] = i;
3572 }
3573 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3574 {
3575 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3576 potential_reload_regs[o++] = i;
3577 }
3578#endif
3579
3580 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3581 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3582
3583 /* Now add the regs that are already used,
3584 preferring those used less often. The fixed and otherwise forbidden
3585 registers will be at the end of this list. */
3586
3587 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3588 if (hard_reg_n_uses[i].uses != 0)
3589 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3590}
3591\f
a5339699
RK
/* qsort comparison used in reload_as_needed to sort the spill regs
   into ascending numerical order.  (Never returns 0; the spill regs
   are distinct, so equal keys cannot occur.)  */

static int
compare_spill_regs (r1, r2)
     short *r1, *r2;
{
  if (*r1 < *r2)
    return -1;
  return 1;
}
3599
32131a9c
RK
3600/* Reload pseudo-registers into hard regs around each insn as needed.
3601 Additional register load insns are output before the insn that needs it
3602 and perhaps store insns after insns that modify the reloaded pseudo reg.
3603
3604 reg_last_reload_reg and reg_reloaded_contents keep track of
d08ea79f 3605 which registers are already available in reload registers.
32131a9c
RK
3606 We update these for the reloads that we perform,
3607 as the insns are scanned. */
3608
3609static void
3610reload_as_needed (first, live_known)
3611 rtx first;
3612 int live_known;
3613{
3614 register rtx insn;
3615 register int i;
3616 int this_block = 0;
3617 rtx x;
3618 rtx after_call = 0;
3619
3620 bzero (spill_reg_rtx, sizeof spill_reg_rtx);
3621 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3622 bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
3623 reg_has_output_reload = (char *) alloca (max_regno);
3624 for (i = 0; i < n_spills; i++)
3625 {
3626 reg_reloaded_contents[i] = -1;
3627 reg_reloaded_insn[i] = 0;
3628 }
3629
3630 /* Reset all offsets on eliminable registers to their initial values. */
3631#ifdef ELIMINABLE_REGS
3632 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3633 {
3634 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
510dd77e 3635 reg_eliminate[i].initial_offset);
32131a9c
RK
3636 reg_eliminate[i].previous_offset
3637 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3638 }
3639#else
3640 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3641 reg_eliminate[0].previous_offset
3642 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3643#endif
3644
3645 num_not_at_initial_offset = 0;
3646
a5339699
RK
3647 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
3648 pack registers with group needs. */
3649 if (n_spills > 1)
3650 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3651
32131a9c
RK
3652 for (insn = first; insn;)
3653 {
3654 register rtx next = NEXT_INSN (insn);
3655
3656 /* Notice when we move to a new basic block. */
aa2c50d6 3657 if (live_known && this_block + 1 < n_basic_blocks
32131a9c
RK
3658 && insn == basic_block_head[this_block+1])
3659 ++this_block;
3660
3661 /* If we pass a label, copy the offsets from the label information
3662 into the current offsets of each elimination. */
3663 if (GET_CODE (insn) == CODE_LABEL)
2a4b5f3b
RK
3664 {
3665 num_not_at_initial_offset = 0;
3666 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3667 {
3668 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3669 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
1d0d98f3
RK
3670 if (reg_eliminate[i].can_eliminate
3671 && (reg_eliminate[i].offset
3672 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
3673 num_not_at_initial_offset++;
3674 }
3675 }
32131a9c
RK
3676
3677 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3678 {
3679 rtx avoid_return_reg = 0;
3680
3681#ifdef SMALL_REGISTER_CLASSES
3682 /* Set avoid_return_reg if this is an insn
3683 that might use the value of a function call. */
3684 if (GET_CODE (insn) == CALL_INSN)
3685 {
3686 if (GET_CODE (PATTERN (insn)) == SET)
3687 after_call = SET_DEST (PATTERN (insn));
3688 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3689 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3690 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3691 else
3692 after_call = 0;
3693 }
3694 else if (after_call != 0
3695 && !(GET_CODE (PATTERN (insn)) == SET
3696 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3697 {
2b979c57 3698 if (reg_referenced_p (after_call, PATTERN (insn)))
32131a9c
RK
3699 avoid_return_reg = after_call;
3700 after_call = 0;
3701 }
3702#endif /* SMALL_REGISTER_CLASSES */
3703
2758481d
RS
3704 /* If this is a USE and CLOBBER of a MEM, ensure that any
3705 references to eliminable registers have been removed. */
3706
3707 if ((GET_CODE (PATTERN (insn)) == USE
3708 || GET_CODE (PATTERN (insn)) == CLOBBER)
3709 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3710 XEXP (XEXP (PATTERN (insn), 0), 0)
3711 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
fb3821f7 3712 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
2758481d 3713
32131a9c
RK
3714 /* If we need to do register elimination processing, do so.
3715 This might delete the insn, in which case we are done. */
3716 if (num_eliminable && GET_MODE (insn) == QImode)
3717 {
3718 eliminate_regs_in_insn (insn, 1);
3719 if (GET_CODE (insn) == NOTE)
3720 {
3721 insn = next;
3722 continue;
3723 }
3724 }
3725
3726 if (GET_MODE (insn) == VOIDmode)
3727 n_reloads = 0;
3728 /* First find the pseudo regs that must be reloaded for this insn.
3729 This info is returned in the tables reload_... (see reload.h).
3730 Also modify the body of INSN by substituting RELOAD
3731 rtx's for those pseudo regs. */
3732 else
3733 {
3734 bzero (reg_has_output_reload, max_regno);
3735 CLEAR_HARD_REG_SET (reg_is_output_reload);
3736
3737 find_reloads (insn, 1, spill_indirect_levels, live_known,
3738 spill_reg_order);
3739 }
3740
3741 if (n_reloads > 0)
3742 {
3c3eeea6
RK
3743 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3744 rtx p;
32131a9c
RK
3745 int class;
3746
3747 /* If this block has not had spilling done for a
546b63fb
RK
3748 particular clas and we have any non-optionals that need a
3749 spill reg in that class, abort. */
32131a9c
RK
3750
3751 for (class = 0; class < N_REG_CLASSES; class++)
3752 if (basic_block_needs[class] != 0
3753 && basic_block_needs[class][this_block] == 0)
3754 for (i = 0; i < n_reloads; i++)
546b63fb
RK
3755 if (class == (int) reload_reg_class[i]
3756 && reload_reg_rtx[i] == 0
3757 && ! reload_optional[i]
3758 && (reload_in[i] != 0 || reload_out[i] != 0
3759 || reload_secondary_p[i] != 0))
3760 abort ();
32131a9c
RK
3761
3762 /* Now compute which reload regs to reload them into. Perhaps
3763 reusing reload regs from previous insns, or else output
3764 load insns to reload them. Maybe output store insns too.
3765 Record the choices of reload reg in reload_reg_rtx. */
3766 choose_reload_regs (insn, avoid_return_reg);
3767
546b63fb
RK
3768#ifdef SMALL_REGISTER_CLASSES
3769 /* Merge any reloads that we didn't combine for fear of
3770 increasing the number of spill registers needed but now
3771 discover can be safely merged. */
3772 merge_assigned_reloads (insn);
3773#endif
3774
32131a9c
RK
3775 /* Generate the insns to reload operands into or out of
3776 their reload regs. */
3777 emit_reload_insns (insn);
3778
3779 /* Substitute the chosen reload regs from reload_reg_rtx
3780 into the insn's body (or perhaps into the bodies of other
3781 load and store insn that we just made for reloading
3782 and that we moved the structure into). */
3783 subst_reloads ();
3c3eeea6
RK
3784
3785 /* If this was an ASM, make sure that all the reload insns
3786 we have generated are valid. If not, give an error
3787 and delete them. */
3788
3789 if (asm_noperands (PATTERN (insn)) >= 0)
3790 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3791 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3792 && (recog_memoized (p) < 0
3793 || (insn_extract (p),
3794 ! constrain_operands (INSN_CODE (p), 1))))
3795 {
3796 error_for_asm (insn,
3797 "`asm' operand requires impossible reload");
3798 PUT_CODE (p, NOTE);
3799 NOTE_SOURCE_FILE (p) = 0;
3800 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3801 }
32131a9c
RK
3802 }
3803 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3804 is no longer validly lying around to save a future reload.
3805 Note that this does not detect pseudos that were reloaded
3806 for this insn in order to be stored in
3807 (obeying register constraints). That is correct; such reload
3808 registers ARE still valid. */
3809 note_stores (PATTERN (insn), forget_old_reloads_1);
3810
3811 /* There may have been CLOBBER insns placed after INSN. So scan
3812 between INSN and NEXT and use them to forget old reloads. */
3813 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3814 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3815 note_stores (PATTERN (x), forget_old_reloads_1);
3816
3817#ifdef AUTO_INC_DEC
3818 /* Likewise for regs altered by auto-increment in this insn.
3819 But note that the reg-notes are not changed by reloading:
3820 they still contain the pseudo-regs, not the spill regs. */
3821 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3822 if (REG_NOTE_KIND (x) == REG_INC)
3823 {
3824 /* See if this pseudo reg was reloaded in this insn.
3825 If so, its last-reload info is still valid
3826 because it is based on this insn's reload. */
3827 for (i = 0; i < n_reloads; i++)
3828 if (reload_out[i] == XEXP (x, 0))
3829 break;
3830
08fb99fa 3831 if (i == n_reloads)
9a881562 3832 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
32131a9c
RK
3833 }
3834#endif
3835 }
3836 /* A reload reg's contents are unknown after a label. */
3837 if (GET_CODE (insn) == CODE_LABEL)
3838 for (i = 0; i < n_spills; i++)
3839 {
3840 reg_reloaded_contents[i] = -1;
3841 reg_reloaded_insn[i] = 0;
3842 }
3843
3844 /* Don't assume a reload reg is still good after a call insn
3845 if it is a call-used reg. */
546b63fb 3846 else if (GET_CODE (insn) == CALL_INSN)
32131a9c
RK
3847 for (i = 0; i < n_spills; i++)
3848 if (call_used_regs[spill_regs[i]])
3849 {
3850 reg_reloaded_contents[i] = -1;
3851 reg_reloaded_insn[i] = 0;
3852 }
3853
3854 /* In case registers overlap, allow certain insns to invalidate
3855 particular hard registers. */
3856
3857#ifdef INSN_CLOBBERS_REGNO_P
3858 for (i = 0 ; i < n_spills ; i++)
3859 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3860 {
3861 reg_reloaded_contents[i] = -1;
3862 reg_reloaded_insn[i] = 0;
3863 }
3864#endif
3865
3866 insn = next;
3867
3868#ifdef USE_C_ALLOCA
3869 alloca (0);
3870#endif
3871 }
3872}
3873
3874/* Discard all record of any value reloaded from X,
3875 or reloaded in X from someplace else;
3876 unless X is an output reload reg of the current insn.
3877
3878 X may be a hard reg (the reload reg)
3879 or it may be a pseudo reg that was reloaded from. */
3880
3881static void
9a881562 3882forget_old_reloads_1 (x, ignored)
32131a9c 3883 rtx x;
9a881562 3884 rtx ignored;
32131a9c
RK
3885{
3886 register int regno;
3887 int nr;
0a2e51a9
RS
3888 int offset = 0;
3889
3890 /* note_stores does give us subregs of hard regs. */
3891 while (GET_CODE (x) == SUBREG)
3892 {
3893 offset += SUBREG_WORD (x);
3894 x = SUBREG_REG (x);
3895 }
32131a9c
RK
3896
3897 if (GET_CODE (x) != REG)
3898 return;
3899
0a2e51a9 3900 regno = REGNO (x) + offset;
32131a9c
RK
3901
3902 if (regno >= FIRST_PSEUDO_REGISTER)
3903 nr = 1;
3904 else
3905 {
3906 int i;
3907 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3908 /* Storing into a spilled-reg invalidates its contents.
3909 This can happen if a block-local pseudo is allocated to that reg
3910 and it wasn't spilled because this block's total need is 0.
3911 Then some insn might have an optional reload and use this reg. */
3912 for (i = 0; i < nr; i++)
3913 if (spill_reg_order[regno + i] >= 0
3914 /* But don't do this if the reg actually serves as an output
3915 reload reg in the current instruction. */
3916 && (n_reloads == 0
3917 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3918 {
3919 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3920 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3921 }
3922 }
3923
3924 /* Since value of X has changed,
3925 forget any value previously copied from it. */
3926
3927 while (nr-- > 0)
3928 /* But don't forget a copy if this is the output reload
3929 that establishes the copy's validity. */
3930 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3931 reg_last_reload_reg[regno + nr] = 0;
3932}
3933\f
3934/* For each reload, the mode of the reload register. */
3935static enum machine_mode reload_mode[MAX_RELOADS];
3936
3937/* For each reload, the largest number of registers it will require. */
3938static int reload_nregs[MAX_RELOADS];
3939
3940/* Comparison function for qsort to decide which of two reloads
3941 should be handled first. *P1 and *P2 are the reload numbers. */
3942
3943static int
3944reload_reg_class_lower (p1, p2)
3945 short *p1, *p2;
3946{
3947 register int r1 = *p1, r2 = *p2;
3948 register int t;
a8fdc208 3949
32131a9c
RK
3950 /* Consider required reloads before optional ones. */
3951 t = reload_optional[r1] - reload_optional[r2];
3952 if (t != 0)
3953 return t;
3954
3955 /* Count all solitary classes before non-solitary ones. */
3956 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3957 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3958 if (t != 0)
3959 return t;
3960
3961 /* Aside from solitaires, consider all multi-reg groups first. */
3962 t = reload_nregs[r2] - reload_nregs[r1];
3963 if (t != 0)
3964 return t;
3965
3966 /* Consider reloads in order of increasing reg-class number. */
3967 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3968 if (t != 0)
3969 return t;
3970
3971 /* If reloads are equally urgent, sort by reload number,
3972 so that the results of qsort leave nothing to chance. */
3973 return r1 - r2;
3974}
3975\f
3976/* The following HARD_REG_SETs indicate when each hard register is
3977 used for a reload of various parts of the current insn. */
3978
3979/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
3980static HARD_REG_SET reload_reg_used;
546b63fb
RK
3981/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
3982static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
3983/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
3984static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
3985/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
3986static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
3987/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
3988static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c
RK
3989/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
3990static HARD_REG_SET reload_reg_used_in_op_addr;
546b63fb
RK
3991/* If reg is in use for a RELOAD_FOR_INSN reload. */
3992static HARD_REG_SET reload_reg_used_in_insn;
3993/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
3994static HARD_REG_SET reload_reg_used_in_other_addr;
32131a9c
RK
3995
3996/* If reg is in use as a reload reg for any sort of reload. */
3997static HARD_REG_SET reload_reg_used_at_all;
3998
be7ae2a4
RK
3999/* If reg is use as an inherited reload. We just mark the first register
4000 in the group. */
4001static HARD_REG_SET reload_reg_used_for_inherit;
4002
546b63fb
RK
4003/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4004 TYPE. MODE is used to indicate how many consecutive regs are
4005 actually used. */
32131a9c
RK
4006
4007static void
546b63fb 4008mark_reload_reg_in_use (regno, opnum, type, mode)
32131a9c 4009 int regno;
546b63fb
RK
4010 int opnum;
4011 enum reload_type type;
32131a9c
RK
4012 enum machine_mode mode;
4013{
4014 int nregs = HARD_REGNO_NREGS (regno, mode);
4015 int i;
4016
4017 for (i = regno; i < nregs + regno; i++)
4018 {
546b63fb 4019 switch (type)
32131a9c
RK
4020 {
4021 case RELOAD_OTHER:
4022 SET_HARD_REG_BIT (reload_reg_used, i);
4023 break;
4024
546b63fb
RK
4025 case RELOAD_FOR_INPUT_ADDRESS:
4026 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
32131a9c
RK
4027 break;
4028
546b63fb
RK
4029 case RELOAD_FOR_OUTPUT_ADDRESS:
4030 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
32131a9c
RK
4031 break;
4032
4033 case RELOAD_FOR_OPERAND_ADDRESS:
4034 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4035 break;
4036
546b63fb
RK
4037 case RELOAD_FOR_OTHER_ADDRESS:
4038 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4039 break;
4040
32131a9c 4041 case RELOAD_FOR_INPUT:
546b63fb 4042 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
32131a9c
RK
4043 break;
4044
4045 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4046 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4047 break;
4048
4049 case RELOAD_FOR_INSN:
4050 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
32131a9c
RK
4051 break;
4052 }
4053
4054 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4055 }
4056}
4057
be7ae2a4
RK
4058/* Similarly, but show REGNO is no longer in use for a reload. */
4059
4060static void
4061clear_reload_reg_in_use (regno, opnum, type, mode)
4062 int regno;
4063 int opnum;
4064 enum reload_type type;
4065 enum machine_mode mode;
4066{
4067 int nregs = HARD_REGNO_NREGS (regno, mode);
4068 int i;
4069
4070 for (i = regno; i < nregs + regno; i++)
4071 {
4072 switch (type)
4073 {
4074 case RELOAD_OTHER:
4075 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4076 break;
4077
4078 case RELOAD_FOR_INPUT_ADDRESS:
4079 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4080 break;
4081
4082 case RELOAD_FOR_OUTPUT_ADDRESS:
4083 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4084 break;
4085
4086 case RELOAD_FOR_OPERAND_ADDRESS:
4087 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4088 break;
4089
4090 case RELOAD_FOR_OTHER_ADDRESS:
4091 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4092 break;
4093
4094 case RELOAD_FOR_INPUT:
4095 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4096 break;
4097
4098 case RELOAD_FOR_OUTPUT:
4099 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4100 break;
4101
4102 case RELOAD_FOR_INSN:
4103 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4104 break;
4105 }
4106 }
4107}
4108
32131a9c 4109/* 1 if reg REGNO is free as a reload reg for a reload of the sort
546b63fb 4110 specified by OPNUM and TYPE. */
32131a9c
RK
4111
4112static int
546b63fb 4113reload_reg_free_p (regno, opnum, type)
32131a9c 4114 int regno;
546b63fb
RK
4115 int opnum;
4116 enum reload_type type;
32131a9c 4117{
546b63fb
RK
4118 int i;
4119
4120 /* In use for a RELOAD_OTHER means it's not available for anything except
4121 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4122 to be used only for inputs. */
4123
4124 if (type != RELOAD_FOR_OTHER_ADDRESS
4125 && TEST_HARD_REG_BIT (reload_reg_used, regno))
32131a9c 4126 return 0;
546b63fb
RK
4127
4128 switch (type)
32131a9c
RK
4129 {
4130 case RELOAD_OTHER:
4131 /* In use for anything means not available for a RELOAD_OTHER. */
4132 return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);
4133
4134 /* The other kinds of use can sometimes share a register. */
4135 case RELOAD_FOR_INPUT:
546b63fb
RK
4136 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4137 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4138 return 0;
4139
4140 /* If it is used for some other input, can't use it. */
4141 for (i = 0; i < reload_n_operands; i++)
4142 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4143 return 0;
4144
4145 /* If it is used in a later operand's address, can't use it. */
4146 for (i = opnum + 1; i < reload_n_operands; i++)
4147 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4148 return 0;
4149
4150 return 1;
4151
4152 case RELOAD_FOR_INPUT_ADDRESS:
4153 /* Can't use a register if it is used for an input address for this
4154 operand or used as an input in an earlier one. */
4155 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4156 return 0;
4157
4158 for (i = 0; i < opnum; i++)
4159 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4160 return 0;
4161
4162 return 1;
4163
4164 case RELOAD_FOR_OUTPUT_ADDRESS:
4165 /* Can't use a register if it is used for an output address for this
4166 operand or used as an output in this or a later operand. */
4167 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4168 return 0;
4169
4170 for (i = opnum; i < reload_n_operands; i++)
4171 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4172 return 0;
4173
4174 return 1;
4175
32131a9c 4176 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4177 for (i = 0; i < reload_n_operands; i++)
4178 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4179 return 0;
4180
4181 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4182 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4183
32131a9c 4184 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4185 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4186 outputs, or an operand address for this or an earlier output. */
4187 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4188 return 0;
4189
4190 for (i = 0; i < reload_n_operands; i++)
4191 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4192 return 0;
4193
4194 for (i = 0; i <= opnum; i++)
4195 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4196 return 0;
4197
4198 return 1;
4199
4200 case RELOAD_FOR_INSN:
4201 for (i = 0; i < reload_n_operands; i++)
4202 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4203 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4204 return 0;
4205
4206 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4207 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4208
4209 case RELOAD_FOR_OTHER_ADDRESS:
4210 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4211 }
4212 abort ();
4213}
4214
4215/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4216 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4217 is not in use for a reload in any prior part of the insn.
4218
4219 We can assume that the reload reg was already tested for availability
4220 at the time it is needed, and we should not check this again,
4221 in case the reg has already been marked in use. */
4222
4223static int
546b63fb 4224reload_reg_free_before_p (regno, opnum, type)
32131a9c 4225 int regno;
546b63fb
RK
4226 int opnum;
4227 enum reload_type type;
32131a9c 4228{
546b63fb
RK
4229 int i;
4230
4231 switch (type)
32131a9c 4232 {
546b63fb
RK
4233 case RELOAD_FOR_OTHER_ADDRESS:
4234 /* These always come first. */
32131a9c
RK
4235 return 1;
4236
546b63fb
RK
4237 case RELOAD_OTHER:
4238 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4239
32131a9c 4240 /* If this use is for part of the insn,
546b63fb
RK
4241 check the reg is not in use for any prior part. It is tempting
4242 to try to do this by falling through from objecs that occur
4243 later in the insn to ones that occur earlier, but that will not
4244 correctly take into account the fact that here we MUST ignore
4245 things that would prevent the register from being allocated in
4246 the first place, since we know that it was allocated. */
4247
4248 case RELOAD_FOR_OUTPUT_ADDRESS:
4249 /* Earlier reloads are for earlier outputs or their addresses,
4250 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4251 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4252 RELOAD_OTHER).. */
4253 for (i = 0; i < opnum; i++)
4254 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4255 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4256 return 0;
4257
4258 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
32131a9c 4259 return 0;
546b63fb
RK
4260
4261 for (i = 0; i < reload_n_operands; i++)
4262 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4263 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4264 return 0;
4265
4266 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4267 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4268 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4269
32131a9c 4270 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4271 /* This can't be used in the output address for this operand and
4272 anything that can't be used for it, except that we've already
4273 tested for RELOAD_FOR_INSN objects. */
4274
4275 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
32131a9c 4276 return 0;
546b63fb
RK
4277
4278 for (i = 0; i < opnum; i++)
4279 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4280 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4281 return 0;
4282
4283 for (i = 0; i < reload_n_operands; i++)
4284 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4285 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4286 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4287 return 0;
4288
4289 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4290
32131a9c 4291 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4292 case RELOAD_FOR_INSN:
4293 /* These can't conflict with inputs, or each other, so all we have to
4294 test is input addresses and the addresses of OTHER items. */
4295
4296 for (i = 0; i < reload_n_operands; i++)
4297 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4298 return 0;
4299
4300 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4301
32131a9c 4302 case RELOAD_FOR_INPUT:
546b63fb
RK
4303 /* The only things earlier are the address for this and
4304 earlier inputs, other inputs (which we know we don't conflict
4305 with), and addresses of RELOAD_OTHER objects. */
4306
4307 for (i = 0; i <= opnum; i++)
4308 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4309 return 0;
4310
4311 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4312
4313 case RELOAD_FOR_INPUT_ADDRESS:
4314 /* Similarly, all we have to check is for use in earlier inputs'
4315 addresses. */
4316 for (i = 0; i < opnum; i++)
4317 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4318 return 0;
4319
4320 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4321 }
4322 abort ();
4323}
4324
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      /* Outputs and their addresses all come after the inputs, so a use
	 there means the value does not survive to the end.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs and with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      opnum = -1;

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
	  return 0;

      return 1;
    }

  /* Unknown reload type: this should be unreachable.  */
  abort ();
}
4434\f
351aa1c1
RK
4435/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4436 Return 0 otherwise.
4437
4438 This function uses the same algorithm as reload_reg_free_p above. */
4439
4440static int
4441reloads_conflict (r1, r2)
4442 int r1, r2;
4443{
4444 enum reload_type r1_type = reload_when_needed[r1];
4445 enum reload_type r2_type = reload_when_needed[r2];
4446 int r1_opnum = reload_opnum[r1];
4447 int r2_opnum = reload_opnum[r2];
4448
4449 /* RELOAD_OTHER conflicts with everything except
4450 RELOAD_FOR_OTHER_ADDRESS. */
4451
4452 if ((r1_type == RELOAD_OTHER && r2_type != RELOAD_FOR_OTHER_ADDRESS)
4453 || (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS))
4454 return 1;
4455
4456 /* Otherwise, check conflicts differently for each type. */
4457
4458 switch (r1_type)
4459 {
4460 case RELOAD_FOR_INPUT:
4461 return (r2_type == RELOAD_FOR_INSN
4462 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4463 || r2_type == RELOAD_FOR_INPUT
4464 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4465
4466 case RELOAD_FOR_INPUT_ADDRESS:
4467 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4468 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4469
4470 case RELOAD_FOR_OUTPUT_ADDRESS:
4471 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4472 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4473
4474 case RELOAD_FOR_OPERAND_ADDRESS:
4475 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4476 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4477
4478 case RELOAD_FOR_OUTPUT:
4479 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
0aaf4a58 4480 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
351aa1c1
RK
4481 && r2_opnum >= r1_opnum));
4482
4483 case RELOAD_FOR_INSN:
4484 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4485 || r2_type == RELOAD_FOR_INSN
4486 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4487
4488 case RELOAD_FOR_OTHER_ADDRESS:
4489 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4490
4491 default:
4492 abort ();
4493 }
4494}
4495\f
32131a9c
RK
/* Vector of reload-numbers showing the order in which the reloads should
   be processed.  */
short reload_order[MAX_RELOADS];

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
rtx reload_inheritance_insn[MAX_RELOADS];

/* If non-zero, this is a place to get the value of the reload,
   rather than using reload_in.  */
rtx reload_override_in[MAX_RELOADS];

/* For each reload, the index in spill_regs of the spill register used,
   or -1 if we did not need one of the spill registers for this reload.  */
int reload_spill_index[MAX_RELOADS];

/* Index of last register assigned as a spill register.  We allocate in
   a round-robin fashion.  */

static int last_spill_reg = 0;
32131a9c
RK
4520
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is non-zero if this is the last reload for the insn being
   processed.

   Set reload_reg_rtx[R] to the register allocated.

   If NOERROR is nonzero, we return 1 if successful,
   or 0 if we couldn't find a spill reg and we didn't change anything.  */

static int
allocate_reload_reg (r, insn, last_reload, noerror)
     int r;
     rtx insn;
     int last_reload;
     int noerror;
{
  int i;
  int pass;
  int count;
  rtx new;
  int regno;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = reload_nregs[r] > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  Don't do this, however, when we have
	 group needs and failure would be fatal; if we only have a relatively
	 small number of spill registers, and more than one of them has
	 group needs, then by starting in the middle, we may end up
	 allocating the first one in such a way that we are not left with
	 sufficient groups to handle the rest.  */

      if (noerror || ! force_group)
	i = last_spill_reg;
      else
	i = -1;

      for (count = 0; count < n_spills; count++)
	{
	  int class = (int) reload_reg_class[r];

	  /* Advance round-robin through the spill registers.  */
	  i = (i + 1) % n_spills;

	  if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
				 reload_when_needed[r])
	      && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 spill_regs[i])
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      spill_regs[i]))))
	    {
	      int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.
		 Also, don't use for a group registers that are
		 needed for nongroups.  */
	      if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
		while (nr > 1)
		  {
		    regno = spill_regs[i] + nr - 1;
		    if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
			  && spill_reg_order[regno] >= 0
			  && reload_reg_free_p (regno, reload_opnum[r],
						reload_when_needed[r])
			  && ! TEST_HARD_REG_BIT (counted_for_nongroups,
						  regno)))
		      break;
		    nr--;
		  }
	      /* NR == 1 here means every register of the group checked
		 out; take this starting register.  */
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count == n_spills)
    {
      if (noerror)
	return 0;
      goto failure;
    }

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  new = spill_reg_rtx[i];

  /* Reuse the cached REG rtx only if it has the mode we need.  */
  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new
      = gen_rtx (REG, reload_mode[r], spill_regs[i]);

  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
    {
      enum machine_mode test_mode = VOIDmode;
      if (reload_in[r])
	test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, reload_mode[r].
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
	if (! (reload_out[r] != 0
	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
	  {
	    /* The reg is OK.  */
	    last_spill_reg = i;

	    /* Mark as in use for this insn the reload regs we use
	       for this.  */
	    mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
				    reload_when_needed[r], reload_mode[r]);

	    reload_reg_rtx[r] = new;
	    reload_spill_index[r] = i;
	    return 1;
	  }
    }

  /* The reg is not OK.  */
  if (noerror)
    return 0;

 failure:
  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    abort ();

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.  */
  error_for_asm (insn,
		 "`asm' operand constraint incompatible with operand size");
  reload_in[r] = 0;
  reload_out[r] = 0;
  reload_reg_rtx[r] = 0;
  reload_optional[r] = 1;
  reload_secondary_p[r] = 1;

  return 1;
}
4719\f
4720/* Assign hard reg targets for the pseudo-registers we must reload
4721 into hard regs for this insn.
4722 Also output the instructions to copy them in and out of the hard regs.
4723
4724 For machines with register classes, we are responsible for
4725 finding a reload reg in the proper class. */
4726
4727static void
4728choose_reload_regs (insn, avoid_return_reg)
4729 rtx insn;
32131a9c
RK
4730 rtx avoid_return_reg;
4731{
4732 register int i, j;
4733 int max_group_size = 1;
4734 enum reg_class group_class = NO_REGS;
4735 int inheritance;
4736
4737 rtx save_reload_reg_rtx[MAX_RELOADS];
4738 char save_reload_inherited[MAX_RELOADS];
4739 rtx save_reload_inheritance_insn[MAX_RELOADS];
4740 rtx save_reload_override_in[MAX_RELOADS];
4741 int save_reload_spill_index[MAX_RELOADS];
4742 HARD_REG_SET save_reload_reg_used;
546b63fb
RK
4743 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4744 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4745 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4746 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c 4747 HARD_REG_SET save_reload_reg_used_in_op_addr;
546b63fb
RK
4748 HARD_REG_SET save_reload_reg_used_in_insn;
4749 HARD_REG_SET save_reload_reg_used_in_other_addr;
32131a9c
RK
4750 HARD_REG_SET save_reload_reg_used_at_all;
4751
4752 bzero (reload_inherited, MAX_RELOADS);
4753 bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4754 bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));
4755
4756 CLEAR_HARD_REG_SET (reload_reg_used);
4757 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
32131a9c 4758 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
546b63fb
RK
4759 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4760 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
32131a9c 4761
546b63fb
RK
4762 for (i = 0; i < reload_n_operands; i++)
4763 {
4764 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4765 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4766 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4767 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4768 }
32131a9c
RK
4769
4770#ifdef SMALL_REGISTER_CLASSES
4771 /* Don't bother with avoiding the return reg
4772 if we have no mandatory reload that could use it. */
4773 if (avoid_return_reg)
4774 {
4775 int do_avoid = 0;
4776 int regno = REGNO (avoid_return_reg);
4777 int nregs
4778 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4779 int r;
4780
4781 for (r = regno; r < regno + nregs; r++)
4782 if (spill_reg_order[r] >= 0)
4783 for (j = 0; j < n_reloads; j++)
4784 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4785 && (reload_in[j] != 0 || reload_out[j] != 0
4786 || reload_secondary_p[j])
4787 &&
4788 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4789 do_avoid = 1;
4790 if (!do_avoid)
4791 avoid_return_reg = 0;
4792 }
4793#endif /* SMALL_REGISTER_CLASSES */
4794
4795#if 0 /* Not needed, now that we can always retry without inheritance. */
4796 /* See if we have more mandatory reloads than spill regs.
4797 If so, then we cannot risk optimizations that could prevent
a8fdc208 4798 reloads from sharing one spill register.
32131a9c
RK
4799
4800 Since we will try finding a better register than reload_reg_rtx
4801 unless it is equal to reload_in or reload_out, count such reloads. */
4802
4803 {
4804 int tem = 0;
4805#ifdef SMALL_REGISTER_CLASSES
4806 int tem = (avoid_return_reg != 0);
a8fdc208 4807#endif
32131a9c
RK
4808 for (j = 0; j < n_reloads; j++)
4809 if (! reload_optional[j]
4810 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4811 && (reload_reg_rtx[j] == 0
4812 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4813 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4814 tem++;
4815 if (tem > n_spills)
4816 must_reuse = 1;
4817 }
4818#endif
4819
4820#ifdef SMALL_REGISTER_CLASSES
4821 /* Don't use the subroutine call return reg for a reload
4822 if we are supposed to avoid it. */
4823 if (avoid_return_reg)
4824 {
4825 int regno = REGNO (avoid_return_reg);
4826 int nregs
4827 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4828 int r;
4829
4830 for (r = regno; r < regno + nregs; r++)
4831 if (spill_reg_order[r] >= 0)
4832 SET_HARD_REG_BIT (reload_reg_used, r);
4833 }
4834#endif /* SMALL_REGISTER_CLASSES */
4835
4836 /* In order to be certain of getting the registers we need,
4837 we must sort the reloads into order of increasing register class.
4838 Then our grabbing of reload registers will parallel the process
a8fdc208 4839 that provided the reload registers.
32131a9c
RK
4840
4841 Also note whether any of the reloads wants a consecutive group of regs.
4842 If so, record the maximum size of the group desired and what
4843 register class contains all the groups needed by this insn. */
4844
4845 for (j = 0; j < n_reloads; j++)
4846 {
4847 reload_order[j] = j;
4848 reload_spill_index[j] = -1;
4849
4850 reload_mode[j]
546b63fb
RK
4851 = (reload_inmode[j] == VOIDmode
4852 || (GET_MODE_SIZE (reload_outmode[j])
4853 > GET_MODE_SIZE (reload_inmode[j])))
4854 ? reload_outmode[j] : reload_inmode[j];
32131a9c
RK
4855
4856 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4857
4858 if (reload_nregs[j] > 1)
4859 {
4860 max_group_size = MAX (reload_nregs[j], max_group_size);
4861 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4862 }
4863
4864 /* If we have already decided to use a certain register,
4865 don't use it in another way. */
4866 if (reload_reg_rtx[j])
546b63fb 4867 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
32131a9c
RK
4868 reload_when_needed[j], reload_mode[j]);
4869 }
4870
4871 if (n_reloads > 1)
4872 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
4873
4874 bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
4875 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4876 bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
4877 sizeof reload_inheritance_insn);
4878 bcopy (reload_override_in, save_reload_override_in,
4879 sizeof reload_override_in);
4880 bcopy (reload_spill_index, save_reload_spill_index,
4881 sizeof reload_spill_index);
4882 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4883 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
32131a9c
RK
4884 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4885 reload_reg_used_in_op_addr);
546b63fb
RK
4886 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
4887 reload_reg_used_in_insn);
4888 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
4889 reload_reg_used_in_other_addr);
4890
4891 for (i = 0; i < reload_n_operands; i++)
4892 {
4893 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
4894 reload_reg_used_in_output[i]);
4895 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
4896 reload_reg_used_in_input[i]);
4897 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
4898 reload_reg_used_in_input_addr[i]);
4899 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
4900 reload_reg_used_in_output_addr[i]);
4901 }
32131a9c 4902
58b1581b
RS
4903 /* If -O, try first with inheritance, then turning it off.
4904 If not -O, don't do inheritance.
4905 Using inheritance when not optimizing leads to paradoxes
4906 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
4907 because one side of the comparison might be inherited. */
32131a9c 4908
58b1581b 4909 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
32131a9c
RK
4910 {
4911 /* Process the reloads in order of preference just found.
4912 Beyond this point, subregs can be found in reload_reg_rtx.
4913
4914 This used to look for an existing reloaded home for all
4915 of the reloads, and only then perform any new reloads.
4916 But that could lose if the reloads were done out of reg-class order
4917 because a later reload with a looser constraint might have an old
4918 home in a register needed by an earlier reload with a tighter constraint.
4919
4920 To solve this, we make two passes over the reloads, in the order
4921 described above. In the first pass we try to inherit a reload
4922 from a previous insn. If there is a later reload that needs a
4923 class that is a proper subset of the class being processed, we must
4924 also allocate a spill register during the first pass.
4925
4926 Then make a second pass over the reloads to allocate any reloads
4927 that haven't been given registers yet. */
4928
be7ae2a4
RK
4929 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
4930
32131a9c
RK
4931 for (j = 0; j < n_reloads; j++)
4932 {
4933 register int r = reload_order[j];
4934
4935 /* Ignore reloads that got marked inoperative. */
4936 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4937 continue;
4938
4939 /* If find_reloads chose a to use reload_in or reload_out as a reload
4940 register, we don't need to chose one. Otherwise, try even if it found
4941 one since we might save an insn if we find the value lying around. */
4942 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
4943 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
4944 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
4945 continue;
4946
4947#if 0 /* No longer needed for correct operation.
4948 It might give better code, or might not; worth an experiment? */
4949 /* If this is an optional reload, we can't inherit from earlier insns
4950 until we are sure that any non-optional reloads have been allocated.
4951 The following code takes advantage of the fact that optional reloads
4952 are at the end of reload_order. */
4953 if (reload_optional[r] != 0)
4954 for (i = 0; i < j; i++)
4955 if ((reload_out[reload_order[i]] != 0
4956 || reload_in[reload_order[i]] != 0
4957 || reload_secondary_p[reload_order[i]])
4958 && ! reload_optional[reload_order[i]]
4959 && reload_reg_rtx[reload_order[i]] == 0)
4960 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
4961#endif
4962
4963 /* First see if this pseudo is already available as reloaded
4964 for a previous insn. We cannot try to inherit for reloads
4965 that are smaller than the maximum number of registers needed
4966 for groups unless the register we would allocate cannot be used
4967 for the groups.
4968
4969 We could check here to see if this is a secondary reload for
4970 an object that is already in a register of the desired class.
4971 This would avoid the need for the secondary reload register.
4972 But this is complex because we can't easily determine what
4973 objects might want to be loaded via this reload. So let a register
4974 be allocated here. In `emit_reload_insns' we suppress one of the
4975 loads in the case described above. */
4976
4977 if (inheritance)
4978 {
4979 register int regno = -1;
db660765 4980 enum machine_mode mode;
32131a9c
RK
4981
4982 if (reload_in[r] == 0)
4983 ;
4984 else if (GET_CODE (reload_in[r]) == REG)
db660765
TW
4985 {
4986 regno = REGNO (reload_in[r]);
4987 mode = GET_MODE (reload_in[r]);
4988 }
32131a9c 4989 else if (GET_CODE (reload_in_reg[r]) == REG)
db660765
TW
4990 {
4991 regno = REGNO (reload_in_reg[r]);
4992 mode = GET_MODE (reload_in_reg[r]);
4993 }
32131a9c
RK
4994#if 0
4995 /* This won't work, since REGNO can be a pseudo reg number.
4996 Also, it takes much more hair to keep track of all the things
4997 that can invalidate an inherited reload of part of a pseudoreg. */
4998 else if (GET_CODE (reload_in[r]) == SUBREG
4999 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5000 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5001#endif
5002
5003 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5004 {
5005 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5006
5007 if (reg_reloaded_contents[i] == regno
db660765
TW
5008 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5009 >= GET_MODE_SIZE (mode))
32131a9c
RK
5010 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5011 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5012 spill_regs[i])
5013 && (reload_nregs[r] == max_group_size
5014 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5015 spill_regs[i]))
546b63fb
RK
5016 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5017 reload_when_needed[r])
32131a9c 5018 && reload_reg_free_before_p (spill_regs[i],
546b63fb 5019 reload_opnum[r],
32131a9c
RK
5020 reload_when_needed[r]))
5021 {
5022 /* If a group is needed, verify that all the subsequent
5023 registers still have their values intact. */
5024 int nr
5025 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5026 int k;
5027
5028 for (k = 1; k < nr; k++)
5029 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5030 != regno)
5031 break;
5032
5033 if (k == nr)
5034 {
c74fa651
RS
5035 int i1;
5036
5037 /* We found a register that contains the
5038 value we need. If this register is the
5039 same as an `earlyclobber' operand of the
5040 current insn, just mark it as a place to
5041 reload from since we can't use it as the
5042 reload register itself. */
5043
5044 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5045 if (reg_overlap_mentioned_for_reload_p
5046 (reg_last_reload_reg[regno],
5047 reload_earlyclobbers[i1]))
5048 break;
5049
8908158d
RS
5050 if (i1 != n_earlyclobbers
5051 /* Don't really use the inherited spill reg
5052 if we need it wider than we've got it. */
5053 || (GET_MODE_SIZE (reload_mode[r])
5054 > GET_MODE_SIZE (mode)))
c74fa651
RS
5055 reload_override_in[r] = reg_last_reload_reg[regno];
5056 else
5057 {
54c40e68 5058 int k;
c74fa651
RS
5059 /* We can use this as a reload reg. */
5060 /* Mark the register as in use for this part of
5061 the insn. */
5062 mark_reload_reg_in_use (spill_regs[i],
5063 reload_opnum[r],
5064 reload_when_needed[r],
5065 reload_mode[r]);
5066 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5067 reload_inherited[r] = 1;
5068 reload_inheritance_insn[r]
5069 = reg_reloaded_insn[i];
5070 reload_spill_index[r] = i;
54c40e68
RS
5071 for (k = 0; k < nr; k++)
5072 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5073 spill_regs[i + k]);
c74fa651 5074 }
32131a9c
RK
5075 }
5076 }
5077 }
5078 }
5079
5080 /* Here's another way to see if the value is already lying around. */
5081 if (inheritance
5082 && reload_in[r] != 0
5083 && ! reload_inherited[r]
5084 && reload_out[r] == 0
5085 && (CONSTANT_P (reload_in[r])
5086 || GET_CODE (reload_in[r]) == PLUS
5087 || GET_CODE (reload_in[r]) == REG
5088 || GET_CODE (reload_in[r]) == MEM)
5089 && (reload_nregs[r] == max_group_size
5090 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5091 {
5092 register rtx equiv
5093 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
fb3821f7 5094 -1, NULL_PTR, 0, reload_mode[r]);
32131a9c
RK
5095 int regno;
5096
5097 if (equiv != 0)
5098 {
5099 if (GET_CODE (equiv) == REG)
5100 regno = REGNO (equiv);
5101 else if (GET_CODE (equiv) == SUBREG)
5102 {
f8a9e02b
RK
5103 /* This must be a SUBREG of a hard register.
5104 Make a new REG since this might be used in an
5105 address and not all machines support SUBREGs
5106 there. */
5107 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5108 equiv = gen_rtx (REG, reload_mode[r], regno);
32131a9c
RK
5109 }
5110 else
5111 abort ();
5112 }
5113
5114 /* If we found a spill reg, reject it unless it is free
5115 and of the desired class. */
5116 if (equiv != 0
5117 && ((spill_reg_order[regno] >= 0
546b63fb 5118 && ! reload_reg_free_before_p (regno, reload_opnum[r],
32131a9c
RK
5119 reload_when_needed[r]))
5120 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5121 regno)))
5122 equiv = 0;
5123
5124 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5125 equiv = 0;
5126
5127 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5128 equiv = 0;
5129
5130 /* We found a register that contains the value we need.
5131 If this register is the same as an `earlyclobber' operand
5132 of the current insn, just mark it as a place to reload from
5133 since we can't use it as the reload register itself. */
5134
5135 if (equiv != 0)
5136 for (i = 0; i < n_earlyclobbers; i++)
bfa30b22
RK
5137 if (reg_overlap_mentioned_for_reload_p (equiv,
5138 reload_earlyclobbers[i]))
32131a9c
RK
5139 {
5140 reload_override_in[r] = equiv;
5141 equiv = 0;
5142 break;
5143 }
5144
5145 /* JRV: If the equiv register we have found is explicitly
5146 clobbered in the current insn, mark but don't use, as above. */
5147
5148 if (equiv != 0 && regno_clobbered_p (regno, insn))
5149 {
5150 reload_override_in[r] = equiv;
5151 equiv = 0;
5152 }
5153
5154 /* If we found an equivalent reg, say no code need be generated
5155 to load it, and use it as our reload reg. */
3ec2ea3e 5156 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
32131a9c
RK
5157 {
5158 reload_reg_rtx[r] = equiv;
5159 reload_inherited[r] = 1;
5160 /* If it is a spill reg,
5161 mark the spill reg as in use for this insn. */
5162 i = spill_reg_order[regno];
5163 if (i >= 0)
be7ae2a4 5164 {
54c40e68
RS
5165 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5166 int k;
be7ae2a4
RK
5167 mark_reload_reg_in_use (regno, reload_opnum[r],
5168 reload_when_needed[r],
5169 reload_mode[r]);
54c40e68
RS
5170 for (k = 0; k < nr; k++)
5171 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
be7ae2a4 5172 }
32131a9c
RK
5173 }
5174 }
5175
5176 /* If we found a register to use already, or if this is an optional
5177 reload, we are done. */
5178 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5179 continue;
5180
5181#if 0 /* No longer needed for correct operation. Might or might not
5182 give better code on the average. Want to experiment? */
5183
5184 /* See if there is a later reload that has a class different from our
5185 class that intersects our class or that requires less register
5186 than our reload. If so, we must allocate a register to this
5187 reload now, since that reload might inherit a previous reload
5188 and take the only available register in our class. Don't do this
5189 for optional reloads since they will force all previous reloads
5190 to be allocated. Also don't do this for reloads that have been
5191 turned off. */
5192
5193 for (i = j + 1; i < n_reloads; i++)
5194 {
5195 int s = reload_order[i];
5196
d45cf215
RS
5197 if ((reload_in[s] == 0 && reload_out[s] == 0
5198 && ! reload_secondary_p[s])
32131a9c
RK
5199 || reload_optional[s])
5200 continue;
5201
5202 if ((reload_reg_class[s] != reload_reg_class[r]
5203 && reg_classes_intersect_p (reload_reg_class[r],
5204 reload_reg_class[s]))
5205 || reload_nregs[s] < reload_nregs[r])
5206 break;
5207 }
5208
5209 if (i == n_reloads)
5210 continue;
5211
5212 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5213#endif
5214 }
5215
5216 /* Now allocate reload registers for anything non-optional that
5217 didn't get one yet. */
5218 for (j = 0; j < n_reloads; j++)
5219 {
5220 register int r = reload_order[j];
5221
5222 /* Ignore reloads that got marked inoperative. */
5223 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5224 continue;
5225
5226 /* Skip reloads that already have a register allocated or are
5227 optional. */
5228 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5229 continue;
5230
5231 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5232 break;
5233 }
5234
5235 /* If that loop got all the way, we have won. */
5236 if (j == n_reloads)
5237 break;
5238
5239 fail:
5240 /* Loop around and try without any inheritance. */
5241 /* First undo everything done by the failed attempt
5242 to allocate with inheritance. */
5243 bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
5244 bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
5245 bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
5246 sizeof reload_inheritance_insn);
5247 bcopy (save_reload_override_in, reload_override_in,
5248 sizeof reload_override_in);
5249 bcopy (save_reload_spill_index, reload_spill_index,
5250 sizeof reload_spill_index);
5251 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5252 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
32131a9c
RK
5253 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5254 save_reload_reg_used_in_op_addr);
546b63fb
RK
5255 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5256 save_reload_reg_used_in_insn);
5257 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5258 save_reload_reg_used_in_other_addr);
5259
5260 for (i = 0; i < reload_n_operands; i++)
5261 {
5262 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5263 save_reload_reg_used_in_input[i]);
5264 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5265 save_reload_reg_used_in_output[i]);
5266 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5267 save_reload_reg_used_in_input_addr[i]);
5268 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5269 save_reload_reg_used_in_output_addr[i]);
5270 }
32131a9c
RK
5271 }
5272
5273 /* If we thought we could inherit a reload, because it seemed that
5274 nothing else wanted the same reload register earlier in the insn,
5275 verify that assumption, now that all reloads have been assigned. */
5276
5277 for (j = 0; j < n_reloads; j++)
5278 {
5279 register int r = reload_order[j];
5280
5281 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5282 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
546b63fb 5283 reload_opnum[r],
32131a9c
RK
5284 reload_when_needed[r]))
5285 reload_inherited[r] = 0;
5286
5287 /* If we found a better place to reload from,
5288 validate it in the same fashion, if it is a reload reg. */
5289 if (reload_override_in[r]
5290 && (GET_CODE (reload_override_in[r]) == REG
5291 || GET_CODE (reload_override_in[r]) == SUBREG))
5292 {
5293 int regno = true_regnum (reload_override_in[r]);
5294 if (spill_reg_order[regno] >= 0
546b63fb
RK
5295 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5296 reload_when_needed[r]))
32131a9c
RK
5297 reload_override_in[r] = 0;
5298 }
5299 }
5300
5301 /* Now that reload_override_in is known valid,
5302 actually override reload_in. */
5303 for (j = 0; j < n_reloads; j++)
5304 if (reload_override_in[j])
5305 reload_in[j] = reload_override_in[j];
5306
5307 /* If this reload won't be done because it has been cancelled or is
5308 optional and not inherited, clear reload_reg_rtx so other
5309 routines (such as subst_reloads) don't get confused. */
5310 for (j = 0; j < n_reloads; j++)
be7ae2a4
RK
5311 if (reload_reg_rtx[j] != 0
5312 && ((reload_optional[j] && ! reload_inherited[j])
5313 || (reload_in[j] == 0 && reload_out[j] == 0
5314 && ! reload_secondary_p[j])))
5315 {
5316 int regno = true_regnum (reload_reg_rtx[j]);
5317
5318 if (spill_reg_order[regno] >= 0)
5319 clear_reload_reg_in_use (regno, reload_opnum[j],
5320 reload_when_needed[j], reload_mode[j]);
5321 reload_reg_rtx[j] = 0;
5322 }
32131a9c
RK
5323
5324 /* Record which pseudos and which spill regs have output reloads. */
5325 for (j = 0; j < n_reloads; j++)
5326 {
5327 register int r = reload_order[j];
5328
5329 i = reload_spill_index[r];
5330
5331 /* I is nonneg if this reload used one of the spill regs.
5332 If reload_reg_rtx[r] is 0, this is an optional reload
5333 that we opted to ignore. */
5334 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5335 && reload_reg_rtx[r] != 0)
5336 {
5337 register int nregno = REGNO (reload_out[r]);
372e033b
RS
5338 int nr = 1;
5339
5340 if (nregno < FIRST_PSEUDO_REGISTER)
5341 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
32131a9c
RK
5342
5343 while (--nr >= 0)
372e033b
RS
5344 reg_has_output_reload[nregno + nr] = 1;
5345
5346 if (i >= 0)
32131a9c 5347 {
372e033b
RS
5348 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5349 while (--nr >= 0)
32131a9c
RK
5350 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5351 }
5352
5353 if (reload_when_needed[r] != RELOAD_OTHER
546b63fb
RK
5354 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5355 && reload_when_needed[r] != RELOAD_FOR_INSN)
32131a9c
RK
5356 abort ();
5357 }
5358 }
5359}
5360\f
546b63fb
RK
5361/* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5362 reloads of the same item for fear that we might not have enough reload
5363 registers. However, normally they will get the same reload register
5364 and hence actually need not be loaded twice.
5365
5366 Here we check for the most common case of this phenomenon: when we have
5367 a number of reloads for the same object, each of which were allocated
5368 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5369 reload, and is not modified in the insn itself. If we find such,
5370 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5371 This will not increase the number of spill registers needed and will
5372 prevent redundant code. */
5373
5374#ifdef SMALL_REGISTER_CLASSES
5375
5376static void
5377merge_assigned_reloads (insn)
5378 rtx insn;
5379{
5380 int i, j;
5381
5382 /* Scan all the reloads looking for ones that only load values and
5383 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5384 assigned and not modified by INSN. */
5385
5386 for (i = 0; i < n_reloads; i++)
5387 {
5388 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5389 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5390 || reg_set_p (reload_reg_rtx[i], insn))
5391 continue;
5392
5393 /* Look at all other reloads. Ensure that the only use of this
5394 reload_reg_rtx is in a reload that just loads the same value
5395 as we do. Note that any secondary reloads must be of the identical
5396 class since the values, modes, and result registers are the
5397 same, so we need not do anything with any secondary reloads. */
5398
5399 for (j = 0; j < n_reloads; j++)
5400 {
5401 if (i == j || reload_reg_rtx[j] == 0
5402 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5403 reload_reg_rtx[i]))
5404 continue;
5405
5406 /* If the reload regs aren't exactly the same (e.g, different modes)
5407 or if the values are different, we can't merge anything with this
5408 reload register. */
5409
5410 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5411 || reload_out[j] != 0 || reload_in[j] == 0
5412 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5413 break;
5414 }
5415
5416 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5417 we, in fact, found any matching reloads. */
5418
5419 if (j == n_reloads)
5420 {
5421 for (j = 0; j < n_reloads; j++)
5422 if (i != j && reload_reg_rtx[j] != 0
5423 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5424 {
5425 reload_when_needed[i] = RELOAD_OTHER;
5426 reload_in[j] = 0;
5427 transfer_replacements (i, j);
5428 }
5429
5430 /* If this is now RELOAD_OTHER, look for any reloads that load
5431 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5432 if they were for inputs, RELOAD_OTHER for outputs. Note that
5433 this test is equivalent to looking for reloads for this operand
5434 number. */
5435
5436 if (reload_when_needed[i] == RELOAD_OTHER)
5437 for (j = 0; j < n_reloads; j++)
5438 if (reload_in[j] != 0
5439 && reload_when_needed[i] != RELOAD_OTHER
5440 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5441 reload_in[i]))
5442 reload_when_needed[j]
5443 = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5444 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5445 }
5446 }
5447}
5448#endif /* SMALL_RELOAD_CLASSES */
5449\f
32131a9c
RK
5450/* Output insns to reload values in and out of the chosen reload regs. */
5451
5452static void
5453emit_reload_insns (insn)
5454 rtx insn;
5455{
5456 register int j;
546b63fb
RK
5457 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5458 rtx other_input_address_reload_insns = 0;
5459 rtx other_input_reload_insns = 0;
5460 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5461 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5462 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5463 rtx operand_reload_insns = 0;
32131a9c 5464 rtx following_insn = NEXT_INSN (insn);
a8efe40d 5465 rtx before_insn = insn;
32131a9c
RK
5466 int special;
5467 /* Values to be put in spill_reg_store are put here first. */
5468 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5469
546b63fb
RK
5470 for (j = 0; j < reload_n_operands; j++)
5471 input_reload_insns[j] = input_address_reload_insns[j]
5472 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5473
32131a9c
RK
5474 /* Now output the instructions to copy the data into and out of the
5475 reload registers. Do these in the order that the reloads were reported,
5476 since reloads of base and index registers precede reloads of operands
5477 and the operands may need the base and index registers reloaded. */
5478
5479 for (j = 0; j < n_reloads; j++)
5480 {
5481 register rtx old;
5482 rtx oldequiv_reg = 0;
32131a9c
RK
5483 rtx store_insn = 0;
5484
5485 old = reload_in[j];
5486 if (old != 0 && ! reload_inherited[j]
5487 && ! rtx_equal_p (reload_reg_rtx[j], old)
5488 && reload_reg_rtx[j] != 0)
5489 {
5490 register rtx reloadreg = reload_reg_rtx[j];
5491 rtx oldequiv = 0;
5492 enum machine_mode mode;
546b63fb 5493 rtx *where;
32131a9c
RK
5494
5495 /* Determine the mode to reload in.
5496 This is very tricky because we have three to choose from.
5497 There is the mode the insn operand wants (reload_inmode[J]).
5498 There is the mode of the reload register RELOADREG.
5499 There is the intrinsic mode of the operand, which we could find
5500 by stripping some SUBREGs.
5501 It turns out that RELOADREG's mode is irrelevant:
5502 we can change that arbitrarily.
5503
5504 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5505 then the reload reg may not support QImode moves, so use SImode.
5506 If foo is in memory due to spilling a pseudo reg, this is safe,
5507 because the QImode value is in the least significant part of a
5508 slot big enough for a SImode. If foo is some other sort of
5509 memory reference, then it is impossible to reload this case,
5510 so previous passes had better make sure this never happens.
5511
5512 Then consider a one-word union which has SImode and one of its
5513 members is a float, being fetched as (SUBREG:SF union:SI).
5514 We must fetch that as SFmode because we could be loading into
5515 a float-only register. In this case OLD's mode is correct.
5516
5517 Consider an immediate integer: it has VOIDmode. Here we need
5518 to get a mode from something else.
5519
5520 In some cases, there is a fourth mode, the operand's
5521 containing mode. If the insn specifies a containing mode for
5522 this operand, it overrides all others.
5523
5524 I am not sure whether the algorithm here is always right,
5525 but it does the right things in those cases. */
5526
5527 mode = GET_MODE (old);
5528 if (mode == VOIDmode)
5529 mode = reload_inmode[j];
32131a9c
RK
5530
5531#ifdef SECONDARY_INPUT_RELOAD_CLASS
5532 /* If we need a secondary register for this operation, see if
5533 the value is already in a register in that class. Don't
5534 do this if the secondary register will be used as a scratch
5535 register. */
5536
b80bba27
RK
5537 if (reload_secondary_in_reload[j] >= 0
5538 && reload_secondary_in_icode[j] == CODE_FOR_nothing
58b1581b 5539 && optimize)
32131a9c
RK
5540 oldequiv
5541 = find_equiv_reg (old, insn,
b80bba27 5542 reload_reg_class[reload_secondary_in_reload[j]],
fb3821f7 5543 -1, NULL_PTR, 0, mode);
32131a9c
RK
5544#endif
5545
5546 /* If reloading from memory, see if there is a register
5547 that already holds the same value. If so, reload from there.
5548 We can pass 0 as the reload_reg_p argument because
5549 any other reload has either already been emitted,
5550 in which case find_equiv_reg will see the reload-insn,
5551 or has yet to be emitted, in which case it doesn't matter
5552 because we will use this equiv reg right away. */
5553
58b1581b 5554 if (oldequiv == 0 && optimize
32131a9c
RK
5555 && (GET_CODE (old) == MEM
5556 || (GET_CODE (old) == REG
5557 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5558 && reg_renumber[REGNO (old)] < 0)))
546b63fb 5559 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
fb3821f7 5560 -1, NULL_PTR, 0, mode);
32131a9c
RK
5561
5562 if (oldequiv)
5563 {
5564 int regno = true_regnum (oldequiv);
5565
5566 /* If OLDEQUIV is a spill register, don't use it for this
5567 if any other reload needs it at an earlier stage of this insn
a8fdc208 5568 or at this stage. */
32131a9c 5569 if (spill_reg_order[regno] >= 0
546b63fb
RK
5570 && (! reload_reg_free_p (regno, reload_opnum[j],
5571 reload_when_needed[j])
5572 || ! reload_reg_free_before_p (regno, reload_opnum[j],
32131a9c
RK
5573 reload_when_needed[j])))
5574 oldequiv = 0;
5575
5576 /* If OLDEQUIV is not a spill register,
5577 don't use it if any other reload wants it. */
5578 if (spill_reg_order[regno] < 0)
5579 {
5580 int k;
5581 for (k = 0; k < n_reloads; k++)
5582 if (reload_reg_rtx[k] != 0 && k != j
bfa30b22
RK
5583 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5584 oldequiv))
32131a9c
RK
5585 {
5586 oldequiv = 0;
5587 break;
5588 }
5589 }
546b63fb
RK
5590
5591 /* If it is no cheaper to copy from OLDEQUIV into the
5592 reload register than it would be to move from memory,
5593 don't use it. Likewise, if we need a secondary register
5594 or memory. */
5595
5596 if (oldequiv != 0
5597 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5598 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5599 reload_reg_class[j])
5600 >= MEMORY_MOVE_COST (mode)))
5601#ifdef SECONDARY_INPUT_RELOAD_CLASS
5602 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5603 mode, oldequiv)
5604 != NO_REGS)
5605#endif
5606#ifdef SECONDARY_MEMORY_NEEDED
5607 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5608 REGNO_REG_CLASS (regno),
5609 mode)
5610#endif
5611 ))
5612 oldequiv = 0;
32131a9c
RK
5613 }
5614
5615 if (oldequiv == 0)
5616 oldequiv = old;
5617 else if (GET_CODE (oldequiv) == REG)
5618 oldequiv_reg = oldequiv;
5619 else if (GET_CODE (oldequiv) == SUBREG)
5620 oldequiv_reg = SUBREG_REG (oldequiv);
5621
76182796
RK
5622 /* If we are reloading from a register that was recently stored in
5623 with an output-reload, see if we can prove there was
5624 actually no need to store the old value in it. */
5625
5626 if (optimize && GET_CODE (oldequiv) == REG
5627 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5628 && spill_reg_order[REGNO (oldequiv)] >= 0
5629 && spill_reg_store[reload_spill_index[REGNO (oldequiv)]] != 0
8aea655f 5630 && find_reg_note (insn, REG_DEAD, reload_in[j])
76182796 5631 /* This is unsafe if operand occurs more than once in current
b87b7ecd
RK
5632 insn. Perhaps some occurrences weren't reloaded. */
5633 && count_occurrences (PATTERN (insn), reload_in[j]) == 1
5634 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0)
76182796
RK
5635 delete_output_reload
5636 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5637
32131a9c 5638 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
3abe6f90
RK
5639 then load RELOADREG from OLDEQUIV. Note that we cannot use
5640 gen_lowpart_common since it can do the wrong thing when
5641 RELOADREG has a multi-word mode. Note that RELOADREG
5642 must always be a REG here. */
32131a9c
RK
5643
5644 if (GET_MODE (reloadreg) != mode)
3abe6f90 5645 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
32131a9c
RK
5646 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5647 oldequiv = SUBREG_REG (oldequiv);
5648 if (GET_MODE (oldequiv) != VOIDmode
5649 && mode != GET_MODE (oldequiv))
5650 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5651
546b63fb 5652 /* Switch to the right place to emit the reload insns. */
32131a9c
RK
5653 switch (reload_when_needed[j])
5654 {
32131a9c 5655 case RELOAD_OTHER:
546b63fb
RK
5656 where = &other_input_reload_insns;
5657 break;
5658 case RELOAD_FOR_INPUT:
5659 where = &input_reload_insns[reload_opnum[j]];
32131a9c 5660 break;
546b63fb
RK
5661 case RELOAD_FOR_INPUT_ADDRESS:
5662 where = &input_address_reload_insns[reload_opnum[j]];
32131a9c 5663 break;
546b63fb
RK
5664 case RELOAD_FOR_OUTPUT_ADDRESS:
5665 where = &output_address_reload_insns[reload_opnum[j]];
32131a9c
RK
5666 break;
5667 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
5668 where = &operand_reload_insns;
5669 break;
5670 case RELOAD_FOR_OTHER_ADDRESS:
5671 where = &other_input_address_reload_insns;
5672 break;
5673 default:
5674 abort ();
32131a9c
RK
5675 }
5676
546b63fb 5677 push_to_sequence (*where);
32131a9c
RK
5678 special = 0;
5679
5680 /* Auto-increment addresses must be reloaded in a special way. */
5681 if (GET_CODE (oldequiv) == POST_INC
5682 || GET_CODE (oldequiv) == POST_DEC
5683 || GET_CODE (oldequiv) == PRE_INC
5684 || GET_CODE (oldequiv) == PRE_DEC)
5685 {
5686 /* We are not going to bother supporting the case where a
5687 incremented register can't be copied directly from
5688 OLDEQUIV since this seems highly unlikely. */
b80bba27 5689 if (reload_secondary_in_reload[j] >= 0)
32131a9c
RK
5690 abort ();
5691 /* Prevent normal processing of this reload. */
5692 special = 1;
5693 /* Output a special code sequence for this case. */
546b63fb 5694 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
32131a9c
RK
5695 }
5696
5697 /* If we are reloading a pseudo-register that was set by the previous
5698 insn, see if we can get rid of that pseudo-register entirely
5699 by redirecting the previous insn into our reload register. */
5700
5701 else if (optimize && GET_CODE (old) == REG
5702 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5703 && dead_or_set_p (insn, old)
5704 /* This is unsafe if some other reload
5705 uses the same reg first. */
546b63fb
RK
5706 && reload_reg_free_before_p (REGNO (reloadreg),
5707 reload_opnum[j],
5708 reload_when_needed[j]))
32131a9c
RK
5709 {
5710 rtx temp = PREV_INSN (insn);
5711 while (temp && GET_CODE (temp) == NOTE)
5712 temp = PREV_INSN (temp);
5713 if (temp
5714 && GET_CODE (temp) == INSN
5715 && GET_CODE (PATTERN (temp)) == SET
5716 && SET_DEST (PATTERN (temp)) == old
5717 /* Make sure we can access insn_operand_constraint. */
5718 && asm_noperands (PATTERN (temp)) < 0
5719 /* This is unsafe if prev insn rejects our reload reg. */
5720 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5721 reloadreg)
5722 /* This is unsafe if operand occurs more than once in current
5723 insn. Perhaps some occurrences aren't reloaded. */
5724 && count_occurrences (PATTERN (insn), old) == 1
5725 /* Don't risk splitting a matching pair of operands. */
5726 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5727 {
5728 /* Store into the reload register instead of the pseudo. */
5729 SET_DEST (PATTERN (temp)) = reloadreg;
5730 /* If these are the only uses of the pseudo reg,
5731 pretend for GDB it lives in the reload reg we used. */
5732 if (reg_n_deaths[REGNO (old)] == 1
5733 && reg_n_sets[REGNO (old)] == 1)
5734 {
5735 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5736 alter_reg (REGNO (old), -1);
5737 }
5738 special = 1;
5739 }
5740 }
5741
546b63fb
RK
5742 /* We can't do that, so output an insn to load RELOADREG. */
5743
32131a9c
RK
5744 if (! special)
5745 {
5746#ifdef SECONDARY_INPUT_RELOAD_CLASS
5747 rtx second_reload_reg = 0;
5748 enum insn_code icode;
5749
5750 /* If we have a secondary reload, pick up the secondary register
d445b551
RK
5751 and icode, if any. If OLDEQUIV and OLD are different or
5752 if this is an in-out reload, recompute whether or not we
5753 still need a secondary register and what the icode should
5754 be. If we still need a secondary register and the class or
5755 icode is different, go back to reloading from OLD if using
5756 OLDEQUIV means that we got the wrong type of register. We
5757 cannot have different class or icode due to an in-out reload
5758 because we don't make such reloads when both the input and
5759 output need secondary reload registers. */
32131a9c 5760
b80bba27 5761 if (reload_secondary_in_reload[j] >= 0)
32131a9c 5762 {
b80bba27 5763 int secondary_reload = reload_secondary_in_reload[j];
1554c2c6
RK
5764 rtx real_oldequiv = oldequiv;
5765 rtx real_old = old;
5766
5767 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5768 and similarly for OLD.
b80bba27 5769 See comments in get_secondary_reload in reload.c. */
1554c2c6
RK
5770 if (GET_CODE (oldequiv) == REG
5771 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5772 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5773 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5774
5775 if (GET_CODE (old) == REG
5776 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5777 && reg_equiv_mem[REGNO (old)] != 0)
5778 real_old = reg_equiv_mem[REGNO (old)];
5779
32131a9c 5780 second_reload_reg = reload_reg_rtx[secondary_reload];
b80bba27 5781 icode = reload_secondary_in_icode[j];
32131a9c 5782
d445b551
RK
5783 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5784 || (reload_in[j] != 0 && reload_out[j] != 0))
32131a9c
RK
5785 {
5786 enum reg_class new_class
5787 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
1554c2c6 5788 mode, real_oldequiv);
32131a9c
RK
5789
5790 if (new_class == NO_REGS)
5791 second_reload_reg = 0;
5792 else
5793 {
5794 enum insn_code new_icode;
5795 enum machine_mode new_mode;
5796
5797 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5798 REGNO (second_reload_reg)))
1554c2c6 5799 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
5800 else
5801 {
5802 new_icode = reload_in_optab[(int) mode];
5803 if (new_icode != CODE_FOR_nothing
5804 && ((insn_operand_predicate[(int) new_icode][0]
a8fdc208 5805 && ! ((*insn_operand_predicate[(int) new_icode][0])
32131a9c 5806 (reloadreg, mode)))
a8fdc208
RS
5807 || (insn_operand_predicate[(int) new_icode][1]
5808 && ! ((*insn_operand_predicate[(int) new_icode][1])
1554c2c6 5809 (real_oldequiv, mode)))))
32131a9c
RK
5810 new_icode = CODE_FOR_nothing;
5811
5812 if (new_icode == CODE_FOR_nothing)
5813 new_mode = mode;
5814 else
196ddf8a 5815 new_mode = insn_operand_mode[(int) new_icode][2];
32131a9c
RK
5816
5817 if (GET_MODE (second_reload_reg) != new_mode)
5818 {
5819 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5820 new_mode))
1554c2c6 5821 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
5822 else
5823 second_reload_reg
3aaa90c7
MM
5824 = gen_rtx (REG, new_mode,
5825 REGNO (second_reload_reg));
32131a9c
RK
5826 }
5827 }
5828 }
5829 }
5830
5831 /* If we still need a secondary reload register, check
5832 to see if it is being used as a scratch or intermediate
1554c2c6
RK
5833 register and generate code appropriately. If we need
5834 a scratch register, use REAL_OLDEQUIV since the form of
5835 the insn may depend on the actual address if it is
5836 a MEM. */
32131a9c
RK
5837
5838 if (second_reload_reg)
5839 {
5840 if (icode != CODE_FOR_nothing)
5841 {
546b63fb
RK
5842 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
5843 second_reload_reg));
32131a9c
RK
5844 special = 1;
5845 }
5846 else
5847 {
5848 /* See if we need a scratch register to load the
5849 intermediate register (a tertiary reload). */
5850 enum insn_code tertiary_icode
b80bba27 5851 = reload_secondary_in_icode[secondary_reload];
32131a9c
RK
5852
5853 if (tertiary_icode != CODE_FOR_nothing)
5854 {
5855 rtx third_reload_reg
b80bba27 5856 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
32131a9c 5857
546b63fb
RK
5858 emit_insn ((GEN_FCN (tertiary_icode)
5859 (second_reload_reg, real_oldequiv,
5860 third_reload_reg)));
32131a9c
RK
5861 }
5862 else
546b63fb
RK
5863 gen_input_reload (second_reload_reg, oldequiv,
5864 reload_opnum[j],
5865 reload_when_needed[j]);
5866
5867 oldequiv = second_reload_reg;
32131a9c
RK
5868 }
5869 }
5870 }
5871#endif
5872
2d182c6f 5873 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
546b63fb
RK
5874 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
5875 reload_when_needed[j]);
32131a9c
RK
5876
5877#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5878 /* We may have to make a REG_DEAD note for the secondary reload
5879 register in the insns we just made. Find the last insn that
5880 mentioned the register. */
5881 if (! special && second_reload_reg
5882 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5883 {
5884 rtx prev;
5885
546b63fb 5886 for (prev = get_last_insn (); prev;
32131a9c
RK
5887 prev = PREV_INSN (prev))
5888 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
bfa30b22
RK
5889 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5890 PATTERN (prev)))
32131a9c
RK
5891 {
5892 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5893 second_reload_reg,
5894 REG_NOTES (prev));
5895 break;
5896 }
5897 }
5898#endif
5899 }
5900
546b63fb
RK
5901 /* End this sequence. */
5902 *where = get_insns ();
5903 end_sequence ();
32131a9c
RK
5904 }
5905
5906 /* Add a note saying the input reload reg
5907 dies in this insn, if anyone cares. */
5908#ifdef PRESERVE_DEATH_INFO_REGNO_P
5909 if (old != 0
5910 && reload_reg_rtx[j] != old
5911 && reload_reg_rtx[j] != 0
5912 && reload_out[j] == 0
5913 && ! reload_inherited[j]
5914 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
5915 {
5916 register rtx reloadreg = reload_reg_rtx[j];
5917
a8fdc208 5918#if 0
32131a9c
RK
5919 /* We can't abort here because we need to support this for sched.c.
5920 It's not terrible to miss a REG_DEAD note, but we should try
5921 to figure out how to do this correctly. */
5922 /* The code below is incorrect for address-only reloads. */
5923 if (reload_when_needed[j] != RELOAD_OTHER
5924 && reload_when_needed[j] != RELOAD_FOR_INPUT)
5925 abort ();
5926#endif
5927
5928 /* Add a death note to this insn, for an input reload. */
5929
5930 if ((reload_when_needed[j] == RELOAD_OTHER
5931 || reload_when_needed[j] == RELOAD_FOR_INPUT)
5932 && ! dead_or_set_p (insn, reloadreg))
5933 REG_NOTES (insn)
5934 = gen_rtx (EXPR_LIST, REG_DEAD,
5935 reloadreg, REG_NOTES (insn));
5936 }
5937
5938 /* When we inherit a reload, the last marked death of the reload reg
5939 may no longer really be a death. */
5940 if (reload_reg_rtx[j] != 0
5941 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
5942 && reload_inherited[j])
5943 {
5944 /* Handle inheriting an output reload.
5945 Remove the death note from the output reload insn. */
5946 if (reload_spill_index[j] >= 0
5947 && GET_CODE (reload_in[j]) == REG
5948 && spill_reg_store[reload_spill_index[j]] != 0
5949 && find_regno_note (spill_reg_store[reload_spill_index[j]],
5950 REG_DEAD, REGNO (reload_reg_rtx[j])))
5951 remove_death (REGNO (reload_reg_rtx[j]),
5952 spill_reg_store[reload_spill_index[j]]);
5953 /* Likewise for input reloads that were inherited. */
5954 else if (reload_spill_index[j] >= 0
5955 && GET_CODE (reload_in[j]) == REG
5956 && spill_reg_store[reload_spill_index[j]] == 0
5957 && reload_inheritance_insn[j] != 0
a8fdc208 5958 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
32131a9c
RK
5959 REGNO (reload_reg_rtx[j])))
5960 remove_death (REGNO (reload_reg_rtx[j]),
5961 reload_inheritance_insn[j]);
5962 else
5963 {
5964 rtx prev;
5965
5966 /* We got this register from find_equiv_reg.
5967 Search back for its last death note and get rid of it.
5968 But don't search back too far.
5969 Don't go past a place where this reg is set,
5970 since a death note before that remains valid. */
5971 for (prev = PREV_INSN (insn);
5972 prev && GET_CODE (prev) != CODE_LABEL;
5973 prev = PREV_INSN (prev))
5974 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5975 && dead_or_set_p (prev, reload_reg_rtx[j]))
5976 {
5977 if (find_regno_note (prev, REG_DEAD,
5978 REGNO (reload_reg_rtx[j])))
5979 remove_death (REGNO (reload_reg_rtx[j]), prev);
5980 break;
5981 }
5982 }
5983 }
5984
5985 /* We might have used find_equiv_reg above to choose an alternate
5986 place from which to reload. If so, and it died, we need to remove
5987 that death and move it to one of the insns we just made. */
5988
5989 if (oldequiv_reg != 0
5990 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
5991 {
5992 rtx prev, prev1;
5993
5994 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
5995 prev = PREV_INSN (prev))
5996 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5997 && dead_or_set_p (prev, oldequiv_reg))
5998 {
5999 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6000 {
6001 for (prev1 = this_reload_insn;
6002 prev1; prev1 = PREV_INSN (prev1))
6003 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
bfa30b22
RK
6004 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6005 PATTERN (prev1)))
32131a9c
RK
6006 {
6007 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6008 oldequiv_reg,
6009 REG_NOTES (prev1));
6010 break;
6011 }
6012 remove_death (REGNO (oldequiv_reg), prev);
6013 }
6014 break;
6015 }
6016 }
6017#endif
6018
6019 /* If we are reloading a register that was recently stored in with an
6020 output-reload, see if we can prove there was
6021 actually no need to store the old value in it. */
6022
6023 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
546b63fb 6024 && reload_in[j] != 0
32131a9c
RK
6025 && GET_CODE (reload_in[j]) == REG
6026#if 0
6027 /* There doesn't seem to be any reason to restrict this to pseudos
6028 and doing so loses in the case where we are copying from a
6029 register of the wrong class. */
6030 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6031#endif
6032 && spill_reg_store[reload_spill_index[j]] != 0
546b63fb
RK
6033 /* This is unsafe if some other reload uses the same reg first. */
6034 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6035 reload_opnum[j], reload_when_needed[j])
32131a9c
RK
6036 && dead_or_set_p (insn, reload_in[j])
6037 /* This is unsafe if operand occurs more than once in current
6038 insn. Perhaps some occurrences weren't reloaded. */
6039 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6040 delete_output_reload (insn, j,
6041 spill_reg_store[reload_spill_index[j]]);
6042
6043 /* Input-reloading is done. Now do output-reloading,
6044 storing the value from the reload-register after the main insn
6045 if reload_out[j] is nonzero.
6046
6047 ??? At some point we need to support handling output reloads of
6048 JUMP_INSNs or insns that set cc0. */
6049 old = reload_out[j];
6050 if (old != 0
6051 && reload_reg_rtx[j] != old
6052 && reload_reg_rtx[j] != 0)
6053 {
6054 register rtx reloadreg = reload_reg_rtx[j];
6055 register rtx second_reloadreg = 0;
32131a9c
RK
6056 rtx note, p;
6057 enum machine_mode mode;
6058 int special = 0;
6059
6060 /* An output operand that dies right away does need a reload,
6061 but need not be copied from it. Show the new location in the
6062 REG_UNUSED note. */
6063 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6064 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6065 {
6066 XEXP (note, 0) = reload_reg_rtx[j];
6067 continue;
6068 }
6069 else if (GET_CODE (old) == SCRATCH)
6070 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6071 but we don't want to make an output reload. */
6072 continue;
6073
6074#if 0
6075 /* Strip off of OLD any size-increasing SUBREGs such as
6076 (SUBREG:SI foo:QI 0). */
6077
6078 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6079 && (GET_MODE_SIZE (GET_MODE (old))
6080 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6081 old = SUBREG_REG (old);
6082#endif
6083
6084 /* If is a JUMP_INSN, we can't support output reloads yet. */
6085 if (GET_CODE (insn) == JUMP_INSN)
6086 abort ();
6087
546b63fb
RK
6088 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6089
32131a9c
RK
6090 /* Determine the mode to reload in.
6091 See comments above (for input reloading). */
6092
6093 mode = GET_MODE (old);
6094 if (mode == VOIDmode)
79a365a7
RS
6095 {
6096 /* VOIDmode should never happen for an output. */
6097 if (asm_noperands (PATTERN (insn)) < 0)
6098 /* It's the compiler's fault. */
6099 abort ();
6100 error_for_asm (insn, "output operand is constant in `asm'");
6101 /* Prevent crash--use something we know is valid. */
6102 mode = word_mode;
6103 old = gen_rtx (REG, mode, REGNO (reloadreg));
6104 }
32131a9c 6105
32131a9c 6106 if (GET_MODE (reloadreg) != mode)
3abe6f90 6107 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
32131a9c
RK
6108
6109#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6110
6111 /* If we need two reload regs, set RELOADREG to the intermediate
6112 one, since it will be stored into OUT. We might need a secondary
6113 register only for an input reload, so check again here. */
6114
b80bba27 6115 if (reload_secondary_out_reload[j] >= 0)
32131a9c 6116 {
1554c2c6 6117 rtx real_old = old;
32131a9c 6118
1554c2c6
RK
6119 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6120 && reg_equiv_mem[REGNO (old)] != 0)
6121 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 6122
1554c2c6
RK
6123 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6124 mode, real_old)
6125 != NO_REGS))
6126 {
6127 second_reloadreg = reloadreg;
b80bba27 6128 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
32131a9c 6129
1554c2c6
RK
6130 /* See if RELOADREG is to be used as a scratch register
6131 or as an intermediate register. */
b80bba27 6132 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
32131a9c 6133 {
b80bba27 6134 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
546b63fb 6135 (real_old, second_reloadreg, reloadreg)));
1554c2c6 6136 special = 1;
32131a9c
RK
6137 }
6138 else
1554c2c6
RK
6139 {
6140 /* See if we need both a scratch and intermediate reload
6141 register. */
b80bba27 6142 int secondary_reload = reload_secondary_out_reload[j];
1554c2c6 6143 enum insn_code tertiary_icode
b80bba27 6144 = reload_secondary_out_icode[secondary_reload];
1554c2c6 6145 rtx pat;
32131a9c 6146
1554c2c6
RK
6147 if (GET_MODE (reloadreg) != mode)
6148 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6149
6150 if (tertiary_icode != CODE_FOR_nothing)
6151 {
6152 rtx third_reloadreg
b80bba27 6153 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
1554c2c6
RK
6154 pat = (GEN_FCN (tertiary_icode)
6155 (reloadreg, second_reloadreg, third_reloadreg));
6156 }
9ad5f9f6
JW
6157#ifdef SECONDARY_MEMORY_NEEDED
6158 /* If we need a memory location to do the move, do it that way. */
6159 else if (GET_CODE (reloadreg) == REG
6160 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
6161 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
6162 REGNO_REG_CLASS (REGNO (second_reloadreg)),
6163 GET_MODE (second_reloadreg)))
6164 {
6165 /* Get the memory to use and rewrite both registers
6166 to its mode. */
546b63fb
RK
6167 rtx loc
6168 = get_secondary_mem (reloadreg,
6169 GET_MODE (second_reloadreg),
6170 reload_opnum[j],
6171 reload_when_needed[j]);
9ad5f9f6
JW
6172 rtx tmp_reloadreg;
6173
6174 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6175 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6176 REGNO (second_reloadreg));
6177
6178 if (GET_MODE (loc) != GET_MODE (reloadreg))
6179 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6180 REGNO (reloadreg));
6181 else
6182 tmp_reloadreg = reloadreg;
6183
546b63fb 6184 emit_move_insn (loc, second_reloadreg);
9ad5f9f6
JW
6185 pat = gen_move_insn (tmp_reloadreg, loc);
6186 }
6187#endif
1554c2c6
RK
6188 else
6189 pat = gen_move_insn (reloadreg, second_reloadreg);
6190
546b63fb 6191 emit_insn (pat);
1554c2c6 6192 }
32131a9c
RK
6193 }
6194 }
6195#endif
6196
6197 /* Output the last reload insn. */
6198 if (! special)
0dadecf6
RK
6199 {
6200#ifdef SECONDARY_MEMORY_NEEDED
6201 /* If we need a memory location to do the move, do it that way. */
6202 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6203 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6204 REGNO_REG_CLASS (REGNO (reloadreg)),
6205 GET_MODE (reloadreg)))
6206 {
6207 /* Get the memory to use and rewrite both registers to
6208 its mode. */
546b63fb
RK
6209 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6210 reload_opnum[j],
6211 reload_when_needed[j]);
0dadecf6
RK
6212
6213 if (GET_MODE (loc) != GET_MODE (reloadreg))
6214 reloadreg = gen_rtx (REG, GET_MODE (loc),
6215 REGNO (reloadreg));
6216
6217 if (GET_MODE (loc) != GET_MODE (old))
6218 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6219
546b63fb
RK
6220 emit_insn (gen_move_insn (loc, reloadreg));
6221 emit_insn (gen_move_insn (old, loc));
0dadecf6
RK
6222 }
6223 else
6224#endif
546b63fb 6225 emit_insn (gen_move_insn (old, reloadreg));
0dadecf6 6226 }
32131a9c
RK
6227
6228#ifdef PRESERVE_DEATH_INFO_REGNO_P
6229 /* If final will look at death notes for this reg,
6230 put one on the last output-reload insn to use it. Similarly
6231 for any secondary register. */
6232 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
546b63fb 6233 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6234 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
6235 && reg_overlap_mentioned_for_reload_p (reloadreg,
6236 PATTERN (p)))
32131a9c
RK
6237 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6238 reloadreg, REG_NOTES (p));
6239
6240#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6241 if (! special
6242 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
546b63fb 6243 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6244 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
6245 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6246 PATTERN (p)))
32131a9c
RK
6247 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6248 second_reloadreg, REG_NOTES (p));
6249#endif
6250#endif
6251 /* Look at all insns we emitted, just to be safe. */
546b63fb 6252 for (p = get_insns (); p; p = NEXT_INSN (p))
32131a9c
RK
6253 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6254 {
6255 /* If this output reload doesn't come from a spill reg,
6256 clear any memory of reloaded copies of the pseudo reg.
6257 If this output reload comes from a spill reg,
6258 reg_has_output_reload will make this do nothing. */
6259 note_stores (PATTERN (p), forget_old_reloads_1);
6260
6261 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6262 store_insn = p;
6263 }
6264
546b63fb
RK
6265 output_reload_insns[reload_opnum[j]] = get_insns ();
6266 end_sequence ();
6267
32131a9c
RK
6268 }
6269
6270 if (reload_spill_index[j] >= 0)
6271 new_spill_reg_store[reload_spill_index[j]] = store_insn;
6272 }
6273
546b63fb
RK
6274 /* Now write all the insns we made for reloads in the order expected by
6275 the allocation functions. Prior to the insn being reloaded, we write
6276 the following reloads:
6277
6278 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6279
6280 RELOAD_OTHER reloads.
6281
6282 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6283 the RELOAD_FOR_INPUT reload for the operand.
6284
6285 RELOAD_FOR_OPERAND_ADDRESS reloads.
6286
6287 After the insn being reloaded, we write the following:
6288
6289 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6290 the RELOAD_FOR_OUTPUT reload for that operand. */
6291
6292 emit_insns_before (other_input_address_reload_insns, before_insn);
6293 emit_insns_before (other_input_reload_insns, before_insn);
6294
6295 for (j = 0; j < reload_n_operands; j++)
6296 {
6297 emit_insns_before (input_address_reload_insns[j], before_insn);
6298 emit_insns_before (input_reload_insns[j], before_insn);
6299 }
6300
6301 emit_insns_before (operand_reload_insns, before_insn);
6302
6303 for (j = 0; j < reload_n_operands; j++)
6304 {
6305 emit_insns_before (output_address_reload_insns[j], following_insn);
6306 emit_insns_before (output_reload_insns[j], following_insn);
6307 }
6308
32131a9c
RK
6309 /* Move death notes from INSN
6310 to output-operand-address and output reload insns. */
6311#ifdef PRESERVE_DEATH_INFO_REGNO_P
6312 {
6313 rtx insn1;
6314 /* Loop over those insns, last ones first. */
6315 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6316 insn1 = PREV_INSN (insn1))
6317 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6318 {
6319 rtx source = SET_SRC (PATTERN (insn1));
6320 rtx dest = SET_DEST (PATTERN (insn1));
6321
6322 /* The note we will examine next. */
6323 rtx reg_notes = REG_NOTES (insn);
6324 /* The place that pointed to this note. */
6325 rtx *prev_reg_note = &REG_NOTES (insn);
6326
6327 /* If the note is for something used in the source of this
6328 reload insn, or in the output address, move the note. */
6329 while (reg_notes)
6330 {
6331 rtx next_reg_notes = XEXP (reg_notes, 1);
6332 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6333 && GET_CODE (XEXP (reg_notes, 0)) == REG
6334 && ((GET_CODE (dest) != REG
bfa30b22
RK
6335 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6336 dest))
6337 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6338 source)))
32131a9c
RK
6339 {
6340 *prev_reg_note = next_reg_notes;
6341 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6342 REG_NOTES (insn1) = reg_notes;
6343 }
6344 else
6345 prev_reg_note = &XEXP (reg_notes, 1);
6346
6347 reg_notes = next_reg_notes;
6348 }
6349 }
6350 }
6351#endif
6352
6353 /* For all the spill regs newly reloaded in this instruction,
6354 record what they were reloaded from, so subsequent instructions
d445b551
RK
6355 can inherit the reloads.
6356
6357 Update spill_reg_store for the reloads of this insn.
e9e79d69 6358 Copy the elements that were updated in the loop above. */
32131a9c
RK
6359
6360 for (j = 0; j < n_reloads; j++)
6361 {
6362 register int r = reload_order[j];
6363 register int i = reload_spill_index[r];
6364
6365 /* I is nonneg if this reload used one of the spill regs.
6366 If reload_reg_rtx[r] is 0, this is an optional reload
546b63fb
RK
6367 that we opted to ignore.
6368
6369 Also ignore reloads that don't reach the end of the insn,
6370 since we will eventually see the one that does. */
d445b551 6371
546b63fb
RK
6372 if (i >= 0 && reload_reg_rtx[r] != 0
6373 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6374 reload_when_needed[r]))
32131a9c
RK
6375 {
6376 /* First, clear out memory of what used to be in this spill reg.
6377 If consecutive registers are used, clear them all. */
6378 int nr
6379 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6380 int k;
6381
6382 for (k = 0; k < nr; k++)
6383 {
6384 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6385 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6386 }
6387
6388 /* Maybe the spill reg contains a copy of reload_out. */
6389 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6390 {
6391 register int nregno = REGNO (reload_out[r]);
d08ea79f
RK
6392 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6393 : HARD_REGNO_NREGS (nregno,
6394 GET_MODE (reload_reg_rtx[r])));
d445b551
RK
6395
6396 spill_reg_store[i] = new_spill_reg_store[i];
32131a9c 6397 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 6398
d08ea79f
RK
6399 /* If NREGNO is a hard register, it may occupy more than
6400 one register. If it does, say what is in the
6401 rest of the registers assuming that both registers
6402 agree on how many words the object takes. If not,
6403 invalidate the subsequent registers. */
6404
6405 if (nregno < FIRST_PSEUDO_REGISTER)
6406 for (k = 1; k < nnr; k++)
6407 reg_last_reload_reg[nregno + k]
74eb5c52
DE
6408 = (nr == nnr ? gen_rtx (REG,
6409 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
d08ea79f
RK
6410 REGNO (reload_reg_rtx[r]) + k)
6411 : 0);
6412
6413 /* Now do the inverse operation. */
32131a9c
RK
6414 for (k = 0; k < nr; k++)
6415 {
6416 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
d08ea79f
RK
6417 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6418 : nregno + k);
32131a9c
RK
6419 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6420 }
6421 }
d445b551 6422
2c9ce2ef
RK
6423 /* Maybe the spill reg contains a copy of reload_in. Only do
6424 something if there will not be an output reload for
6425 the register being reloaded. */
32131a9c
RK
6426 else if (reload_out[r] == 0
6427 && reload_in[r] != 0
2c9ce2ef
RK
6428 && ((GET_CODE (reload_in[r]) == REG
6429 && ! reg_has_output_reload[REGNO (reload_in[r])]
6430 || (GET_CODE (reload_in_reg[r]) == REG
6431 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
32131a9c
RK
6432 {
6433 register int nregno;
d08ea79f
RK
6434 int nnr;
6435
32131a9c
RK
6436 if (GET_CODE (reload_in[r]) == REG)
6437 nregno = REGNO (reload_in[r]);
6438 else
6439 nregno = REGNO (reload_in_reg[r]);
6440
d08ea79f
RK
6441 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6442 : HARD_REGNO_NREGS (nregno,
6443 GET_MODE (reload_reg_rtx[r])));
6444
546b63fb 6445 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 6446
d08ea79f
RK
6447 if (nregno < FIRST_PSEUDO_REGISTER)
6448 for (k = 1; k < nnr; k++)
6449 reg_last_reload_reg[nregno + k]
74eb5c52
DE
6450 = (nr == nnr ? gen_rtx (REG,
6451 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
d08ea79f
RK
6452 REGNO (reload_reg_rtx[r]) + k)
6453 : 0);
6454
546b63fb
RK
6455 /* Unless we inherited this reload, show we haven't
6456 recently done a store. */
6457 if (! reload_inherited[r])
6458 spill_reg_store[i] = 0;
d445b551 6459
546b63fb
RK
6460 for (k = 0; k < nr; k++)
6461 {
6462 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
d08ea79f
RK
6463 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6464 : nregno + k);
546b63fb
RK
6465 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6466 = insn;
32131a9c
RK
6467 }
6468 }
6469 }
6470
6471 /* The following if-statement was #if 0'd in 1.34 (or before...).
6472 It's reenabled in 1.35 because supposedly nothing else
6473 deals with this problem. */
6474
6475 /* If a register gets output-reloaded from a non-spill register,
6476 that invalidates any previous reloaded copy of it.
6477 But forget_old_reloads_1 won't get to see it, because
6478 it thinks only about the original insn. So invalidate it here. */
6479 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6480 {
6481 register int nregno = REGNO (reload_out[r]);
36281332
RK
6482 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6483
6484 while (num_regs-- > 0)
6485 reg_last_reload_reg[nregno + num_regs] = 0;
32131a9c
RK
6486 }
6487 }
6488}
6489\f
546b63fb
RK
/* Emit code to perform an input reload of IN to RELOADREG.  IN is from
   operand OPNUM with reload type TYPE.

   Returns first insn emitted.  */

rtx
gen_input_reload (reloadreg, in, opnum, type)
     rtx reloadreg;
     rtx in;
     int opnum;
     enum reload_type type;
{
  /* Remember where the instruction stream ends now so we can find the
     first insn we emit (and can delete everything we emit on a failed
     recognition attempt).  */
  rtx last = get_last_insn ();

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (GET_CODE (XEXP (in, 0)) == REG
	  || GET_CODE (XEXP (in, 0)) == MEM)
      && (GET_CODE (XEXP (in, 1)) == REG
	  || CONSTANT_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == MEM))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `insn_extract' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      /* Substitute any pending reload replacements into the two addends
	 before we build the SET, since reload itself will never see the
	 insns we emit here.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      /* NOTE(review): this looks at the original XEXP (in, 1), not OP1
	 after replacement -- confirm that is intended; a replacement
	 could in principle change which operand matches RELOADREG.  */
      if (GET_CODE (XEXP (in, 1)) == REG
	  && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx (PLUS, GET_MODE (in), op0, op1);

      /* First attempt: a single three-operand add of the whole sum.  */
      insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      /* The single-insn add was not recognized; remove it.  */
      delete_insns_since (last);

      /* If that failed, we must use a conservative two-insn sequence.
	 use move to copy constant, MEM, or pseudo register to the reload
	 register since "move" will be able to handle an arbitrary operand,
	 unlike add which can't, in general.  Then add the registers.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      /* Put the "hard" operand (constant, MEM, or pseudo) in OP0 so it
	 is the one handled by the move.  */
      if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
	  || (GET_CODE (op1) == REG
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
	tem = op0, op0 = op1, op1 = tem;

      emit_insn (gen_move_insn (reloadreg, op0));

      /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = reloadreg;

      insn = emit_insn (gen_add2_insn (reloadreg, op1));

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      /* Second attempt also failed; discard it and swap the roles of
	 the operands for the final fallback.  */
      delete_insns_since (last);

      emit_insn (gen_move_insn (reloadreg, op1));
      emit_insn (gen_add2_insn (reloadreg, op0));
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
				       REGNO_REG_CLASS (REGNO (reloadreg)),
				       GET_MODE (reloadreg)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);

      if (GET_MODE (loc) != GET_MODE (reloadreg))
	reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx (REG, GET_MODE (loc), REGNO (in));

      /* Bounce the value through the secondary memory location.  */
      emit_insn (gen_move_insn (loc, in));
      emit_insn (gen_move_insn (reloadreg, loc));
    }
#endif

  /* If IN is a simple operand, use gen_move_insn.  */
  else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
    emit_insn (gen_move_insn (reloadreg, in));

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (reloadreg, in));
#endif

  /* Otherwise, just write (set REGLOADREG IN) and hope for the best.  */
  else
    emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
6673\f
/* Delete a previously made output-reload
   whose result we now believe is not needed.
   First we double-check.

   INSN is the insn now being processed.
   OUTPUT_RELOAD_INSN is the insn of the output reload.
   J is the reload-number for this insn.  */

static void
delete_output_reload (insn, j, output_reload_insn)
     rtx insn;
     int j;
     rtx output_reload_insn;
{
  register rtx i1;

  /* Get the raw pseudo-register referred to.  */

  rtx reg = reload_in[j];
  /* Strip any SUBREGs so we look at the underlying (pseudo) register.  */
  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and no jumps or labels intervene, then the value can get
     here through the reload reg alone.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      /* A label or jump means control flow could enter or leave this
	 range, so the stored value might be needed on another path.  */
      if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
	return;
      if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
	  && reg_mentioned_p (reg, PATTERN (i1)))
	return;
    }

  /* Some pseudos are explicitly marked as ones whose stores must be
     kept (e.g. their stack slot may be referenced elsewhere).  */
  if (cannot_omit_stores[REGNO (reg)])
    return;

  /* If this insn will store in the pseudo again,
     the previous store can be removed.  */
  if (reload_out[j] == reload_in[j])
    delete_insn (output_reload_insn);

  /* See if the pseudo reg has been completely replaced
     with reload regs.  If so, delete the store insn
     and forget we had a stack slot for the pseudo.  */
  else if (reg_n_deaths[REGNO (reg)] == 1
	   && reg_basic_block[REGNO (reg)] >= 0
	   && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here
	 and the beginning of the current basic block.
	 (We also know that the last use before INSN was
	 the output reload we are thinking of deleting, but never mind that.)
	 Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  /* Stop at the head of the basic block.  */
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	  if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    /* Some other ref remains;
	       we can't do anything.  */
	    return;
	}

      /* Delete the now-dead stores into this pseudo.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    delete_insn (i2);
	  /* Same basic-block boundary as the scan above.  */
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	}

      /* For the debugging info,
	 say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
      alter_reg (REGNO (reg), -1);
    }
}
32131a9c 6769\f
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.  */

static void
inc_for_reload (reloadreg, value, inc_amount)
     rtx reloadreg;
     rtx value;
     int inc_amount;
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were non-zero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (GET_CODE (incloc) == REG)
    reg_last_reload_reg[REGNO (incloc)] = 0;

  /* Decrements are expressed as a negative increment from here on.  */
  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
    inc_amount = - inc_amount;

  inc = GEN_INT (inc_amount);

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post)
    emit_insn (gen_move_insn (reloadreg, incloc));

  /* See if we can directly increment INCLOC.  Use a method similar to that
     in gen_input_reload.  */

  /* Emit the add speculatively; if it is not recognized as a valid insn
     we delete everything back to LAST and fall back below.  */
  last = get_last_insn ();
  add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
				 gen_rtx (PLUS, GET_MODE (incloc),
					  incloc, inc)));

  code = recog_memoized (add_insn);
  if (code >= 0)
    {
      insn_extract (add_insn);
      /* Constrain strictly, as this insn will never be seen by reload.  */
      if (constrain_operands (code, 1))
	{
	  /* If this is a pre-increment and we have incremented the value
	     where it lives, copy the incremented value to RELOADREG to
	     be used as an address.  */

	  if (! post)
	    emit_insn (gen_move_insn (reloadreg, incloc));

	  return;
	}
    }

  /* The direct add was not valid; remove it.  */
  delete_insns_since (last);

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      emit_insn (gen_move_insn (reloadreg, incloc));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied INCLOC to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
    }

  return;
}
6865\f
6866/* Return 1 if we are certain that the constraint-string STRING allows
6867 the hard register REG. Return 0 if we can't be sure of this. */
6868
6869static int
6870constraint_accepts_reg_p (string, reg)
6871 char *string;
6872 rtx reg;
6873{
6874 int value = 0;
6875 int regno = true_regnum (reg);
6876 int c;
6877
6878 /* Initialize for first alternative. */
6879 value = 0;
6880 /* Check that each alternative contains `g' or `r'. */
6881 while (1)
6882 switch (c = *string++)
6883 {
6884 case 0:
6885 /* If an alternative lacks `g' or `r', we lose. */
6886 return value;
6887 case ',':
6888 /* If an alternative lacks `g' or `r', we lose. */
6889 if (value == 0)
6890 return 0;
6891 /* Initialize for next alternative. */
6892 value = 0;
6893 break;
6894 case 'g':
6895 case 'r':
6896 /* Any general reg wins for this alternative. */
6897 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
6898 value = 1;
6899 break;
6900 default:
6901 /* Any reg in specified class wins for this alternative. */
6902 {
0009eff2 6903 enum reg_class class = REG_CLASS_FROM_LETTER (c);
32131a9c 6904
0009eff2 6905 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
32131a9c
RK
6906 value = 1;
6907 }
6908 }
6909}
6910\f
d445b551
RK
6911/* Return the number of places FIND appears within X, but don't count
6912 an occurrence if some SET_DEST is FIND. */
32131a9c
RK
6913
6914static int
6915count_occurrences (x, find)
6916 register rtx x, find;
6917{
6918 register int i, j;
6919 register enum rtx_code code;
6920 register char *format_ptr;
6921 int count;
6922
6923 if (x == find)
6924 return 1;
6925 if (x == 0)
6926 return 0;
6927
6928 code = GET_CODE (x);
6929
6930 switch (code)
6931 {
6932 case REG:
6933 case QUEUED:
6934 case CONST_INT:
6935 case CONST_DOUBLE:
6936 case SYMBOL_REF:
6937 case CODE_LABEL:
6938 case PC:
6939 case CC0:
6940 return 0;
d445b551
RK
6941
6942 case SET:
6943 if (SET_DEST (x) == find)
6944 return count_occurrences (SET_SRC (x), find);
6945 break;
32131a9c
RK
6946 }
6947
6948 format_ptr = GET_RTX_FORMAT (code);
6949 count = 0;
6950
6951 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6952 {
6953 switch (*format_ptr++)
6954 {
6955 case 'e':
6956 count += count_occurrences (XEXP (x, i), find);
6957 break;
6958
6959 case 'E':
6960 if (XVEC (x, i) != NULL)
6961 {
6962 for (j = 0; j < XVECLEN (x, i); j++)
6963 count += count_occurrences (XVECEXP (x, i, j), find);
6964 }
6965 break;
6966 }
6967 }
6968 return count;
6969}
This page took 1.090267 seconds and 5 git commands to generate.