/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include <stdio.h>
#include "config.h"
#include "rtl.h"
#include "obstack.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "flags.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "reload.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"

/* This file contains the reload pass of the compiler, which is
   run after register allocation has been done.  It checks that
   each insn is valid (operands required to be in registers really
   are in registers of the proper class) and fixes up invalid ones
   by copying values temporarily into registers for the insns
   that need them.

   The results of register allocation are described by the vector
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
   can be used to find which hard reg, if any, a pseudo reg is in.

   The technique we always use is to free up a few hard regs that are
   called ``reload regs'', and for each place where a pseudo reg
   must be in a hard reg, copy it temporarily into one of the reload regs.

   All the pseudos that were formerly allocated to the hard regs that
   are now in use as reload regs must be ``spilled''.  This means
   that they go to other hard regs, or to stack slots if no other
   available hard regs can be found.  Spilling can invalidate more
   insns, requiring additional need for reloads, so we must keep checking
   until the process stabilizes.

   For machines with different classes of registers, we must keep track
   of the register class needed for each reload, and make sure that
   we allocate enough reload registers of each class.

   The file reload.c contains the code that checks one insn for
   validity and reports the reloads that it needs.  This file
   is in charge of scanning the entire rtl code, accumulating the
   reload needs, spilling, assigning reload registers to use for
   fixing up each insn, and generating the new insns to copy values
   into the reload registers.  */


#ifndef REGISTER_MOVE_COST
#define REGISTER_MOVE_COST(x, y) 2
#endif

#ifndef MEMORY_MOVE_COST
#define MEMORY_MOVE_COST(x) 4
#endif
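
/* For illustration only: a port that does not want these fallback costs
   would normally override them in its tm.h.  The values below are
   hypothetical, chosen only to show the shape of such definitions; they
   are not taken from any particular target description.

	#define REGISTER_MOVE_COST(CLASS1, CLASS2) 2
	#define MEMORY_MOVE_COST(MODE) 6

   Making MEMORY_MOVE_COST exceed REGISTER_MOVE_COST biases reload toward
   keeping values in registers rather than spilling them to memory.  */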
\f
/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine.)  */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;

/* Widest width in which each pseudo reg is referred to (via subreg).  */
static int *reg_max_ref_width;

/* Element N is the insn that initialized reg N from its equivalent
   constant or memory slot.  */
static rtx *reg_equiv_init;

/* During reload_as_needed, element N contains the last pseudo regno
   reloaded into the Nth reload register.  This vector is in parallel
   with spill_regs.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   the Nth reload register was last used.  This vector is in parallel
   with spill_regs, and its contents are significant only when
   reg_reloaded_contents is significant.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];
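
/* As a small worked example of the mapping above: if n_spills were 2 and
   spill_regs held {6, 3}, then spill_reg_order[6] would be 0,
   spill_reg_order[3] would be 1, and every other element would be -1.  */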

/* This reg set indicates registers that may not be used for retrying global
   allocation.  The registers that may not be used include all spill registers
   and the frame pointer (if we are using one).  */
HARD_REG_SET forbidden_regs;

/* This reg set indicates registers that are not good for spill registers.
   They will not be used to complete groups of spill registers.  This includes
   all fixed registers, registers that may be eliminated, and, if
   SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.

   (spill_reg_order prevents these registers from being used to start a
   group.)  */
static HARD_REG_SET bad_spill_regs;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `spills' is the number of
   elements that are actually valid; new ones are added at the end.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* Describes order of preference for putting regs into spill_regs.
   Contains the numbers of all the hard regs, in order most preferred first.
   This order is different for each function.
   It is set up by order_regs_for_reload.
   Empty elements at the end contain -1.  */
static short potential_reload_regs[FIRST_PSEUDO_REGISTER];

/* 1 for a hard register that appears explicitly in the rtl
   (for example, function value registers, special registers
   used by insns, structure value pointer registers).  */
static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];

/* Indicates if a register was counted against the need for
   groups.  0 means it can count against max_nongroup instead.  */
static HARD_REG_SET counted_for_groups;

/* Indicates if a register was counted against the need for
   non-groups.  0 means it can become part of a new group.
   During choose_reload_regs, 1 here means don't use this reg
   as part of a group, even if it seems to be otherwise ok.  */
static HARD_REG_SET counted_for_nongroups;

/* Indexed by pseudo reg number N,
   says may not delete stores into the real (memory) home of pseudo N.
   This is set if we already substituted a memory equivalent in some uses,
   which happens when we have to eliminate the fp from it.  */
static char *cannot_omit_stores;

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */

static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */

char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */

char double_reg_address_ok;

/* Record the stack slot for each spilled hard register.  */

static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */

static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Indexed by register class and basic block number, nonzero if there is
   any need for a spill register of that class in that basic block.
   The pointer is 0 if we did stupid allocation and don't know
   the structure of basic blocks.  */

char *basic_block_needs[N_REG_CLASSES];
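
/* For instance, a nonzero basic_block_needs[(int) GENERAL_REGS][B] records
   that basic block B needs at least one spill register in GENERAL_REGS
   (purely an illustrative reading of the table above).  */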

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */

int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */

int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  */

enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */

struct obstack reload_obstack;
char *reload_firstobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* List of labels that must never be deleted.  */
extern rtx forced_labels;
\f
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

static struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  int initial_offset;		/* Initial difference between values.  */
  int can_eliminate;		/* Non-zero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
				   insns made by reload.  */
  int offset;			/* Current offset between the two regs.  */
  int max_offset;		/* Maximum offset between the two regs.  */
  int previous_offset;		/* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
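
/* Purely as an illustration of the table above: a target with separate
   argument and frame pointers might define ELIMINABLE_REGS in its tm.h
   roughly as

	#define ELIMINABLE_REGS					\
	{{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM},		\
	 { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM},		\
	 { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}}

   listing the most preferred elimination for each register first.  The
   entries shown are hypothetical, not taken from any particular port.  */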

/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If non-zero, we use a new copy of each
   replacement result in any insns encountered.  */
static int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the number of the label.
   The first table is an array of flags that records whether we have yet
   encountered a label and the second table is an array of arrays, one
   entry in the latter array for each elimination.  */

static char *offsets_known_at;
static int (*offsets_at)[NUM_ELIMINABLE_REGS];

/* Number of labels in the current function.  */

static int num_labels;

struct hard_reg_n_uses { int regno; int uses; };
\f
static int possible_group_p		PROTO((int, int *));
static void count_possible_groups	PROTO((int *, enum machine_mode *,
					       int *));
static int modes_equiv_for_class_p	PROTO((enum machine_mode,
					       enum machine_mode,
					       enum reg_class));
static void spill_failure		PROTO((rtx));
static int new_spill_reg		PROTO((int, int, int *, int *, int,
					       FILE *));
static void delete_dead_insn		PROTO((rtx));
static void alter_reg			PROTO((int, int));
static void mark_scratch_live		PROTO((rtx));
static void set_label_offsets		PROTO((rtx, rtx, int));
static int eliminate_regs_in_insn	PROTO((rtx, int));
static void mark_not_eliminable		PROTO((rtx, rtx));
static int spill_hard_reg		PROTO((int, int, FILE *, int));
static void scan_paradoxical_subregs	PROTO((rtx));
static int hard_reg_use_compare		PROTO((struct hard_reg_n_uses *,
					       struct hard_reg_n_uses *));
static void order_regs_for_reload	PROTO((void));
static int compare_spill_regs		PROTO((short *, short *));
static void reload_as_needed		PROTO((rtx, int));
static void forget_old_reloads_1	PROTO((rtx, rtx));
static int reload_reg_class_lower	PROTO((short *, short *));
static void mark_reload_reg_in_use	PROTO((int, int, enum reload_type,
					       enum machine_mode));
static void clear_reload_reg_in_use	PROTO((int, int, enum reload_type,
					       enum machine_mode));
static int reload_reg_free_p		PROTO((int, int, enum reload_type));
static int reload_reg_free_before_p	PROTO((int, int, enum reload_type));
static int reload_reg_reaches_end_p	PROTO((int, int, enum reload_type));
static int reloads_conflict		PROTO((int, int));
static int allocate_reload_reg		PROTO((int, rtx, int, int));
static void choose_reload_regs		PROTO((rtx, rtx));
static void merge_assigned_reloads	PROTO((rtx));
static void emit_reload_insns		PROTO((rtx));
static void delete_output_reload	PROTO((rtx, int, rtx));
static void inc_for_reload		PROTO((rtx, rtx, int));
static int constraint_accepts_reg_p	PROTO((char *, rtx));
static int count_occurrences		PROTO((rtx, rtx));
\f
/* Initialize the reload pass once per compilation.  */

void
init_reload ()
{
  register int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  register rtx tem
    = gen_rtx (MEM, Pmode,
	       gen_rtx (PLUS, Pmode,
			gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
			GEN_INT (4)));
  spill_indirect_levels = 0;

  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx (MEM, Pmode, tem);
    }

  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx (PLUS, Pmode,
		     gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
		     gen_rtx (REG, Pmode, i));
      /* This way, we make sure that reg+reg is an offsettable address.  */
      tem = plus_constant (tem, 4);

      if (memory_address_p (QImode, tem))
	{
	  double_reg_address_ok = 1;
	  break;
	}
    }

  /* Initialize obstack for our rtl allocation.  */
  gcc_obstack_init (&reload_obstack);
  reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
}

/* Main entry point for the reload pass.

   FIRST is the first insn of the function being compiled.

   GLOBAL nonzero means we were called from global_alloc
   and should attempt to reallocate any pseudoregs that we
   displace from hard regs we will use for reloads.
   If GLOBAL is zero, we do not have enough information to do that,
   so any pseudo reg that is spilled must go to the stack.

   DUMPFILE is the global-reg debugging dump file stream, or 0.
   If it is nonzero, messages are written to it to describe
   which registers are seized as reload regs, which pseudo regs
   are spilled from them, and where the pseudo regs are reallocated to.

   Return value is nonzero if reload failed
   and we must not do any more for this function.  */

int
reload (first, global, dumpfile)
     rtx first;
     int global;
     FILE *dumpfile;
{
  register int class;
  register int i, j, k;
  register rtx insn;
  register struct elim_table *ep;

  int something_changed;
  int something_needs_reloads;
  int something_needs_elimination;
  int new_basic_block_needs;
  enum reg_class caller_save_spill_class = NO_REGS;
  int caller_save_group_size = 1;

  /* Nonzero means we couldn't get enough spill regs.  */
  int failure = 0;

  /* The basic block number currently being processed for INSN.  */
  int this_block;

  /* Make sure even insns with volatile mem refs are recognizable.  */
  init_recog ();

  /* Enable find_equiv_reg to distinguish insns made by reload.  */
  reload_first_uid = get_max_uid ();

  for (i = 0; i < N_REG_CLASSES; i++)
    basic_block_needs[i] = 0;

#ifdef SECONDARY_MEMORY_NEEDED
  /* Initialize the secondary memory table.  */
  clear_secondary_mem ();
#endif

  /* Remember which hard regs appear explicitly
     before we merge into `regs_ever_live' the ones in which
     pseudo regs have been allocated.  */
  bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);

  /* We don't have a stack slot for any spill reg yet.  */
  bzero (spill_stack_slot, sizeof spill_stack_slot);
  bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);

  /* Initialize the save area information for caller-save, in case some
     are needed.  */
  init_save_areas ();

  /* Compute which hard registers are now in use
     as homes for pseudo registers.
     This is done here rather than (eg) in global_alloc
     because this point is reached even if not optimizing.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    mark_home_live (i);

  for (i = 0; i < scratch_list_length; i++)
    if (scratch_list[i])
      mark_scratch_live (scratch_list[i]);

  /* Make sure that the last insn in the chain
     is not something that needs reloading.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Find all the pseudo registers that didn't get hard regs
     but do have known equivalent constants or memory slots.
     These include parameters (known equivalent to parameter slots)
     and cse'd or loop-moved constant memory addresses.

     Record constant equivalents in reg_equiv_constant
     so they will be substituted by find_reloads.
     Record memory equivalents in reg_mem_equiv so they can
     be substituted eventually by altering the REG-rtx's.  */

  reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_constant, max_regno * sizeof (rtx));
  reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
  reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_mem, max_regno * sizeof (rtx));
  reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_init, max_regno * sizeof (rtx));
  reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_address, max_regno * sizeof (rtx));
  reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
  bzero (reg_max_ref_width, max_regno * sizeof (int));
  cannot_omit_stores = (char *) alloca (max_regno);
  bzero (cannot_omit_stores, max_regno);

#ifdef SMALL_REGISTER_CLASSES
  CLEAR_HARD_REG_SET (forbidden_regs);
#endif

  /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
     Also find all paradoxical subregs and find largest such for each pseudo.
     On machines with small register classes, record hard registers that
     are used for user variables.  These can never be used for spills.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  if (note
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
		  || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
#endif
	      )
	    {
	      rtx x = XEXP (note, 0);
	      i = REGNO (SET_DEST (set));
	      if (i > LAST_VIRTUAL_REGISTER)
		{
		  if (GET_CODE (x) == MEM)
		    reg_equiv_memory_loc[i] = x;
		  else if (CONSTANT_P (x))
		    {
		      if (LEGITIMATE_CONSTANT_P (x))
			reg_equiv_constant[i] = x;
		      else
			reg_equiv_memory_loc[i]
			  = force_const_mem (GET_MODE (SET_DEST (set)), x);
		    }
		  else
		    continue;

		  /* If this register is being made equivalent to a MEM
		     and the MEM is not SET_SRC, the equivalencing insn
		     is one with the MEM as a SET_DEST and it occurs later.
		     So don't mark this insn now.  */
		  if (GET_CODE (x) != MEM
		      || rtx_equal_p (SET_SRC (set), x))
		    reg_equiv_init[i] = insn;
		}
	    }
	}

      /* If this insn is setting a MEM from a register equivalent to it,
	 this is the equivalencing insn.  */
      else if (set && GET_CODE (SET_DEST (set)) == MEM
	       && GET_CODE (SET_SRC (set)) == REG
	       && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
	       && rtx_equal_p (SET_DEST (set),
			       reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
	reg_equiv_init[REGNO (SET_SRC (set))] = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	scan_paradoxical_subregs (PATTERN (insn));
    }

  /* Does this function require a frame pointer?  */

  frame_pointer_needed = (! flag_omit_frame_pointer
#ifdef EXIT_IGNORE_STACK
			  /* ?? If EXIT_IGNORE_STACK is set, we will not save
			     and restore sp for alloca.  So we can't eliminate
			     the frame pointer in that case.  At some point,
			     we should improve this by emitting the
			     sp-adjusting insns for this case.  */
			  || (current_function_calls_alloca
			      && EXIT_IGNORE_STACK)
#endif
			  || FRAME_POINTER_REQUIRED);

  num_eliminable = 0;

  /* Initialize the table of registers to eliminate.  The way we do this
     depends on how the eliminable registers were defined.  */
#ifdef ELIMINABLE_REGS
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      ep->can_eliminate = ep->can_eliminate_previous
	= (CAN_ELIMINATE (ep->from, ep->to)
	   && (ep->from != HARD_FRAME_POINTER_REGNUM
	       || ! frame_pointer_needed));
    }
#else
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
    = ! frame_pointer_needed;
#endif

  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx will cause, e.g.,
     gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
      ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
    }

  num_labels = max_label_num () - get_first_label_num ();

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = (char *) alloca (num_labels);
  offsets_at
    = (int (*)[NUM_ELIMINABLE_REGS])
      alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));

  offsets_known_at -= get_first_label_num ();
  offsets_at -= get_first_label_num ();

  /* Alter each pseudo-reg rtx to contain its hard reg number.
     Assign stack slots to the pseudos that lack hard regs or equivalents.
     Do not touch virtual registers.  */

  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
    alter_reg (i, -1);

  /* Round size of stack frame to BIGGEST_ALIGNMENT.  This must be done here
     because the stack size may be a part of the offset computation for
     register elimination.  */
  assign_stack_local (BLKmode, 0, 0);

  /* If we have some registers we think can be eliminated, scan all insns to
     see if there is an insn that sets one of these registers to something
     other than itself plus a constant.  If so, the register cannot be
     eliminated.  Doing this scan here eliminates an extra pass through the
     main reload loop in the most common case where register elimination
     cannot be done.  */
  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      note_stores (PATTERN (insn), mark_not_eliminable);

#ifndef REGISTER_CONSTRAINTS
  /* If all the pseudo regs have hard regs,
     except for those that are never referenced,
     we know that no reloads are needed.  */
  /* But that is not true if there are register constraints, since
     in that case some pseudos might be in the wrong kind of hard reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
      break;

  if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
    return 0;
#endif

  /* Compute the order of preference for hard registers to spill.
     Store them by decreasing preference in potential_reload_regs.  */

  order_regs_for_reload ();

  /* So far, no hard regs have been spilled.  */
  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    spill_reg_order[i] = -1;

  /* On most machines, we can't use any register explicitly used in the
     rtl as a spill register.  But on some, we have to.  Those will have
     taken care to keep the life of hard regs as short as possible.  */

#ifndef SMALL_REGISTER_CLASSES
  COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
#endif

  /* Spill any hard regs that we know we can't eliminate.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if (! ep->can_eliminate)
      {
	spill_hard_reg (ep->from, global, dumpfile, 1);
	regs_ever_live[ep->from] = 1;
      }

  if (global)
    for (i = 0; i < N_REG_CLASSES; i++)
      {
	basic_block_needs[i] = (char *) alloca (n_basic_blocks);
	bzero (basic_block_needs[i], n_basic_blocks);
      }

  /* From now on, we need to emit any moves without making new pseudos.  */
  reload_in_progress = 1;

  /* This loop scans the entire function each go-round
     and repeats until one repetition spills no additional hard regs.  */

  /* This flag is set when a pseudo reg is spilled,
     to require another pass.  Note that getting an additional reload
     reg does not necessarily imply any pseudo reg was spilled;
     sometimes we find a reload reg that no pseudo reg was allocated in.  */
  something_changed = 1;
  /* This flag is set if there are any insns that require reloading.  */
  something_needs_reloads = 0;
  /* This flag is set if there are any insns that require register
     eliminations.  */
  something_needs_elimination = 0;
  while (something_changed)
    {
      rtx after_call = 0;

      /* For each class, number of reload regs needed in that class.
	 This is the maximum over all insns of the needs in that class
	 of the individual insn.  */
      int max_needs[N_REG_CLASSES];
      /* For each class, size of group of consecutive regs
	 that is needed for the reloads of this class.  */
      int group_size[N_REG_CLASSES];
      /* For each class, max number of consecutive groups needed.
	 (Each group contains group_size[CLASS] consecutive registers.)  */
      int max_groups[N_REG_CLASSES];
      /* For each class, max number needed of regs that don't belong
	 to any of the groups.  */
      int max_nongroups[N_REG_CLASSES];
      /* For each class, the machine mode which requires consecutive
	 groups of regs of that class.
	 If two different modes ever require groups of one class,
	 they must be the same size and equally restrictive for that class,
	 otherwise we can't handle the complexity.  */
      enum machine_mode group_mode[N_REG_CLASSES];
      /* Record the insn where each maximum need is first found.  */
      rtx max_needs_insn[N_REG_CLASSES];
      rtx max_groups_insn[N_REG_CLASSES];
      rtx max_nongroups_insn[N_REG_CLASSES];
      rtx x;
      int starting_frame_size = get_frame_size ();
      static char *reg_class_names[] = REG_CLASS_NAMES;

      something_changed = 0;
      bzero (max_needs, sizeof max_needs);
      bzero (max_groups, sizeof max_groups);
      bzero (max_nongroups, sizeof max_nongroups);
      bzero (max_needs_insn, sizeof max_needs_insn);
      bzero (max_groups_insn, sizeof max_groups_insn);
      bzero (max_nongroups_insn, sizeof max_nongroups_insn);
      bzero (group_size, sizeof group_size);
      for (i = 0; i < N_REG_CLASSES; i++)
	group_mode[i] = VOIDmode;

      /* Keep track of which basic blocks are needing the reloads.  */
      this_block = 0;

      /* Remember whether any element of basic_block_needs
	 changes from 0 to 1 in this pass.  */
      new_basic_block_needs = 0;

      /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	{
	  INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
	  ep->previous_offset = ep->offset
	    = ep->max_offset = ep->initial_offset;
	}
#else
#ifdef INITIAL_FRAME_POINTER_OFFSET
      INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
#else
      if (!FRAME_POINTER_REQUIRED)
	abort ();
      reg_eliminate[0].initial_offset = 0;
#endif
      reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
	= reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

      num_not_at_initial_offset = 0;

      bzero (&offsets_known_at[get_first_label_num ()], num_labels);

      /* Set a known offset for each forced label to be at the initial offset
	 of each elimination.  We do this because we assume that all
	 computed jumps occur from a location where each elimination is
	 at its initial offset.  */

      for (x = forced_labels; x; x = XEXP (x, 1))
	if (XEXP (x, 0))
	  set_label_offsets (XEXP (x, 0), NULL_RTX, 1);

      /* For each pseudo register that has an equivalent location defined,
	 try to eliminate any eliminable registers (such as the frame pointer)
	 assuming initial offsets for the replacement register, which
	 is the normal case.

	 If the resulting location is directly addressable, substitute
	 the MEM we just got directly for the old REG.

	 If it is not addressable but is a constant or the sum of a hard reg
	 and constant, it is probably not addressable because the constant is
	 out of range; in that case record the address, and we will generate
	 hairy code to compute the address in a register each time it is
	 needed.  Similarly if it is a hard register, but one that is not
	 valid as an address register.

	 If the location is not addressable, but does not have one of the
	 above forms, assign a stack slot.  We have to do this to avoid the
	 potential of producing lots of reloads if, e.g., a location involves
	 a pseudo that didn't get a hard register and has an equivalent memory
	 location that also involves a pseudo that didn't get a hard register.

	 Perhaps at some point we will improve reload_when_needed handling
	 so this problem goes away.  But that's very hairy.  */

      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
	if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
	  {
	    rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);

	    if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
					 XEXP (x, 0)))
	      reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
	    else if (CONSTANT_P (XEXP (x, 0))
		     || (GET_CODE (XEXP (x, 0)) == REG
			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
		     || (GET_CODE (XEXP (x, 0)) == PLUS
			 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
			 && (REGNO (XEXP (XEXP (x, 0), 0))
			     < FIRST_PSEUDO_REGISTER)
			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
	      reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
	    else
	      {
		/* Make a new stack slot.  Then indicate that something
		   changed so we go back and recompute offsets for
		   eliminable registers because the allocation of memory
		   below might change some offset.  reg_equiv_{mem,address}
		   will be set up for this pseudo on the next pass around
		   the loop.  */
		reg_equiv_memory_loc[i] = 0;
		reg_equiv_init[i] = 0;
		alter_reg (i, -1);
		something_changed = 1;
	      }
	  }

      /* If we allocated another pseudo to the stack, redo elimination
	 bookkeeping.  */
      if (something_changed)
	continue;

      /* If caller-saves needs a group, initialize the group to include
	 the size and mode required for caller-saves.  */

      if (caller_save_group_size > 1)
	{
	  group_mode[(int) caller_save_spill_class] = Pmode;
	  group_size[(int) caller_save_spill_class] = caller_save_group_size;
	}

      /* Compute the most additional registers needed by any instruction.
	 Collect information separately for each class of regs.  */

      for (insn = first; insn; insn = NEXT_INSN (insn))
	{
	  if (global && this_block + 1 < n_basic_blocks
	      && insn == basic_block_head[this_block+1])
	    ++this_block;

	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
	     might include REG_LABEL), we need to see what effects this
	     has on the known offsets at labels.  */

	  if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
	      || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
		  && REG_NOTES (insn) != 0))
	    set_label_offsets (insn, insn, 0);

	  if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	    {
	      /* Nonzero means don't use a reload reg that overlaps
		 the place where a function value can be returned.  */
	      rtx avoid_return_reg = 0;

	      rtx old_body = PATTERN (insn);
	      int old_code = INSN_CODE (insn);
	      rtx old_notes = REG_NOTES (insn);
	      int did_elimination = 0;

	      /* To compute the number of reload registers of each class
		 needed for an insn, we must simulate what choose_reload_regs
		 can do.  We do this by splitting an insn into an "input" and
		 an "output" part.  RELOAD_OTHER reloads are used in both.
		 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
		 which must be live over the entire input section of reloads,
		 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
		 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
		 inputs.

		 The registers needed for output are RELOAD_OTHER and
		 RELOAD_FOR_OUTPUT, which are live for the entire output
		 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
		 reloads for each operand.

		 The total number of registers needed is the maximum of the
		 inputs and outputs.  */

	      struct needs
		{
		  /* [0] is normal, [1] is nongroup.  */
		  int regs[2][N_REG_CLASSES];
		  int groups[N_REG_CLASSES];
		};

	      /* Each `struct needs' corresponds to one RELOAD_... type.  */
	      struct {
		struct needs other;
		struct needs input;
		struct needs output;
		struct needs insn;
		struct needs other_addr;
		struct needs op_addr;
		struct needs op_addr_reload;
		struct needs in_addr[MAX_RECOG_OPERANDS];
		struct needs out_addr[MAX_RECOG_OPERANDS];
	      } insn_needs;

	      /* If needed, eliminate any eliminable registers.  */
	      if (num_eliminable)
		did_elimination = eliminate_regs_in_insn (insn, 0);

#ifdef SMALL_REGISTER_CLASSES
	      /* Set avoid_return_reg if this is an insn
		 that might use the value of a function call.  */
	      if (GET_CODE (insn) == CALL_INSN)
		{
		  if (GET_CODE (PATTERN (insn)) == SET)
		    after_call = SET_DEST (PATTERN (insn));
		  else if (GET_CODE (PATTERN (insn)) == PARALLEL
			   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		    after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
		  else
		    after_call = 0;
		}
	      else if (after_call != 0
		       && !(GET_CODE (PATTERN (insn)) == SET
			    && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
		{
		  if (reg_referenced_p (after_call, PATTERN (insn)))
		    avoid_return_reg = after_call;
		  after_call = 0;
		}
#endif /* SMALL_REGISTER_CLASSES */

	      /* Analyze the instruction.  */
	      find_reloads (insn, 0, spill_indirect_levels, global,
			    spill_reg_order);

	      /* Remember for later shortcuts which insns had any reloads or
		 register eliminations.

		 One might think that it would be worthwhile to mark insns
		 that need register replacements but not reloads, but this is
		 not safe because find_reloads may do some manipulation of
		 the insn (such as swapping commutative operands), which would
		 be lost when we restore the old pattern after register
		 replacement.  So the actions of find_reloads must be redone in
		 subsequent passes or in reload_as_needed.

		 However, it is safe to mark insns that need reloads
		 but not register replacement.  */

	      PUT_MODE (insn, (did_elimination ? QImode
			       : n_reloads ? HImode
			       : GET_MODE (insn) == DImode ? DImode
			       : VOIDmode));

	      /* Discard any register replacements done.  */
	      if (did_elimination)
		{
		  obstack_free (&reload_obstack, reload_firstobj);
		  PATTERN (insn) = old_body;
		  INSN_CODE (insn) = old_code;
		  REG_NOTES (insn) = old_notes;
		  something_needs_elimination = 1;
		}

	      /* If this insn has no reloads, we need not do anything except
		 in the case of a CALL_INSN when we have caller-saves and
		 caller-save needs reloads.  */

	      if (n_reloads == 0
		  && ! (GET_CODE (insn) == CALL_INSN
			&& caller_save_spill_class != NO_REGS))
		continue;

	      something_needs_reloads = 1;
	      bzero (&insn_needs, sizeof insn_needs);

	      /* Count each reload once in every class
		 containing the reload's own class.  */

	      for (i = 0; i < n_reloads; i++)
		{
		  register enum reg_class *p;
		  enum reg_class class = reload_reg_class[i];
		  int size;
		  enum machine_mode mode;
		  int nongroup_need;
		  struct needs *this_needs;

		  /* Don't count the dummy reloads, for which one of the
		     regs mentioned in the insn can be used for reloading.
		     Don't count optional reloads.
		     Don't count reloads that got combined with others.  */
		  if (reload_reg_rtx[i] != 0
		      || reload_optional[i] != 0
		      || (reload_out[i] == 0 && reload_in[i] == 0
			  && ! reload_secondary_p[i]))
		    continue;

		  /* Show that a reload register of this class is needed
		     in this basic block.  We do not use insn_needs and
		     insn_groups because they are overly conservative for
		     this purpose.  */
		  if (global && ! basic_block_needs[(int) class][this_block])
		    {
		      basic_block_needs[(int) class][this_block] = 1;
		      new_basic_block_needs = 1;
		    }

		  mode = reload_inmode[i];
		  if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
		    mode = reload_outmode[i];
		  size = CLASS_MAX_NREGS (class, mode);

		  /* If this class doesn't want a group, determine if we have
		     a nongroup need or a regular need.  We have a nongroup
		     need if this reload conflicts with a group reload whose
		     class intersects with this reload's class.  */

		  nongroup_need = 0;
		  if (size == 1)
		    for (j = 0; j < n_reloads; j++)
		      if ((CLASS_MAX_NREGS (reload_reg_class[j],
					    (GET_MODE_SIZE (reload_outmode[j])
					     > GET_MODE_SIZE (reload_inmode[j]))
					    ? reload_outmode[j]
					    : reload_inmode[j])
			   > 1)
			  && (!reload_optional[j])
			  && (reload_in[j] != 0 || reload_out[j] != 0
			      || reload_secondary_p[j])
			  && reloads_conflict (i, j)
			  && reg_classes_intersect_p (class,
						      reload_reg_class[j]))
			{
			  nongroup_need = 1;
			  break;
			}

1087
32131a9c
RK
1088 /* Decide which time-of-use to count this reload for. */
1089 switch (reload_when_needed[i])
1090 {
1091 case RELOAD_OTHER:
8b3e912b 1092 this_needs = &insn_needs.other;
32131a9c 1093 break;
546b63fb 1094 case RELOAD_FOR_INPUT:
8b3e912b 1095 this_needs = &insn_needs.input;
32131a9c 1096 break;
546b63fb 1097 case RELOAD_FOR_OUTPUT:
8b3e912b 1098 this_needs = &insn_needs.output;
32131a9c 1099 break;
546b63fb 1100 case RELOAD_FOR_INSN:
8b3e912b 1101 this_needs = &insn_needs.insn;
546b63fb 1102 break;
546b63fb 1103 case RELOAD_FOR_OTHER_ADDRESS:
8b3e912b 1104 this_needs = &insn_needs.other_addr;
546b63fb 1105 break;
546b63fb 1106 case RELOAD_FOR_INPUT_ADDRESS:
8b3e912b 1107 this_needs = &insn_needs.in_addr[reload_opnum[i]];
546b63fb 1108 break;
546b63fb 1109 case RELOAD_FOR_OUTPUT_ADDRESS:
8b3e912b 1110 this_needs = &insn_needs.out_addr[reload_opnum[i]];
546b63fb 1111 break;
32131a9c 1112 case RELOAD_FOR_OPERAND_ADDRESS:
8b3e912b 1113 this_needs = &insn_needs.op_addr;
32131a9c 1114 break;
893bc853
RK
1115 case RELOAD_FOR_OPADDR_ADDR:
1116 this_needs = &insn_needs.op_addr_reload;
1117 break;
32131a9c
RK
1118 }
1119
32131a9c
RK
1120 if (size > 1)
1121 {
1122 enum machine_mode other_mode, allocate_mode;
1123
1124 /* Count number of groups needed separately from
1125 number of individual regs needed. */
8b3e912b 1126 this_needs->groups[(int) class]++;
e85ddd99 1127 p = reg_class_superclasses[(int) class];
32131a9c 1128 while (*p != LIM_REG_CLASSES)
8b3e912b 1129 this_needs->groups[(int) *p++]++;
32131a9c
RK
1130
1131 /* Record size and mode of a group of this class. */
1132 /* If more than one size group is needed,
1133 make all groups the largest needed size. */
e85ddd99 1134 if (group_size[(int) class] < size)
32131a9c 1135 {
e85ddd99 1136 other_mode = group_mode[(int) class];
32131a9c
RK
1137 allocate_mode = mode;
1138
e85ddd99
RK
1139 group_size[(int) class] = size;
1140 group_mode[(int) class] = mode;
32131a9c
RK
1141 }
1142 else
1143 {
1144 other_mode = mode;
e85ddd99 1145 allocate_mode = group_mode[(int) class];
32131a9c
RK
1146 }
1147
1148 /* Crash if two dissimilar machine modes both need
1149 groups of consecutive regs of the same class. */
1150
8b3e912b 1151 if (other_mode != VOIDmode && other_mode != allocate_mode
32131a9c 1152 && ! modes_equiv_for_class_p (allocate_mode,
8b3e912b 1153 other_mode, class))
32131a9c
RK
1154 abort ();
1155 }
1156 else if (size == 1)
1157 {
8b3e912b 1158 this_needs->regs[nongroup_need][(int) class] += 1;
e85ddd99 1159 p = reg_class_superclasses[(int) class];
32131a9c 1160 while (*p != LIM_REG_CLASSES)
8b3e912b 1161 this_needs->regs[nongroup_need][(int) *p++] += 1;
32131a9c
RK
1162 }
1163 else
1164 abort ();
1165 }
1166
	      /* All reloads have been counted for this insn;
		 now merge the various times of use.
		 This sets insn_needs, etc., to the maximum total number
		 of registers needed at any point in this insn.  */

	      for (i = 0; i < N_REG_CLASSES; i++)
		{
		  int in_max, out_max;

		  /* Compute normal and nongroup needs.  */
		  for (j = 0; j <= 1; j++)
		    {
		      for (in_max = 0, out_max = 0, k = 0;
			   k < reload_n_operands; k++)
			{
			  in_max
			    = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
			  out_max
			    = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
			}

		      /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
			 and operand addresses but not things used to reload
			 them.  Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
			 don't conflict with things needed to reload inputs or
			 outputs.  */

		      in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
					 insn_needs.op_addr_reload.regs[j][i]),
				    in_max);

		      out_max = MAX (out_max, insn_needs.insn.regs[j][i]);

		      insn_needs.input.regs[j][i]
			= MAX (insn_needs.input.regs[j][i]
			       + insn_needs.op_addr.regs[j][i]
			       + insn_needs.insn.regs[j][i],
			       in_max + insn_needs.input.regs[j][i]);

		      insn_needs.output.regs[j][i] += out_max;
		      insn_needs.other.regs[j][i]
			+= MAX (MAX (insn_needs.input.regs[j][i],
				     insn_needs.output.regs[j][i]),
				insn_needs.other_addr.regs[j][i]);
		    }

		  /* Now compute group needs.  */
		  for (in_max = 0, out_max = 0, j = 0;
		       j < reload_n_operands; j++)
		    {
		      in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
		      out_max
			= MAX (out_max, insn_needs.out_addr[j].groups[i]);
		    }

		  in_max = MAX (MAX (insn_needs.op_addr.groups[i],
				     insn_needs.op_addr_reload.groups[i]),
				in_max);
		  out_max = MAX (out_max, insn_needs.insn.groups[i]);

		  insn_needs.input.groups[i]
		    = MAX (insn_needs.input.groups[i]
			   + insn_needs.op_addr.groups[i]
			   + insn_needs.insn.groups[i],
			   in_max + insn_needs.input.groups[i]);

		  insn_needs.output.groups[i] += out_max;
		  insn_needs.other.groups[i]
		    += MAX (MAX (insn_needs.input.groups[i],
				 insn_needs.output.groups[i]),
			    insn_needs.other_addr.groups[i]);
		}

	      /* If this is a CALL_INSN and caller-saves will need
		 a spill register, act as if the spill register is
		 needed for this insn.  However, the spill register
		 can be used by any reload of this insn, so we only
		 need do something if no need for that class has
		 been recorded.

		 The assumption that every CALL_INSN will trigger a
		 caller-save is highly conservative; however, the number
		 of cases where caller-saves will need a spill register but
		 a block containing a CALL_INSN won't need a spill register
		 of that class should be quite rare.

		 If a group is needed, the size and mode of the group will
		 have been set up at the beginning of this loop.  */

	      if (GET_CODE (insn) == CALL_INSN
		  && caller_save_spill_class != NO_REGS)
		{
		  /* See if this register would conflict with any reload
		     that needs a group.  */
		  int nongroup_need = 0;
		  int *caller_save_needs;

		  for (j = 0; j < n_reloads; j++)
		    if ((CLASS_MAX_NREGS (reload_reg_class[j],
					  (GET_MODE_SIZE (reload_outmode[j])
					   > GET_MODE_SIZE (reload_inmode[j]))
					  ? reload_outmode[j]
					  : reload_inmode[j])
			 > 1)
			&& reg_classes_intersect_p (caller_save_spill_class,
						    reload_reg_class[j]))
		      {
			nongroup_need = 1;
			break;
		      }

		  caller_save_needs
		    = (caller_save_group_size > 1
		       ? insn_needs.other.groups
		       : insn_needs.other.regs[nongroup_need]);

		  if (caller_save_needs[(int) caller_save_spill_class] == 0)
		    {
		      register enum reg_class *p
			= reg_class_superclasses[(int) caller_save_spill_class];

		      caller_save_needs[(int) caller_save_spill_class]++;

		      while (*p != LIM_REG_CLASSES)
			caller_save_needs[(int) *p++] += 1;
		    }

		  /* Show that this basic block will need a register of
		     this class.  */

		  if (global
		      && ! (basic_block_needs[(int) caller_save_spill_class]
			    [this_block]))
		    {
		      basic_block_needs[(int) caller_save_spill_class]
			[this_block] = 1;
		      new_basic_block_needs = 1;
		    }
		}

#ifdef SMALL_REGISTER_CLASSES
	      /* If this insn stores the value of a function call,
		 and that value is in a register that has been spilled,
		 and if the insn needs a reload in a class
		 that might use that register as the reload register,
		 then add an extra need in that class.
		 This makes sure we have a register available that does
		 not overlap the return value.  */

	      if (avoid_return_reg)
		{
		  int regno = REGNO (avoid_return_reg);
		  int nregs
		    = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
		  int r;
		  int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];

		  /* First compute the "basic needs", which counts a
		     need only in the smallest class in which it
		     is required.  */

		  bcopy (insn_needs.other.regs[0], basic_needs,
			 sizeof basic_needs);
		  bcopy (insn_needs.other.groups, basic_groups,
			 sizeof basic_groups);

		  for (i = 0; i < N_REG_CLASSES; i++)
		    {
		      enum reg_class *p;

		      if (basic_needs[i] >= 0)
			for (p = reg_class_superclasses[i];
			     *p != LIM_REG_CLASSES; p++)
			  basic_needs[(int) *p] -= basic_needs[i];

		      if (basic_groups[i] >= 0)
			for (p = reg_class_superclasses[i];
			     *p != LIM_REG_CLASSES; p++)
			  basic_groups[(int) *p] -= basic_groups[i];
		    }

		  /* Now count extra regs if there might be a conflict with
		     the return value register.

		     ??? This is not quite correct because we don't properly
		     handle the case of groups, but if we end up doing
		     something wrong, it either will end up not mattering or
		     we will abort elsewhere.  */

		  for (r = regno; r < regno + nregs; r++)
		    if (spill_reg_order[r] >= 0)
		      for (i = 0; i < N_REG_CLASSES; i++)
			if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
			  {
			    if (basic_needs[i] > 0 || basic_groups[i] > 0)
			      {
				enum reg_class *p;

				insn_needs.other.regs[0][i]++;
				p = reg_class_superclasses[i];
				while (*p != LIM_REG_CLASSES)
				  insn_needs.other.regs[0][(int) *p++]++;
			      }
			  }
		}
#endif /* SMALL_REGISTER_CLASSES */

	      /* For each class, collect maximum need of any insn.  */

	      for (i = 0; i < N_REG_CLASSES; i++)
		{
		  if (max_needs[i] < insn_needs.other.regs[0][i])
		    {
		      max_needs[i] = insn_needs.other.regs[0][i];
		      max_needs_insn[i] = insn;
		    }
		  if (max_groups[i] < insn_needs.other.groups[i])
		    {
		      max_groups[i] = insn_needs.other.groups[i];
		      max_groups_insn[i] = insn;
		    }
		  if (max_nongroups[i] < insn_needs.other.regs[1][i])
		    {
		      max_nongroups[i] = insn_needs.other.regs[1][i];
		      max_nongroups_insn[i] = insn;
		    }
		}
	    }
	  /* Note that there is a continue statement above.  */
	}

      /* If we allocated any new memory locations, make another pass
	 since it might have changed elimination offsets.  */
      if (starting_frame_size != get_frame_size ())
	something_changed = 1;

      if (dumpfile)
	for (i = 0; i < N_REG_CLASSES; i++)
	  {
	    if (max_needs[i] > 0)
	      fprintf (dumpfile,
		       ";; Need %d reg%s of class %s (for insn %d).\n",
		       max_needs[i], max_needs[i] == 1 ? "" : "s",
		       reg_class_names[i], INSN_UID (max_needs_insn[i]));
	    if (max_nongroups[i] > 0)
	      fprintf (dumpfile,
		       ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
		       max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
		       reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
	    if (max_groups[i] > 0)
	      fprintf (dumpfile,
		       ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
		       max_groups[i], max_groups[i] == 1 ? "" : "s",
		       mode_name[(int) group_mode[i]],
		       reg_class_names[i], INSN_UID (max_groups_insn[i]));
	  }

      /* If we have caller-saves, set up the save areas and see if caller-save
	 will need a spill register.  */

      if (caller_save_needed
	  && ! setup_save_areas (&something_changed)
	  && caller_save_spill_class == NO_REGS)
	{
	  /* The class we will need depends on whether the machine
	     supports the sum of two registers for an address; see
	     find_address_reloads for details.  */

	  caller_save_spill_class
	    = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
	  caller_save_group_size
	    = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
	  something_changed = 1;
	}

      /* See if anything that happened changes which eliminations are valid.
	 For example, on the Sparc, whether or not the frame pointer can
	 be eliminated can depend on what registers have been used.  We need
	 not check some conditions again (such as flag_omit_frame_pointer)
	 since they can't have changed.  */

      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
#ifdef ELIMINABLE_REGS
	    || ! CAN_ELIMINATE (ep->from, ep->to)
#endif
	    )
	  ep->can_eliminate = 0;

32131a9c
RK
1457 /* Look for the case where we have discovered that we can't replace
1458 register A with register B and that means that we will now be
1459 trying to replace register A with register C. This means we can
1460 no longer replace register C with register B and we need to disable
1461 such an elimination, if it exists. This occurs often with A == ap,
1462 B == sp, and C == fp. */
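   /* In the common case named above: if ap -> sp becomes invalid, ap will
      now be eliminated to fp instead, so the fp -> sp elimination must be
      disabled; otherwise ap's new replacement (fp) would itself be
      replaced by sp.  */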
a8fdc208 1463
32131a9c
RK
1464 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1465 {
1466 struct elim_table *op;
1467 register int new_to = -1;
1468
1469 if (! ep->can_eliminate && ep->can_eliminate_previous)
1470 {
1471 /* Find the current elimination for ep->from, if there is a
1472 new one. */
1473 for (op = reg_eliminate;
1474 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1475 if (op->from == ep->from && op->can_eliminate)
1476 {
1477 new_to = op->to;
1478 break;
1479 }
1480
1481 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1482 disable it. */
1483 for (op = reg_eliminate;
1484 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1485 if (op->from == new_to && op->to == ep->to)
1486 op->can_eliminate = 0;
1487 }
1488 }
1489
1490 /* See if any registers that we thought we could eliminate the previous
1491 time are no longer eliminable. If so, something has changed and we
1492 must spill the register. Also, recompute the number of eliminable
1493 registers and see if the frame pointer is needed; it is if there is
1494 no elimination of the frame pointer that we can perform. */
1495
1496 frame_pointer_needed = 1;
1497 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1498 {
3ec2ea3e
DE
1499 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1500 && ep->to != HARD_FRAME_POINTER_REGNUM)
32131a9c
RK
1501 frame_pointer_needed = 0;
1502
1503 if (! ep->can_eliminate && ep->can_eliminate_previous)
1504 {
1505 ep->can_eliminate_previous = 0;
1506 spill_hard_reg (ep->from, global, dumpfile, 1);
1507 regs_ever_live[ep->from] = 1;
1508 something_changed = 1;
1509 num_eliminable--;
1510 }
1511 }
1512
1513 /* If all needs are met, we win. */
1514
1515 for (i = 0; i < N_REG_CLASSES; i++)
1516 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1517 break;
1518 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1519 break;
1520
546b63fb
RK
1521 /* Not all needs are met; must spill some hard regs. */
1522
1523 /* Put all registers spilled so far back in potential_reload_regs, but
1524 put them at the front, since we've already spilled most of the
 1525 pseudos in them (we might have left some pseudos unspilled if they
 1526 were in a block that didn't need any spill registers of a conflicting
 1527 class). We used to try to mark off the need for those registers,
1528 but doing so properly is very complex and reallocating them is the
1529 simpler approach. First, "pack" potential_reload_regs by pushing
1530 any nonnegative entries towards the end. That will leave room
1531 for the registers we already spilled.
1532
1533 Also, undo the marking of the spill registers from the last time
 1534 around in FORBIDDEN_REGS since we will probably be allocating
1535 them again below.
1536
1537 ??? It is theoretically possible that we might end up not using one
1538 of our previously-spilled registers in this allocation, even though
1539 they are at the head of the list. It's not clear what to do about
1540 this, but it was no better before, when we marked off the needs met
1541 by the previously-spilled registers. With the current code, globals
1542 can be allocated into these registers, but locals cannot. */
1543
1544 if (n_spills)
1545 {
1546 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1547 if (potential_reload_regs[i] != -1)
1548 potential_reload_regs[j--] = potential_reload_regs[i];
32131a9c 1549
546b63fb
RK
1550 for (i = 0; i < n_spills; i++)
1551 {
1552 potential_reload_regs[i] = spill_regs[i];
1553 spill_reg_order[spill_regs[i]] = -1;
1554 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1555 }
32131a9c 1556
546b63fb
RK
1557 n_spills = 0;
1558 }
32131a9c
RK
1559
 1560 /* Now find more reload regs to satisfy the remaining need.
1561 Do it by ascending class number, since otherwise a reg
1562 might be spilled for a big class and might fail to count
1563 for a smaller class even though it belongs to that class.
1564
1565 Count spilled regs in `spills', and add entries to
1566 `spill_regs' and `spill_reg_order'.
1567
1568 ??? Note there is a problem here.
1569 When there is a need for a group in a high-numbered class,
1570 and also need for non-group regs that come from a lower class,
1571 the non-group regs are chosen first. If there aren't many regs,
1572 they might leave no room for a group.
1573
1574 This was happening on the 386. To fix it, we added the code
1575 that calls possible_group_p, so that the lower class won't
1576 break up the last possible group.
1577
1578 Really fixing the problem would require changes above
1579 in counting the regs already spilled, and in choose_reload_regs.
1580 It might be hard to avoid introducing bugs there. */
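   /* Illustrative sketch of the 386 problem: if only hard regs 0..3 remain
      spillable, a size-2 group is needed in a large class, and single-reg
      needs from a smaller class are satisfied first using regs 0 and 2,
      then only regs 1 and 3 remain and no adjacent pair is left for the
      group.  possible_group_p is the check that avoids such choices.  */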
1581
546b63fb
RK
1582 CLEAR_HARD_REG_SET (counted_for_groups);
1583 CLEAR_HARD_REG_SET (counted_for_nongroups);
1584
32131a9c
RK
1585 for (class = 0; class < N_REG_CLASSES; class++)
1586 {
1587 /* First get the groups of registers.
1588 If we got single registers first, we might fragment
1589 possible groups. */
1590 while (max_groups[class] > 0)
1591 {
1592 /* If any single spilled regs happen to form groups,
1593 count them now. Maybe we don't really need
1594 to spill another group. */
1595 count_possible_groups (group_size, group_mode, max_groups);
1596
93193ab5
RK
1597 if (max_groups[class] <= 0)
1598 break;
1599
32131a9c
RK
1600 /* Groups of size 2 (the only groups used on most machines)
1601 are treated specially. */
1602 if (group_size[class] == 2)
1603 {
1604 /* First, look for a register that will complete a group. */
1605 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1606 {
32131a9c 1607 int other;
546b63fb
RK
1608
1609 j = potential_reload_regs[i];
32131a9c
RK
1610 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1611 &&
1612 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1613 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1614 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1615 && HARD_REGNO_MODE_OK (other, group_mode[class])
1616 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1617 other)
1618 /* We don't want one part of another group.
1619 We could get "two groups" that overlap! */
1620 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1621 ||
1622 (j < FIRST_PSEUDO_REGISTER - 1
1623 && (other = j + 1, spill_reg_order[other] >= 0)
1624 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1625 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1626 && HARD_REGNO_MODE_OK (j, group_mode[class])
1627 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1628 other)
1629 && ! TEST_HARD_REG_BIT (counted_for_groups,
1630 other))))
1631 {
1632 register enum reg_class *p;
1633
1634 /* We have found one that will complete a group,
1635 so count off one group as provided. */
1636 max_groups[class]--;
1637 p = reg_class_superclasses[class];
1638 while (*p != LIM_REG_CLASSES)
1639 max_groups[(int) *p++]--;
1640
1641 /* Indicate both these regs are part of a group. */
1642 SET_HARD_REG_BIT (counted_for_groups, j);
1643 SET_HARD_REG_BIT (counted_for_groups, other);
1644 break;
1645 }
1646 }
1647 /* We can't complete a group, so start one. */
92b0556d
RS
1648#ifdef SMALL_REGISTER_CLASSES
1649 /* Look for a pair neither of which is explicitly used. */
1650 if (i == FIRST_PSEUDO_REGISTER)
1651 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1652 {
1653 int k;
1654 j = potential_reload_regs[i];
1655 /* Verify that J+1 is a potential reload reg. */
1656 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1657 if (potential_reload_regs[k] == j + 1)
1658 break;
1659 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1660 && k < FIRST_PSEUDO_REGISTER
1661 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1662 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1663 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1664 && HARD_REGNO_MODE_OK (j, group_mode[class])
1665 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1666 j + 1)
1667 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1668 /* Reject J at this stage
1669 if J+1 was explicitly used. */
1670 && ! regs_explicitly_used[j + 1])
1671 break;
1672 }
1673#endif
1674 /* Now try any group at all
1675 whose registers are not in bad_spill_regs. */
32131a9c
RK
1676 if (i == FIRST_PSEUDO_REGISTER)
1677 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1678 {
57697575 1679 int k;
546b63fb 1680 j = potential_reload_regs[i];
57697575
RS
1681 /* Verify that J+1 is a potential reload reg. */
1682 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1683 if (potential_reload_regs[k] == j + 1)
1684 break;
32131a9c 1685 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
57697575 1686 && k < FIRST_PSEUDO_REGISTER
32131a9c
RK
1687 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1688 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1689 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1690 && HARD_REGNO_MODE_OK (j, group_mode[class])
1691 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
ee9f63c6
RS
1692 j + 1)
1693 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
32131a9c
RK
1694 break;
1695 }
1696
1697 /* I should be the index in potential_reload_regs
1698 of the new reload reg we have found. */
1699
5352b11a
RS
1700 if (i >= FIRST_PSEUDO_REGISTER)
1701 {
1702 /* There are no groups left to spill. */
1703 spill_failure (max_groups_insn[class]);
1704 failure = 1;
1705 goto failed;
1706 }
1707 else
1708 something_changed
fb3821f7 1709 |= new_spill_reg (i, class, max_needs, NULL_PTR,
5352b11a 1710 global, dumpfile);
32131a9c
RK
1711 }
1712 else
1713 {
1714 /* For groups of more than 2 registers,
1715 look for a sufficient sequence of unspilled registers,
1716 and spill them all at once. */
1717 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1718 {
32131a9c 1719 int k;
546b63fb
RK
1720
1721 j = potential_reload_regs[i];
9d1a4667
RS
1722 if (j >= 0
1723 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
32131a9c
RK
1724 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1725 {
1726 /* Check each reg in the sequence. */
1727 for (k = 0; k < group_size[class]; k++)
1728 if (! (spill_reg_order[j + k] < 0
1729 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1730 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1731 break;
1732 /* We got a full sequence, so spill them all. */
1733 if (k == group_size[class])
1734 {
1735 register enum reg_class *p;
1736 for (k = 0; k < group_size[class]; k++)
1737 {
1738 int idx;
1739 SET_HARD_REG_BIT (counted_for_groups, j + k);
1740 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1741 if (potential_reload_regs[idx] == j + k)
1742 break;
9d1a4667
RS
1743 something_changed
1744 |= new_spill_reg (idx, class,
1745 max_needs, NULL_PTR,
1746 global, dumpfile);
32131a9c
RK
1747 }
1748
1749 /* We have found one that will complete a group,
1750 so count off one group as provided. */
1751 max_groups[class]--;
1752 p = reg_class_superclasses[class];
1753 while (*p != LIM_REG_CLASSES)
1754 max_groups[(int) *p++]--;
1755
1756 break;
1757 }
1758 }
1759 }
fa52261e 1760 /* We couldn't find any registers for this reload.
9d1a4667
RS
1761 Avoid going into an infinite loop. */
1762 if (i >= FIRST_PSEUDO_REGISTER)
1763 {
1764 /* There are no groups left. */
1765 spill_failure (max_groups_insn[class]);
1766 failure = 1;
1767 goto failed;
1768 }
32131a9c
RK
1769 }
1770 }
1771
1772 /* Now similarly satisfy all need for single registers. */
1773
1774 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1775 {
9a6cde3a
RS
1776#ifdef SMALL_REGISTER_CLASSES
1777 /* This should be right for all machines, but only the 386
1778 is known to need it, so this conditional plays safe.
1779 ??? For 2.5, try making this unconditional. */
1780 /* If we spilled enough regs, but they weren't counted
1781 against the non-group need, see if we can count them now.
1782 If so, we can avoid some actual spilling. */
1783 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1784 for (i = 0; i < n_spills; i++)
1785 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1786 spill_regs[i])
1787 && !TEST_HARD_REG_BIT (counted_for_groups,
1788 spill_regs[i])
1789 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1790 spill_regs[i])
1791 && max_nongroups[class] > 0)
1792 {
1793 register enum reg_class *p;
1794
1795 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1796 max_nongroups[class]--;
1797 p = reg_class_superclasses[class];
1798 while (*p != LIM_REG_CLASSES)
1799 max_nongroups[(int) *p++]--;
1800 }
1801 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1802 break;
1803#endif
1804
32131a9c
RK
1805 /* Consider the potential reload regs that aren't
1806 yet in use as reload regs, in order of preference.
1807 Find the most preferred one that's in this class. */
1808
1809 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1810 if (potential_reload_regs[i] >= 0
1811 && TEST_HARD_REG_BIT (reg_class_contents[class],
1812 potential_reload_regs[i])
1813 /* If this reg will not be available for groups,
1814 pick one that does not foreclose possible groups.
1815 This is a kludge, and not very general,
1816 but it should be sufficient to make the 386 work,
1817 and the problem should not occur on machines with
1818 more registers. */
1819 && (max_nongroups[class] == 0
1820 || possible_group_p (potential_reload_regs[i], max_groups)))
1821 break;
1822
e404a39a
RK
1823 /* If we couldn't get a register, try to get one even if we
1824 might foreclose possible groups. This may cause problems
1825 later, but that's better than aborting now, since it is
1826 possible that we will, in fact, be able to form the needed
1827 group even with this allocation. */
1828
1829 if (i >= FIRST_PSEUDO_REGISTER
1830 && (asm_noperands (max_needs[class] > 0
1831 ? max_needs_insn[class]
1832 : max_nongroups_insn[class])
1833 < 0))
1834 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1835 if (potential_reload_regs[i] >= 0
1836 && TEST_HARD_REG_BIT (reg_class_contents[class],
1837 potential_reload_regs[i]))
1838 break;
1839
32131a9c
RK
1840 /* I should be the index in potential_reload_regs
1841 of the new reload reg we have found. */
1842
5352b11a
RS
1843 if (i >= FIRST_PSEUDO_REGISTER)
1844 {
1845 /* There are no possible registers left to spill. */
1846 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1847 : max_nongroups_insn[class]);
1848 failure = 1;
1849 goto failed;
1850 }
1851 else
1852 something_changed
1853 |= new_spill_reg (i, class, max_needs, max_nongroups,
1854 global, dumpfile);
32131a9c
RK
1855 }
1856 }
1857 }
1858
1859 /* If global-alloc was run, notify it of any register eliminations we have
1860 done. */
1861 if (global)
1862 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1863 if (ep->can_eliminate)
1864 mark_elimination (ep->from, ep->to);
1865
32131a9c 1866 /* Insert code to save and restore call-clobbered hard regs
a8efe40d
RK
 1867 around calls. Tell what mode to use so that we will process
1868 those insns in reload_as_needed if we have to. */
32131a9c
RK
1869
1870 if (caller_save_needed)
a8efe40d
RK
1871 save_call_clobbered_regs (num_eliminable ? QImode
1872 : caller_save_spill_class != NO_REGS ? HImode
1873 : VOIDmode);
32131a9c
RK
1874
1875 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1876 If that insn didn't set the register (i.e., it copied the register to
1877 memory), just delete that insn instead of the equivalencing insn plus
1878 anything now dead. If we call delete_dead_insn on that insn, we may
 1879 delete the insn that actually sets the register if the register dies
1880 there and that is incorrect. */
1881
1882 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1883 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1884 && GET_CODE (reg_equiv_init[i]) != NOTE)
1885 {
1886 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1887 delete_dead_insn (reg_equiv_init[i]);
1888 else
1889 {
1890 PUT_CODE (reg_equiv_init[i], NOTE);
1891 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1892 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1893 }
1894 }
1895
1896 /* Use the reload registers where necessary
1897 by generating move instructions to move the must-be-register
1898 values into or out of the reload registers. */
1899
a8efe40d
RK
1900 if (something_needs_reloads || something_needs_elimination
1901 || (caller_save_needed && num_eliminable)
1902 || caller_save_spill_class != NO_REGS)
32131a9c
RK
1903 reload_as_needed (first, global);
1904
2a1f8b6b 1905 /* If we were able to eliminate the frame pointer, show that it is no
546b63fb 1906 longer live at the start of any basic block. If it is live by
2a1f8b6b
RK
1907 virtue of being in a pseudo, that pseudo will be marked live
1908 and hence the frame pointer will be known to be live via that
1909 pseudo. */
1910
1911 if (! frame_pointer_needed)
1912 for (i = 0; i < n_basic_blocks; i++)
3ec2ea3e
DE
1913 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1914 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1915 % REGSET_ELT_BITS));
2a1f8b6b 1916
5352b11a
RS
1917 /* Come here (with failure set nonzero) if we can't get enough spill regs
1918 and we decide not to abort about it. */
1919 failed:
1920
a3ec87a8
RS
1921 reload_in_progress = 0;
1922
32131a9c
RK
1923 /* Now eliminate all pseudo regs by modifying them into
1924 their equivalent memory references.
1925 The REG-rtx's for the pseudos are modified in place,
1926 so all insns that used to refer to them now refer to memory.
1927
1928 For a reg that has a reg_equiv_address, all those insns
1929 were changed by reloading so that no insns refer to it any longer;
1930 but the DECL_RTL of a variable decl may refer to it,
1931 and if so this causes the debugging info to mention the variable. */
1932
1933 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1934 {
1935 rtx addr = 0;
ab1fd483 1936 int in_struct = 0;
32131a9c 1937 if (reg_equiv_mem[i])
ab1fd483
RS
1938 {
1939 addr = XEXP (reg_equiv_mem[i], 0);
1940 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1941 }
32131a9c
RK
1942 if (reg_equiv_address[i])
1943 addr = reg_equiv_address[i];
1944 if (addr)
1945 {
1946 if (reg_renumber[i] < 0)
1947 {
1948 rtx reg = regno_reg_rtx[i];
1949 XEXP (reg, 0) = addr;
1950 REG_USERVAR_P (reg) = 0;
ab1fd483 1951 MEM_IN_STRUCT_P (reg) = in_struct;
32131a9c
RK
1952 PUT_CODE (reg, MEM);
1953 }
1954 else if (reg_equiv_mem[i])
1955 XEXP (reg_equiv_mem[i], 0) = addr;
1956 }
1957 }
1958
1959#ifdef PRESERVE_DEATH_INFO_REGNO_P
1960 /* Make a pass over all the insns and remove death notes for things that
1961 are no longer registers or no longer die in the insn (e.g., an input
1962 and output pseudo being tied). */
1963
1964 for (insn = first; insn; insn = NEXT_INSN (insn))
1965 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1966 {
1967 rtx note, next;
1968
1969 for (note = REG_NOTES (insn); note; note = next)
1970 {
1971 next = XEXP (note, 1);
1972 if (REG_NOTE_KIND (note) == REG_DEAD
1973 && (GET_CODE (XEXP (note, 0)) != REG
1974 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1975 remove_note (insn, note);
1976 }
1977 }
1978#endif
1979
1980 /* Indicate that we no longer have known memory locations or constants. */
1981 reg_equiv_constant = 0;
1982 reg_equiv_memory_loc = 0;
5352b11a 1983
c8ab4464
RS
1984 if (scratch_list)
1985 free (scratch_list);
c307c237 1986 scratch_list = 0;
c8ab4464
RS
1987 if (scratch_block)
1988 free (scratch_block);
c307c237
RK
1989 scratch_block = 0;
1990
5352b11a 1991 return failure;
32131a9c
RK
1992}
1993\f
1994/* Nonzero if, after spilling reg REGNO for non-groups,
1995 it will still be possible to find a group if we still need one. */
1996
1997static int
1998possible_group_p (regno, max_groups)
1999 int regno;
2000 int *max_groups;
2001{
2002 int i;
2003 int class = (int) NO_REGS;
2004
2005 for (i = 0; i < (int) N_REG_CLASSES; i++)
2006 if (max_groups[i] > 0)
2007 {
2008 class = i;
2009 break;
2010 }
2011
2012 if (class == (int) NO_REGS)
2013 return 1;
2014
2015 /* Consider each pair of consecutive registers. */
2016 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2017 {
2018 /* Ignore pairs that include reg REGNO. */
2019 if (i == regno || i + 1 == regno)
2020 continue;
2021
2022 /* Ignore pairs that are outside the class that needs the group.
2023 ??? Here we fail to handle the case where two different classes
2024 independently need groups. But this never happens with our
2025 current machine descriptions. */
2026 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2027 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2028 continue;
2029
2030 /* A pair of consecutive regs we can still spill does the trick. */
2031 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2032 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2033 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2034 return 1;
2035
2036 /* A pair of one already spilled and one we can spill does it
2037 provided the one already spilled is not otherwise reserved. */
2038 if (spill_reg_order[i] < 0
2039 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2040 && spill_reg_order[i + 1] >= 0
2041 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2042 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2043 return 1;
2044 if (spill_reg_order[i + 1] < 0
2045 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2046 && spill_reg_order[i] >= 0
2047 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2048 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2049 return 1;
2050 }
2051
2052 return 0;
2053}
2054\f
2055/* Count any groups that can be formed from the registers recently spilled.
2056 This is done class by class, in order of ascending class number. */
2057
2058static void
2059count_possible_groups (group_size, group_mode, max_groups)
546b63fb 2060 int *group_size;
32131a9c 2061 enum machine_mode *group_mode;
546b63fb 2062 int *max_groups;
32131a9c
RK
2063{
2064 int i;
2065 /* Now find all consecutive groups of spilled registers
2066 and mark each group off against the need for such groups.
2067 But don't count them against ordinary need, yet. */
2068
2069 for (i = 0; i < N_REG_CLASSES; i++)
2070 if (group_size[i] > 1)
2071 {
93193ab5 2072 HARD_REG_SET new;
32131a9c
RK
2073 int j;
2074
93193ab5
RK
2075 CLEAR_HARD_REG_SET (new);
2076
32131a9c
RK
2077 /* Make a mask of all the regs that are spill regs in class I. */
2078 for (j = 0; j < n_spills; j++)
2079 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
2080 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
2081 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
2082 spill_regs[j]))
93193ab5
RK
2083 SET_HARD_REG_BIT (new, spill_regs[j]);
2084
32131a9c
RK
2085 /* Find each consecutive group of them. */
2086 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
93193ab5
RK
2087 if (TEST_HARD_REG_BIT (new, j)
2088 && j + group_size[i] <= FIRST_PSEUDO_REGISTER
32131a9c
RK
2089 /* Next line in case group-mode for this class
2090 demands an even-odd pair. */
2091 && HARD_REGNO_MODE_OK (j, group_mode[i]))
2092 {
2093 int k;
2094 for (k = 1; k < group_size[i]; k++)
93193ab5 2095 if (! TEST_HARD_REG_BIT (new, j + k))
32131a9c
RK
2096 break;
2097 if (k == group_size[i])
2098 {
2099 /* We found a group. Mark it off against this class's
2100 need for groups, and against each superclass too. */
2101 register enum reg_class *p;
2102 max_groups[i]--;
2103 p = reg_class_superclasses[i];
2104 while (*p != LIM_REG_CLASSES)
2105 max_groups[(int) *p++]--;
a8fdc208 2106 /* Don't count these registers again. */
32131a9c
RK
2107 for (k = 0; k < group_size[i]; k++)
2108 SET_HARD_REG_BIT (counted_for_groups, j + k);
2109 }
fa52261e
RS
2110 /* Skip to the last reg in this group. When j is incremented
2111 above, it will then point to the first reg of the next
2112 possible group. */
2113 j += k - 1;
32131a9c
RK
2114 }
2115 }
2116
2117}
2118\f
2119/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2120 another mode that needs to be reloaded for the same register class CLASS.
2121 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2122 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2123
2124 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2125 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2126 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2127 causes unnecessary failures on machines requiring alignment of register
2128 groups when the two modes are different sizes, because the larger mode has
2129 more strict alignment rules than the smaller mode. */
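/* Illustrative case for the removed test: on a hypothetical machine where
   a double-word ALLOCATE_MODE must start on an even register, every odd
   register allows the single-word OTHER_MODE but not ALLOCATE_MODE, so
   testing that direction would have failed even though such a register is
   perfectly usable for OTHER_MODE reloads.  */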
2130
2131static int
2132modes_equiv_for_class_p (allocate_mode, other_mode, class)
2133 enum machine_mode allocate_mode, other_mode;
2134 enum reg_class class;
2135{
2136 register int regno;
2137 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2138 {
2139 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2140 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2141 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2142 return 0;
2143 }
2144 return 1;
2145}
2146
5352b11a
RS
2147/* Handle the failure to find a register to spill.
2148 INSN should be one of the insns which needed this particular spill reg. */
2149
2150static void
2151spill_failure (insn)
2152 rtx insn;
2153{
2154 if (asm_noperands (PATTERN (insn)) >= 0)
2155 error_for_asm (insn, "`asm' needs too many reloads");
2156 else
2157 abort ();
2158}
2159
32131a9c
RK
2160/* Add a new register to the tables of available spill-registers
2161 (as well as spilling all pseudos allocated to the register).
2162 I is the index of this register in potential_reload_regs.
2163 CLASS is the regclass whose need is being satisfied.
2164 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2165 so that this register can count off against them.
2166 MAX_NONGROUPS is 0 if this register is part of a group.
2167 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2168
2169static int
2170new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2171 int i;
2172 int class;
2173 int *max_needs;
2174 int *max_nongroups;
2175 int global;
2176 FILE *dumpfile;
2177{
2178 register enum reg_class *p;
2179 int val;
2180 int regno = potential_reload_regs[i];
2181
2182 if (i >= FIRST_PSEUDO_REGISTER)
2183 abort (); /* Caller failed to find any register. */
2184
2185 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2186 fatal ("fixed or forbidden register was spilled.\n\
56f58d3a
RK
2187This may be due to a compiler bug or to impossible asm\n\
2188statements or clauses.");
32131a9c
RK
2189
2190 /* Make reg REGNO an additional reload reg. */
2191
2192 potential_reload_regs[i] = -1;
2193 spill_regs[n_spills] = regno;
2194 spill_reg_order[regno] = n_spills;
2195 if (dumpfile)
2196 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2197
2198 /* Clear off the needs we just satisfied. */
2199
2200 max_needs[class]--;
2201 p = reg_class_superclasses[class];
2202 while (*p != LIM_REG_CLASSES)
2203 max_needs[(int) *p++]--;
2204
2205 if (max_nongroups && max_nongroups[class] > 0)
2206 {
2207 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2208 max_nongroups[class]--;
2209 p = reg_class_superclasses[class];
2210 while (*p != LIM_REG_CLASSES)
2211 max_nongroups[(int) *p++]--;
2212 }
2213
2214 /* Spill every pseudo reg that was allocated to this reg
2215 or to something that overlaps this reg. */
2216
2217 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2218
2219 /* If there are some registers still to eliminate and this register
2220 wasn't ever used before, additional stack space may have to be
2221 allocated to store this register. Thus, we may have changed the offset
2222 between the stack and frame pointers, so mark that something has changed.
2223 (If new pseudos were spilled, thus requiring more space, VAL would have
2224 been set non-zero by the call to spill_hard_reg above since additional
 2225 reloads may be needed in that case.)
2226
2227 One might think that we need only set VAL to 1 if this is a call-used
2228 register. However, the set of registers that must be saved by the
2229 prologue is not identical to the call-used set. For example, the
2230 register used by the call insn for the return PC is a call-used register,
2231 but must be saved by the prologue. */
2232 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2233 val = 1;
2234
2235 regs_ever_live[spill_regs[n_spills]] = 1;
2236 n_spills++;
2237
2238 return val;
2239}
2240\f
 2241/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2242 data that is dead in INSN. */
2243
2244static void
2245delete_dead_insn (insn)
2246 rtx insn;
2247{
2248 rtx prev = prev_real_insn (insn);
2249 rtx prev_dest;
2250
2251 /* If the previous insn sets a register that dies in our insn, delete it
2252 too. */
2253 if (prev && GET_CODE (PATTERN (prev)) == SET
2254 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2255 && reg_mentioned_p (prev_dest, PATTERN (insn))
2256 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2257 delete_dead_insn (prev);
2258
2259 PUT_CODE (insn, NOTE);
2260 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2261 NOTE_SOURCE_FILE (insn) = 0;
2262}
2263
2264/* Modify the home of pseudo-reg I.
2265 The new home is present in reg_renumber[I].
2266
2267 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2268 or it may be -1, meaning there is none or it is not relevant.
2269 This is used so that all pseudos spilled from a given hard reg
2270 can share one stack slot. */
2271
2272static void
2273alter_reg (i, from_reg)
2274 register int i;
2275 int from_reg;
2276{
2277 /* When outputting an inline function, this can happen
2278 for a reg that isn't actually used. */
2279 if (regno_reg_rtx[i] == 0)
2280 return;
2281
2282 /* If the reg got changed to a MEM at rtl-generation time,
2283 ignore it. */
2284 if (GET_CODE (regno_reg_rtx[i]) != REG)
2285 return;
2286
2287 /* Modify the reg-rtx to contain the new hard reg
2288 number or else to contain its pseudo reg number. */
2289 REGNO (regno_reg_rtx[i])
2290 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2291
2292 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2293 allocate a stack slot for it. */
2294
2295 if (reg_renumber[i] < 0
2296 && reg_n_refs[i] > 0
2297 && reg_equiv_constant[i] == 0
2298 && reg_equiv_memory_loc[i] == 0)
2299 {
2300 register rtx x;
2301 int inherent_size = PSEUDO_REGNO_BYTES (i);
2302 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2303 int adjust = 0;
2304
2305 /* Each pseudo reg has an inherent size which comes from its own mode,
2306 and a total size which provides room for paradoxical subregs
2307 which refer to the pseudo reg in wider modes.
2308
2309 We can use a slot already allocated if it provides both
2310 enough inherent space and enough total space.
2311 Otherwise, we allocate a new slot, making sure that it has no less
 2312 inherent space, and no less total space, than the previous slot. */
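	 /* Illustrative example, assuming a 32-bit target: an SImode pseudo
	    has an inherent size of 4 bytes, but if some insn refers to it
	    through a paradoxical DImode subreg, reg_max_ref_width records 8,
	    so the total size becomes 8 and the slot must be at least that
	    large while still holding the 4-byte value.  */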
2313 if (from_reg == -1)
2314 {
2315 /* No known place to spill from => no slot to reuse. */
2316 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2317#if BYTES_BIG_ENDIAN
2318 /* Cancel the big-endian correction done in assign_stack_local.
2319 Get the address of the beginning of the slot.
2320 This is so we can do a big-endian correction unconditionally
2321 below. */
2322 adjust = inherent_size - total_size;
2323#endif
2324 }
2325 /* Reuse a stack slot if possible. */
2326 else if (spill_stack_slot[from_reg] != 0
2327 && spill_stack_slot_width[from_reg] >= total_size
2328 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2329 >= inherent_size))
2330 x = spill_stack_slot[from_reg];
2331 /* Allocate a bigger slot. */
2332 else
2333 {
2334 /* Compute maximum size needed, both for inherent size
2335 and for total size. */
2336 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2337 if (spill_stack_slot[from_reg])
2338 {
2339 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2340 > inherent_size)
2341 mode = GET_MODE (spill_stack_slot[from_reg]);
2342 if (spill_stack_slot_width[from_reg] > total_size)
2343 total_size = spill_stack_slot_width[from_reg];
2344 }
2345 /* Make a slot with that size. */
2346 x = assign_stack_local (mode, total_size, -1);
2347#if BYTES_BIG_ENDIAN
2348 /* Cancel the big-endian correction done in assign_stack_local.
2349 Get the address of the beginning of the slot.
2350 This is so we can do a big-endian correction unconditionally
2351 below. */
2352 adjust = GET_MODE_SIZE (mode) - total_size;
2353#endif
2354 spill_stack_slot[from_reg] = x;
2355 spill_stack_slot_width[from_reg] = total_size;
2356 }
2357
2358#if BYTES_BIG_ENDIAN
2359 /* On a big endian machine, the "address" of the slot
2360 is the address of the low part that fits its inherent mode. */
2361 if (inherent_size < total_size)
2362 adjust += (total_size - inherent_size);
2363#endif /* BYTES_BIG_ENDIAN */
2364
2365 /* If we have any adjustment to make, or if the stack slot is the
2366 wrong mode, make a new stack slot. */
2367 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2368 {
2369 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2370 plus_constant (XEXP (x, 0), adjust));
2371 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2372 }
2373
2374 /* Save the stack slot for later. */
2375 reg_equiv_memory_loc[i] = x;
2376 }
2377}
2378
2379/* Mark the slots in regs_ever_live for the hard regs
2380 used by pseudo-reg number REGNO. */
2381
2382void
2383mark_home_live (regno)
2384 int regno;
2385{
2386 register int i, lim;
2387 i = reg_renumber[regno];
2388 if (i < 0)
2389 return;
2390 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2391 while (i < lim)
2392 regs_ever_live[i++] = 1;
2393}
c307c237
RK
2394
2395/* Mark the registers used in SCRATCH as being live. */
2396
2397static void
2398mark_scratch_live (scratch)
2399 rtx scratch;
2400{
2401 register int i;
2402 int regno = REGNO (scratch);
2403 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2404
2405 for (i = regno; i < lim; i++)
2406 regs_ever_live[i] = 1;
2407}
32131a9c
RK
2408\f
2409/* This function handles the tracking of elimination offsets around branches.
2410
2411 X is a piece of RTL being scanned.
2412
2413 INSN is the insn that it came from, if any.
2414
2415 INITIAL_P is non-zero if we are to set the offset to be the initial
2416 offset and zero if we are setting the offset of the label to be the
2417 current offset. */
2418
2419static void
2420set_label_offsets (x, insn, initial_p)
2421 rtx x;
2422 rtx insn;
2423 int initial_p;
2424{
2425 enum rtx_code code = GET_CODE (x);
2426 rtx tem;
2427 int i;
2428 struct elim_table *p;
2429
2430 switch (code)
2431 {
2432 case LABEL_REF:
8be386d9
RS
2433 if (LABEL_REF_NONLOCAL_P (x))
2434 return;
2435
32131a9c
RK
2436 x = XEXP (x, 0);
2437
2438 /* ... fall through ... */
2439
2440 case CODE_LABEL:
2441 /* If we know nothing about this label, set the desired offsets. Note
2442 that this sets the offset at a label to be the offset before a label
2443 if we don't know anything about the label. This is not correct for
2444 the label after a BARRIER, but is the best guess we can make. If
2445 we guessed wrong, we will suppress an elimination that might have
2446 been possible had we been able to guess correctly. */
2447
2448 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2449 {
2450 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2451 offsets_at[CODE_LABEL_NUMBER (x)][i]
2452 = (initial_p ? reg_eliminate[i].initial_offset
2453 : reg_eliminate[i].offset);
2454 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2455 }
2456
2457 /* Otherwise, if this is the definition of a label and it is
d45cf215 2458 preceded by a BARRIER, set our offsets to the known offset of
32131a9c
RK
2459 that label. */
2460
2461 else if (x == insn
2462 && (tem = prev_nonnote_insn (insn)) != 0
2463 && GET_CODE (tem) == BARRIER)
2a4b5f3b
RK
2464 {
2465 num_not_at_initial_offset = 0;
2466 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2467 {
2468 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2469 = offsets_at[CODE_LABEL_NUMBER (x)][i];
1d0d98f3
RK
2470 if (reg_eliminate[i].can_eliminate
2471 && (reg_eliminate[i].offset
2472 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
2473 num_not_at_initial_offset++;
2474 }
2475 }
32131a9c
RK
2476
2477 else
2478 /* If neither of the above cases is true, compare each offset
2479 with those previously recorded and suppress any eliminations
2480 where the offsets disagree. */
a8fdc208 2481
32131a9c
RK
2482 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2483 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2484 != (initial_p ? reg_eliminate[i].initial_offset
2485 : reg_eliminate[i].offset))
2486 reg_eliminate[i].can_eliminate = 0;
2487
2488 return;
2489
2490 case JUMP_INSN:
2491 set_label_offsets (PATTERN (insn), insn, initial_p);
2492
2493 /* ... fall through ... */
2494
2495 case INSN:
2496 case CALL_INSN:
2497 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2498 and hence must have all eliminations at their initial offsets. */
2499 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2500 if (REG_NOTE_KIND (tem) == REG_LABEL)
2501 set_label_offsets (XEXP (tem, 0), insn, 1);
2502 return;
2503
2504 case ADDR_VEC:
2505 case ADDR_DIFF_VEC:
2506 /* Each of the labels in the address vector must be at their initial
 2507 offsets. We want the first field for ADDR_VEC and the second
2508 field for ADDR_DIFF_VEC. */
2509
2510 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2511 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2512 insn, initial_p);
2513 return;
2514
2515 case SET:
2516 /* We only care about setting PC. If the source is not RETURN,
2517 IF_THEN_ELSE, or a label, disable any eliminations not at
2518 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2519 isn't one of those possibilities. For branches to a label,
2520 call ourselves recursively.
2521
2522 Note that this can disable elimination unnecessarily when we have
2523 a non-local goto since it will look like a non-constant jump to
2524 someplace in the current function. This isn't a significant
 2525 problem since such jumps will normally occur when all elimination
2526 pairs are back to their initial offsets. */
2527
2528 if (SET_DEST (x) != pc_rtx)
2529 return;
2530
2531 switch (GET_CODE (SET_SRC (x)))
2532 {
2533 case PC:
2534 case RETURN:
2535 return;
2536
2537 case LABEL_REF:
2538 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2539 return;
2540
2541 case IF_THEN_ELSE:
2542 tem = XEXP (SET_SRC (x), 1);
2543 if (GET_CODE (tem) == LABEL_REF)
2544 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2545 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2546 break;
2547
2548 tem = XEXP (SET_SRC (x), 2);
2549 if (GET_CODE (tem) == LABEL_REF)
2550 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2551 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2552 break;
2553 return;
2554 }
2555
2556 /* If we reach here, all eliminations must be at their initial
2557 offset because we are doing a jump to a variable address. */
2558 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2559 if (p->offset != p->initial_offset)
2560 p->can_eliminate = 0;
2561 }
2562}
2563\f
 2564/* Used for communication between the next two functions to properly share
2565 the vector for an ASM_OPERANDS. */
2566
2567static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2568
a8fdc208 2569/* Scan X and replace any eliminable registers (such as fp) with a
32131a9c
RK
2570 replacement (such as sp), plus an offset.
2571
2572 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2573 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2574 MEM, we are allowed to replace a sum of a register and the constant zero
2575 with the register, which we cannot do outside a MEM. In addition, we need
2576 to record the fact that a register is referenced outside a MEM.
2577
ff32812a 2578 If INSN is an insn, it is the insn containing X. If we replace a REG
32131a9c
RK
2579 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
 2580 CLOBBER of the pseudo after INSN so find_equiv_regs will know
 2581 that the REG is being modified.
2582
ff32812a
RS
2583 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2584 That's used when we eliminate in expressions stored in notes.
2585 This means, do not set ref_outside_mem even if the reference
2586 is outside of MEMs.
2587
32131a9c
RK
2588 If we see a modification to a register we know about, take the
2589 appropriate action (see case SET, below).
2590
 2591 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2592 replacements done assuming all offsets are at their initial values. If
2593 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2594 encounter, return the actual location so that find_reloads will do
2595 the proper thing. */
2596
2597rtx
2598eliminate_regs (x, mem_mode, insn)
2599 rtx x;
2600 enum machine_mode mem_mode;
2601 rtx insn;
2602{
2603 enum rtx_code code = GET_CODE (x);
2604 struct elim_table *ep;
2605 int regno;
2606 rtx new;
2607 int i, j;
2608 char *fmt;
2609 int copied = 0;
2610
2611 switch (code)
2612 {
2613 case CONST_INT:
2614 case CONST_DOUBLE:
2615 case CONST:
2616 case SYMBOL_REF:
2617 case CODE_LABEL:
2618 case PC:
2619 case CC0:
2620 case ASM_INPUT:
2621 case ADDR_VEC:
2622 case ADDR_DIFF_VEC:
2623 case RETURN:
2624 return x;
2625
2626 case REG:
2627 regno = REGNO (x);
2628
2629 /* First handle the case where we encounter a bare register that
2630 is eliminable. Replace it with a PLUS. */
2631 if (regno < FIRST_PSEUDO_REGISTER)
2632 {
2633 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2634 ep++)
2635 if (ep->from_rtx == x && ep->can_eliminate)
2636 {
ff32812a
RS
2637 if (! mem_mode
2638 /* Refs inside notes don't count for this purpose. */
fe089a90 2639 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
ff32812a 2640 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2641 ep->ref_outside_mem = 1;
2642 return plus_constant (ep->to_rtx, ep->previous_offset);
2643 }
2644
2645 }
2646 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2647 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2648 {
2649 /* In this case, find_reloads would attempt to either use an
2650 incorrect address (if something is not at its initial offset)
 2651 or substitute a replaced address into an insn (which loses
2652 if the offset is changed by some later action). So we simply
2653 return the replaced stack slot (assuming it is changed by
2654 elimination) and ignore the fact that this is actually a
2655 reference to the pseudo. Ensure we make a copy of the
2656 address in case it is shared. */
fb3821f7 2657 new = eliminate_regs (reg_equiv_memory_loc[regno],
e5687447 2658 mem_mode, insn);
32131a9c 2659 if (new != reg_equiv_memory_loc[regno])
208dffa5
RS
2660 {
2661 cannot_omit_stores[regno] = 1;
2662 return copy_rtx (new);
2663 }
32131a9c
RK
2664 }
2665 return x;
2666
2667 case PLUS:
2668 /* If this is the sum of an eliminable register and a constant, rework
2669 the sum. */
2670 if (GET_CODE (XEXP (x, 0)) == REG
2671 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2672 && CONSTANT_P (XEXP (x, 1)))
2673 {
2674 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2675 ep++)
2676 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2677 {
e5687447
JW
2678 if (! mem_mode
2679 /* Refs inside notes don't count for this purpose. */
2680 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2681 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2682 ep->ref_outside_mem = 1;
2683
2684 /* The only time we want to replace a PLUS with a REG (this
2685 occurs when the constant operand of the PLUS is the negative
2686 of the offset) is when we are inside a MEM. We won't want
2687 to do so at other times because that would change the
2688 structure of the insn in a way that reload can't handle.
2689 We special-case the commonest situation in
2690 eliminate_regs_in_insn, so just replace a PLUS with a
2691 PLUS here, unless inside a MEM. */
a23b64d5 2692 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
32131a9c
RK
2693 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2694 return ep->to_rtx;
2695 else
2696 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2697 plus_constant (XEXP (x, 1),
2698 ep->previous_offset));
2699 }
2700
2701 /* If the register is not eliminable, we are done since the other
2702 operand is a constant. */
2703 return x;
2704 }
2705
2706 /* If this is part of an address, we want to bring any constant to the
2707 outermost PLUS. We will do this by doing register replacement in
2708 our operands and seeing if a constant shows up in one of them.
2709
2710 We assume here this is part of an address (or a "load address" insn)
2711 since an eliminable register is not likely to appear in any other
2712 context.
2713
2714 If we have (plus (eliminable) (reg)), we want to produce
 2715 (plus (plus (replacement) (reg)) (const)). If this was part of a
2716 normal add insn, (plus (replacement) (reg)) will be pushed as a
2717 reload. This is the desired action. */
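	 /* Illustrative example: if fp is being eliminated to sp with a
	    current offset of 16, (plus (reg fp) (reg R)) becomes
	    (plus (plus (reg sp) (reg R)) (const_int 16)); the inner sum can
	    then be pushed as a reload if the result is not a valid address.  */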
2718
2719 {
e5687447
JW
2720 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2721 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
32131a9c
RK
2722
2723 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2724 {
2725 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2726 didn't get a hard register but has a reg_equiv_constant,
32131a9c
RK
2727 we must replace the constant here since it may no longer
2728 be in the position of any operand. */
2729 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2730 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2731 && reg_renumber[REGNO (new1)] < 0
2732 && reg_equiv_constant != 0
2733 && reg_equiv_constant[REGNO (new1)] != 0)
2734 new1 = reg_equiv_constant[REGNO (new1)];
2735 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2736 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2737 && reg_renumber[REGNO (new0)] < 0
2738 && reg_equiv_constant[REGNO (new0)] != 0)
2739 new0 = reg_equiv_constant[REGNO (new0)];
2740
2741 new = form_sum (new0, new1);
2742
2743 /* As above, if we are not inside a MEM we do not want to
2744 turn a PLUS into something else. We might try to do so here
2745 for an addition of 0 if we aren't optimizing. */
2746 if (! mem_mode && GET_CODE (new) != PLUS)
2747 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2748 else
2749 return new;
2750 }
2751 }
2752 return x;
2753
981c7390
RK
2754 case MULT:
2755 /* If this is the product of an eliminable register and a
2756 constant, apply the distribute law and move the constant out
2757 so that we have (plus (mult ..) ..). This is needed in order
 2758 to keep load-address insns valid. This case is pathological.
2759 We ignore the possibility of overflow here. */
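	 /* Illustrative example: if fp is being eliminated to sp with a
	    current offset of 16, (mult (reg fp) (const_int 4)) becomes
	    (plus (mult (reg sp) (const_int 4)) (const_int 64)), i.e. the
	    elimination offset is scaled by the multiplier.  */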
2760 if (GET_CODE (XEXP (x, 0)) == REG
2761 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2762 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2763 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2764 ep++)
2765 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2766 {
2767 if (! mem_mode
2768 /* Refs inside notes don't count for this purpose. */
2769 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2770 || GET_CODE (insn) == INSN_LIST)))
2771 ep->ref_outside_mem = 1;
2772
2773 return
2774 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2775 ep->previous_offset * INTVAL (XEXP (x, 1)));
2776 }
32131a9c
RK
2777
2778 /* ... fall through ... */
2779
32131a9c
RK
2780 case CALL:
2781 case COMPARE:
2782 case MINUS:
32131a9c
RK
2783 case DIV: case UDIV:
2784 case MOD: case UMOD:
2785 case AND: case IOR: case XOR:
45620ed4
RK
2786 case ROTATERT: case ROTATE:
2787 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
32131a9c
RK
2788 case NE: case EQ:
2789 case GE: case GT: case GEU: case GTU:
2790 case LE: case LT: case LEU: case LTU:
2791 {
e5687447 2792 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
fb3821f7 2793 rtx new1
e5687447 2794 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
32131a9c
RK
2795
2796 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2797 return gen_rtx (code, GET_MODE (x), new0, new1);
2798 }
2799 return x;
2800
981c7390
RK
2801 case EXPR_LIST:
2802 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2803 if (XEXP (x, 0))
2804 {
2805 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2806 if (new != XEXP (x, 0))
2807 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2808 }
2809
2810 /* ... fall through ... */
2811
2812 case INSN_LIST:
2813 /* Now do eliminations in the rest of the chain. If this was
2814 an EXPR_LIST, this might result in allocating more memory than is
2815 strictly needed, but it simplifies the code. */
2816 if (XEXP (x, 1))
2817 {
2818 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2819 if (new != XEXP (x, 1))
2820 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2821 }
2822 return x;
2823
32131a9c
RK
2824 case PRE_INC:
2825 case POST_INC:
2826 case PRE_DEC:
2827 case POST_DEC:
2828 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2829 if (ep->to_rtx == XEXP (x, 0))
2830 {
4c05b187
RK
2831 int size = GET_MODE_SIZE (mem_mode);
2832
2833 /* If more bytes than MEM_MODE are pushed, account for them. */
2834#ifdef PUSH_ROUNDING
2835 if (ep->to_rtx == stack_pointer_rtx)
2836 size = PUSH_ROUNDING (size);
2837#endif
32131a9c 2838 if (code == PRE_DEC || code == POST_DEC)
4c05b187 2839 ep->offset += size;
32131a9c 2840 else
4c05b187 2841 ep->offset -= size;
32131a9c
RK
2842 }
2843
2844 /* Fall through to generic unary operation case. */
2845 case USE:
2846 case STRICT_LOW_PART:
2847 case NEG: case NOT:
2848 case SIGN_EXTEND: case ZERO_EXTEND:
2849 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2850 case FLOAT: case FIX:
2851 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2852 case ABS:
2853 case SQRT:
2854 case FFS:
e5687447 2855 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
32131a9c
RK
2856 if (new != XEXP (x, 0))
2857 return gen_rtx (code, GET_MODE (x), new);
2858 return x;
2859
2860 case SUBREG:
2861 /* Similar to above processing, but preserve SUBREG_WORD.
2862 Convert (subreg (mem)) to (mem) if not paradoxical.
2863 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2864 pseudo didn't get a hard reg, we must replace this with the
2865 eliminated version of the memory location because push_reloads
2866 may do the replacement in certain circumstances. */
2867 if (GET_CODE (SUBREG_REG (x)) == REG
2868 && (GET_MODE_SIZE (GET_MODE (x))
2869 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2870 && reg_equiv_memory_loc != 0
2871 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2872 {
2873 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
e5687447 2874 mem_mode, insn);
32131a9c
RK
2875
2876 /* If we didn't change anything, we must retain the pseudo. */
2877 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2878 new = XEXP (x, 0);
2879 else
2880 /* Otherwise, ensure NEW isn't shared in case we have to reload
2881 it. */
2882 new = copy_rtx (new);
2883 }
2884 else
e5687447 2885 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
32131a9c
RK
2886
2887 if (new != XEXP (x, 0))
2888 {
2889 if (GET_CODE (new) == MEM
2890 && (GET_MODE_SIZE (GET_MODE (x))
a3b75c07 2891 <= GET_MODE_SIZE (GET_MODE (new)))
e90d3cbb 2892#ifdef LOAD_EXTEND_OP
a3b75c07
RS
2893 /* On these machines we will be reloading what is
2894 inside the SUBREG if it originally was a pseudo and
2895 the inner and outer modes are both a word or
2896 smaller. So leave the SUBREG then. */
2897 && ! (GET_CODE (SUBREG_REG (x)) == REG
2898 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2899 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
2900#endif
2901 )
32131a9c
RK
2902 {
2903 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2904 enum machine_mode mode = GET_MODE (x);
2905
2906#if BYTES_BIG_ENDIAN
2907 offset += (MIN (UNITS_PER_WORD,
2908 GET_MODE_SIZE (GET_MODE (new)))
2909 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2910#endif
2911
2912 PUT_MODE (new, mode);
2913 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2914 return new;
2915 }
2916 else
2917 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2918 }
2919
2920 return x;
2921
2922 case CLOBBER:
2923 /* If clobbering a register that is the replacement register for an
d45cf215 2924 elimination we still think can be performed, note that it cannot
32131a9c
RK
2925 be performed. Otherwise, we need not be concerned about it. */
2926 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2927 if (ep->to_rtx == XEXP (x, 0))
2928 ep->can_eliminate = 0;
2929
e5687447 2930 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2045084c
JVA
2931 if (new != XEXP (x, 0))
2932 return gen_rtx (code, GET_MODE (x), new);
32131a9c
RK
2933 return x;
2934
2935 case ASM_OPERANDS:
2936 {
2937 rtx *temp_vec;
2938 /* Properly handle sharing input and constraint vectors. */
2939 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2940 {
2941 /* When we come to a new vector not seen before,
2942 scan all its elements; keep the old vector if none
2943 of them changes; otherwise, make a copy. */
2944 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2945 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2946 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2947 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
e5687447 2948 mem_mode, insn);
32131a9c
RK
2949
2950 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2951 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2952 break;
2953
2954 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2955 new_asm_operands_vec = old_asm_operands_vec;
2956 else
2957 new_asm_operands_vec
2958 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2959 }
2960
2961 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2962 if (new_asm_operands_vec == old_asm_operands_vec)
2963 return x;
2964
2965 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2966 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2967 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2968 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2969 ASM_OPERANDS_SOURCE_FILE (x),
2970 ASM_OPERANDS_SOURCE_LINE (x));
2971 new->volatil = x->volatil;
2972 return new;
2973 }
2974
2975 case SET:
2976 /* Check for setting a register that we know about. */
2977 if (GET_CODE (SET_DEST (x)) == REG)
2978 {
2979 /* See if this is setting the replacement register for an
a8fdc208 2980 elimination.
32131a9c 2981
3ec2ea3e
DE
2982 If DEST is the hard frame pointer, we do nothing because we
2983 assume that all assignments to the frame pointer are for
2984 non-local gotos and are being done at a time when they are valid
2985 and do not disturb anything else. Some machines want to
2986 eliminate a fake argument pointer (or even a fake frame pointer)
2987 with either the real frame or the stack pointer. Assignments to
2988 the hard frame pointer must not prevent this elimination. */
32131a9c
RK
2989
2990 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2991 ep++)
2992 if (ep->to_rtx == SET_DEST (x)
3ec2ea3e 2993 && SET_DEST (x) != hard_frame_pointer_rtx)
32131a9c 2994 {
6dc42e49 2995 /* If it is being incremented, adjust the offset. Otherwise,
32131a9c
RK
2996 this elimination can't be done. */
2997 rtx src = SET_SRC (x);
2998
2999 if (GET_CODE (src) == PLUS
3000 && XEXP (src, 0) == SET_DEST (x)
3001 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3002 ep->offset -= INTVAL (XEXP (src, 1));
3003 else
3004 ep->can_eliminate = 0;
3005 }
3006
 3007 /* Now check to see if we are assigning to a register that can be
3008 eliminated. If so, it must be as part of a PARALLEL, since we
3009 will not have been called if this is a single SET. So indicate
3010 that we can no longer eliminate this reg. */
3011 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3012 ep++)
3013 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3014 ep->can_eliminate = 0;
3015 }
3016
3017 /* Now avoid the loop below in this common case. */
3018 {
e5687447
JW
3019 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3020 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
32131a9c 3021
ff32812a 3022 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
32131a9c
RK
3023 write a CLOBBER insn. */
3024 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
572ca60a
RS
3025 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3026 && GET_CODE (insn) != INSN_LIST)
32131a9c
RK
3027 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3028
3029 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3030 return gen_rtx (SET, VOIDmode, new0, new1);
3031 }
3032
3033 return x;
3034
3035 case MEM:
3036 /* Our only special processing is to pass the mode of the MEM to our
3037 recursive call and copy the flags. While we are here, handle this
3038 case more efficiently. */
e5687447 3039 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
32131a9c
RK
3040 if (new != XEXP (x, 0))
3041 {
3042 new = gen_rtx (MEM, GET_MODE (x), new);
3043 new->volatil = x->volatil;
3044 new->unchanging = x->unchanging;
3045 new->in_struct = x->in_struct;
3046 return new;
3047 }
3048 else
3049 return x;
3050 }
3051
3052 /* Process each of our operands recursively. If any have changed, make a
3053 copy of the rtx. */
3054 fmt = GET_RTX_FORMAT (code);
3055 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3056 {
3057 if (*fmt == 'e')
3058 {
e5687447 3059 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
32131a9c
RK
3060 if (new != XEXP (x, i) && ! copied)
3061 {
3062 rtx new_x = rtx_alloc (code);
3063 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3064 + (sizeof (new_x->fld[0])
3065 * GET_RTX_LENGTH (code))));
3066 x = new_x;
3067 copied = 1;
3068 }
3069 XEXP (x, i) = new;
3070 }
3071 else if (*fmt == 'E')
3072 {
3073 int copied_vec = 0;
3074 for (j = 0; j < XVECLEN (x, i); j++)
3075 {
3076 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3077 if (new != XVECEXP (x, i, j) && ! copied_vec)
3078 {
3079 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3080 &XVECEXP (x, i, 0));
3081 if (! copied)
3082 {
3083 rtx new_x = rtx_alloc (code);
3084 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3085 + (sizeof (new_x->fld[0])
3086 * GET_RTX_LENGTH (code))));
3087 x = new_x;
3088 copied = 1;
3089 }
3090 XVEC (x, i) = new_v;
3091 copied_vec = 1;
3092 }
3093 XVECEXP (x, i, j) = new;
3094 }
3095 }
3096 }
3097
3098 return x;
3099}
3100\f
3101/* Scan INSN and eliminate all eliminable registers in it.
3102
3103 If REPLACE is nonzero, do the replacement destructively. Also
3104 delete the insn as dead it if it is setting an eliminable register.
3105
3106 If REPLACE is zero, do all our allocations in reload_obstack.
3107
3108 If no eliminations were done and this insn doesn't require any elimination
3109 processing (these are not identical conditions: it might be updating sp,
3110 but not referencing fp; this needs to be seen during reload_as_needed so
3111 that the offset between fp and sp can be taken into consideration), zero
3112 is returned. Otherwise, 1 is returned. */
3113
3114static int
3115eliminate_regs_in_insn (insn, replace)
3116 rtx insn;
3117 int replace;
3118{
3119 rtx old_body = PATTERN (insn);
3120 rtx new_body;
893bc853
RK
3121 rtx old_set;
3122 rtx new_set;
32131a9c
RK
3123 int val = 0;
3124 struct elim_table *ep;
3125
3126 if (! replace)
3127 push_obstacks (&reload_obstack, &reload_obstack);
3128
3129 if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
3130 && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
3131 {
3132 /* Check for setting an eliminable register. */
3133 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3134 if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
3135 {
3136 /* In this case this insn isn't serving a useful purpose. We
3137 will delete it in reload_as_needed once we know that this
3138 elimination is, in fact, being done.
3139
 3140	       If REPLACE isn't set, we can't delete this insn, but needn't
3141 process it since it won't be used unless something changes. */
3142 if (replace)
3143 delete_dead_insn (insn);
3144 val = 1;
3145 goto done;
3146 }
3147
3148 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3149 in the insn is the negative of the offset in FROM. Substitute
3150 (set (reg) (reg to)) for the insn and change its code.
3151
 3152	     We have to do this here, rather than in eliminate_regs, so that we can
3153 change the insn code. */
3154
3155 if (GET_CODE (SET_SRC (old_body)) == PLUS
3156 && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
3157 && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
3158 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3159 ep++)
3160 if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
922d9d40 3161 && ep->can_eliminate)
32131a9c 3162 {
922d9d40
RK
3163 /* We must stop at the first elimination that will be used.
3164 If this one would replace the PLUS with a REG, do it
3165 now. Otherwise, quit the loop and let eliminate_regs
3166 do its normal replacement. */
3167 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
3168 {
3169 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3170 SET_DEST (old_body), ep->to_rtx);
3171 INSN_CODE (insn) = -1;
3172 val = 1;
3173 goto done;
3174 }
3175
3176 break;
32131a9c
RK
3177 }
3178 }
3179
3180 old_asm_operands_vec = 0;
3181
3182 /* Replace the body of this insn with a substituted form. If we changed
05b4c365 3183 something, return non-zero.
32131a9c
RK
3184
3185 If we are replacing a body that was a (set X (plus Y Z)), try to
3186 re-recognize the insn. We do this in case we had a simple addition
3187 but now can do this as a load-address. This saves an insn in this
3188 common case. */
3189
fb3821f7 3190 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
32131a9c
RK
3191 if (new_body != old_body)
3192 {
893bc853
RK
3193 old_set = (GET_CODE (old_body) == PARALLEL) ? single_set (insn) :
3194 old_body;
3195
3196 new_set = (GET_CODE (new_body) == PARALLEL) ? XVECEXP(new_body,0,0) :
3197 new_body;
3198
7c791b13
RK
3199 /* If we aren't replacing things permanently and we changed something,
3200 make another copy to ensure that all the RTL is new. Otherwise
 3201	 things can go wrong if find_reloads swaps commutative operands
3202 and one is inside RTL that has been copied while the other is not. */
3203
4d411872
RS
3204 /* Don't copy an asm_operands because (1) there's no need and (2)
3205 copy_rtx can't do it properly when there are multiple outputs. */
b84f9d9c 3206 if (! replace && asm_noperands (old_body) < 0)
7c791b13
RK
3207 new_body = copy_rtx (new_body);
3208
4a5d0fb5 3209 /* If we had a move insn but now we don't, rerecognize it. */
893bc853
RK
3210 if ((GET_CODE (old_set) == SET && GET_CODE (SET_SRC (old_set)) == REG
3211 && (GET_CODE (new_set) != SET
3212 || GET_CODE (SET_SRC (new_set)) != REG))
51b8cba1
JL
3213 /* If this was a load from or store to memory, compare
3214 the MEM in recog_operand to the one in the insn. If they
3215 are not equal, then rerecognize the insn. */
893bc853
RK
3216 || (GET_CODE (old_set) == SET
3217 && ((GET_CODE (SET_SRC (old_set)) == MEM
3218 && SET_SRC (old_set) != recog_operand[1])
3219 || (GET_CODE (SET_DEST (old_set)) == MEM
3220 && SET_DEST (old_set) != recog_operand[0])))
0ba846c7
RS
3221 /* If this was an add insn before, rerecognize. */
3222 ||
893bc853
RK
3223 (GET_CODE (old_set) == SET
3224 && GET_CODE (SET_SRC (old_set)) == PLUS))
4a5d0fb5 3225 {
893bc853
RK
3226 if (!replace)
3227 PATTERN (insn) = copy_rtx (PATTERN (insn));
3228
4a5d0fb5 3229 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
0ba846c7
RS
3230 /* If recognition fails, store the new body anyway.
3231 It's normal to have recognition failures here
3232 due to bizarre memory addresses; reloading will fix them. */
3233 PATTERN (insn) = new_body;
4a5d0fb5 3234 }
0ba846c7 3235 else
32131a9c
RK
3236 PATTERN (insn) = new_body;
3237
32131a9c
RK
3238 val = 1;
3239 }
a8fdc208 3240
32131a9c
RK
3241 /* Loop through all elimination pairs. See if any have changed and
3242 recalculate the number not at initial offset.
3243
a8efe40d
RK
3244 Compute the maximum offset (minimum offset if the stack does not
3245 grow downward) for each elimination pair.
3246
32131a9c
RK
 3247	     We also detect cases where register elimination cannot be done,
3248 namely, if a register would be both changed and referenced outside a MEM
3249 in the resulting insn since such an insn is often undefined and, even if
3250 not, we cannot know what meaning will be given to it. Note that it is
3251 valid to have a register used in an address in an insn that changes it
3252 (presumably with a pre- or post-increment or decrement).
3253
3254 If anything changes, return nonzero. */
3255
3256 num_not_at_initial_offset = 0;
3257 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3258 {
3259 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3260 ep->can_eliminate = 0;
3261
3262 ep->ref_outside_mem = 0;
3263
3264 if (ep->previous_offset != ep->offset)
3265 val = 1;
3266
3267 ep->previous_offset = ep->offset;
3268 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3269 num_not_at_initial_offset++;
a8efe40d
RK
3270
3271#ifdef STACK_GROWS_DOWNWARD
3272 ep->max_offset = MAX (ep->max_offset, ep->offset);
3273#else
3274 ep->max_offset = MIN (ep->max_offset, ep->offset);
3275#endif
32131a9c
RK
3276 }
3277
3278 done:
05b4c365
RK
 3279	  /* If we changed something, perform elimination in REG_NOTES.  This is
3280 needed even when REPLACE is zero because a REG_DEAD note might refer
3281 to a register that we eliminate and could cause a different number
3282 of spill registers to be needed in the final reload pass than in
3283 the pre-passes. */
20748cab 3284 if (val && REG_NOTES (insn) != 0)
ff32812a 3285 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
05b4c365 3286
32131a9c
RK
3287 if (! replace)
3288 pop_obstacks ();
3289
3290 return val;
3291}
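/* Illustrative sketch, not part of reload1.c: the special case above relies
   on the relation FROM = TO + offset that eliminate_regs applies elsewhere,
   so (plus (reg FROM) (const_int C)) names the same value as (reg TO) exactly
   when offset == -C.  A standalone arithmetic model of that test, using
   made-up register values: */
#include <stdio.h>

int
main (void)
{
  int to = 1000;            /* value of the replacement reg, e.g. sp  */
  int offset = 16;          /* current elimination offset             */
  int from = to + offset;   /* value of the eliminated reg, e.g. fp   */
  int c = -16;              /* the CONST_INT taken from the PLUS      */

  /* The PLUS equals TO precisely when offset == -c.  */
  printf ("%d\n", from + c == to);
  return 0;
}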
3292
3293/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3294 replacement we currently believe is valid, mark it as not eliminable if X
3295 modifies DEST in any way other than by adding a constant integer to it.
3296
3297 If DEST is the frame pointer, we do nothing because we assume that
3ec2ea3e
DE
3298 all assignments to the hard frame pointer are nonlocal gotos and are being
3299 done at a time when they are valid and do not disturb anything else.
32131a9c 3300 Some machines want to eliminate a fake argument pointer with either the
3ec2ea3e
DE
3301 frame or stack pointer. Assignments to the hard frame pointer must not
3302 prevent this elimination.
32131a9c
RK
3303
3304 Called via note_stores from reload before starting its passes to scan
3305 the insns of the function. */
3306
3307static void
3308mark_not_eliminable (dest, x)
3309 rtx dest;
3310 rtx x;
3311{
3312 register int i;
3313
3314 /* A SUBREG of a hard register here is just changing its mode. We should
3315 not see a SUBREG of an eliminable hard register, but check just in
3316 case. */
3317 if (GET_CODE (dest) == SUBREG)
3318 dest = SUBREG_REG (dest);
3319
3ec2ea3e 3320 if (dest == hard_frame_pointer_rtx)
32131a9c
RK
3321 return;
3322
3323 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3324 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3325 && (GET_CODE (x) != SET
3326 || GET_CODE (SET_SRC (x)) != PLUS
3327 || XEXP (SET_SRC (x), 0) != dest
3328 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3329 {
3330 reg_eliminate[i].can_eliminate_previous
3331 = reg_eliminate[i].can_eliminate = 0;
3332 num_eliminable--;
3333 }
3334}
3335\f
3336/* Kick all pseudos out of hard register REGNO.
3337 If GLOBAL is nonzero, try to find someplace else to put them.
3338 If DUMPFILE is nonzero, log actions taken on that file.
3339
3340 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
 3341	   because we found we can't eliminate some register.  In that case, no pseudos
3342 are allowed to be in the register, even if they are only in a block that
3343 doesn't require spill registers, unlike the case when we are spilling this
3344 hard reg to produce another spill register.
3345
3346 Return nonzero if any pseudos needed to be kicked out. */
3347
3348static int
3349spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3350 register int regno;
3351 int global;
3352 FILE *dumpfile;
3353 int cant_eliminate;
3354{
c307c237 3355 enum reg_class class = REGNO_REG_CLASS (regno);
32131a9c
RK
3356 int something_changed = 0;
3357 register int i;
3358
3359 SET_HARD_REG_BIT (forbidden_regs, regno);
3360
3361 /* Spill every pseudo reg that was allocated to this reg
3362 or to something that overlaps this reg. */
3363
3364 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3365 if (reg_renumber[i] >= 0
3366 && reg_renumber[i] <= regno
a8fdc208 3367 && (reg_renumber[i]
32131a9c
RK
3368 + HARD_REGNO_NREGS (reg_renumber[i],
3369 PSEUDO_REGNO_MODE (i))
3370 > regno))
3371 {
32131a9c
RK
3372 /* If this register belongs solely to a basic block which needed no
3373 spilling of any class that this register is contained in,
3374 leave it be, unless we are spilling this register because
3375 it was a hard register that can't be eliminated. */
3376
3377 if (! cant_eliminate
3378 && basic_block_needs[0]
3379 && reg_basic_block[i] >= 0
3380 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3381 {
3382 enum reg_class *p;
3383
3384 for (p = reg_class_superclasses[(int) class];
3385 *p != LIM_REG_CLASSES; p++)
3386 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3387 break;
a8fdc208 3388
32131a9c
RK
3389 if (*p == LIM_REG_CLASSES)
3390 continue;
3391 }
3392
3393 /* Mark it as no longer having a hard register home. */
3394 reg_renumber[i] = -1;
3395 /* We will need to scan everything again. */
3396 something_changed = 1;
3397 if (global)
3398 retry_global_alloc (i, forbidden_regs);
3399
3400 alter_reg (i, regno);
3401 if (dumpfile)
3402 {
3403 if (reg_renumber[i] == -1)
3404 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3405 else
3406 fprintf (dumpfile, " Register %d now in %d.\n\n",
3407 i, reg_renumber[i]);
3408 }
3409 }
c307c237
RK
3410 for (i = 0; i < scratch_list_length; i++)
3411 {
3412 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3413 {
3414 if (! cant_eliminate && basic_block_needs[0]
3415 && ! basic_block_needs[(int) class][scratch_block[i]])
3416 {
3417 enum reg_class *p;
3418
3419 for (p = reg_class_superclasses[(int) class];
3420 *p != LIM_REG_CLASSES; p++)
3421 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3422 break;
3423
3424 if (*p == LIM_REG_CLASSES)
3425 continue;
3426 }
3427 PUT_CODE (scratch_list[i], SCRATCH);
3428 scratch_list[i] = 0;
3429 something_changed = 1;
3430 continue;
3431 }
3432 }
32131a9c
RK
3433
3434 return something_changed;
3435}
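/* Illustrative sketch, not part of reload1.c: the test above,
   reg_renumber[i] <= regno && reg_renumber[i] + nregs > regno, is a
   half-open interval check for a pseudo that occupies several consecutive
   hard registers.  A standalone model with hypothetical names and a fixed
   register count standing in for HARD_REGNO_NREGS: */
#include <stdio.h>

/* Nonzero if a pseudo living in hard regs [first, first + nregs) overlaps
   hard register regno.  */
static int
overlaps_hard_reg (int first, int nregs, int regno)
{
  return first <= regno && first + nregs > regno;
}

int
main (void)
{
  /* A two-register pseudo in hard regs 2 and 3 overlaps reg 3, not reg 4.  */
  printf ("%d %d\n", overlaps_hard_reg (2, 2, 3), overlaps_hard_reg (2, 2, 4));
  return 0;
}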
3436\f
56f58d3a
RK
3437/* Find all paradoxical subregs within X and update reg_max_ref_width.
3438 Also mark any hard registers used to store user variables as
3439 forbidden from being used for spill registers. */
32131a9c
RK
3440
3441static void
3442scan_paradoxical_subregs (x)
3443 register rtx x;
3444{
3445 register int i;
3446 register char *fmt;
3447 register enum rtx_code code = GET_CODE (x);
3448
3449 switch (code)
3450 {
56f58d3a
RK
3451 case REG:
3452#ifdef SMALL_REGISTER_CLASSES
3453 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3454 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3455#endif
3456 return;
3457
32131a9c
RK
3458 case CONST_INT:
3459 case CONST:
3460 case SYMBOL_REF:
3461 case LABEL_REF:
3462 case CONST_DOUBLE:
3463 case CC0:
3464 case PC:
32131a9c
RK
3465 case USE:
3466 case CLOBBER:
3467 return;
3468
3469 case SUBREG:
3470 if (GET_CODE (SUBREG_REG (x)) == REG
3471 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3472 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3473 = GET_MODE_SIZE (GET_MODE (x));
3474 return;
3475 }
3476
3477 fmt = GET_RTX_FORMAT (code);
3478 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3479 {
3480 if (fmt[i] == 'e')
3481 scan_paradoxical_subregs (XEXP (x, i));
3482 else if (fmt[i] == 'E')
3483 {
3484 register int j;
3485 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3486 scan_paradoxical_subregs (XVECEXP (x, i, j));
3487 }
3488 }
3489}
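/* Illustrative sketch, not part of reload1.c: a paradoxical SUBREG is one
   whose mode is wider than the register it wraps, e.g. (subreg:SI (reg:QI N) 0)
   reading four bytes of a one-byte pseudo.  The function above records the
   widest such reference in reg_max_ref_width so the stack slot assigned later
   can be made at least that wide.  A standalone model of that bookkeeping,
   with hypothetical byte widths standing in for GET_MODE_SIZE: */
#include <stdio.h>

#define NPSEUDOS 4

static int natural_size[NPSEUDOS]  = { 1, 2, 4, 8 };
static int max_ref_width[NPSEUDOS] = { 1, 2, 4, 8 };

/* Record a reference to pseudo REGNO made in a mode WIDTH bytes wide.  */
static void
note_reference (int regno, int width)
{
  if (width > max_ref_width[regno])
    max_ref_width[regno] = width;   /* paradoxical: remember the wider access */
}

int
main (void)
{
  note_reference (0, 4);            /* like (subreg:SI (reg:QI 0) 0) */
  printf ("pseudo 0: natural size %d, slot must be %d bytes\n",
	  natural_size[0], max_ref_width[0]);
  return 0;
}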
3490\f
32131a9c
RK
3491static int
3492hard_reg_use_compare (p1, p2)
3493 struct hard_reg_n_uses *p1, *p2;
3494{
3495 int tem = p1->uses - p2->uses;
3496 if (tem != 0) return tem;
3497 /* If regs are equally good, sort by regno,
3498 so that the results of qsort leave nothing to chance. */
3499 return p1->regno - p2->regno;
3500}
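/* Illustrative sketch, not part of reload1.c: the regno tie-break above turns
   the comparison into a total order, so qsort cannot reorder equally-used
   registers unpredictably.  A standalone model with hypothetical names; the
   comparator is written in the portable const void * form: */
#include <stdio.h>
#include <stdlib.h>

struct use_count { int uses, regno; };

static int
use_count_compare (const void *a, const void *b)
{
  const struct use_count *p1 = a, *p2 = b;

  if (p1->uses != p2->uses)
    return p1->uses - p2->uses;
  return p1->regno - p2->regno;    /* tie-break keeps the order deterministic */
}

int
main (void)
{
  struct use_count v[] = { { 3, 5 }, { 0, 2 }, { 3, 1 }, { 0, 7 } };
  int i;

  qsort (v, 4, sizeof v[0], use_count_compare);
  for (i = 0; i < 4; i++)
    printf ("reg %d (uses %d)\n", v[i].regno, v[i].uses);
  return 0;
}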
3501
3502/* Choose the order to consider regs for use as reload registers
3503 based on how much trouble would be caused by spilling one.
3504 Store them in order of decreasing preference in potential_reload_regs. */
3505
3506static void
3507order_regs_for_reload ()
3508{
3509 register int i;
3510 register int o = 0;
3511 int large = 0;
3512
3513 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3514
3515 CLEAR_HARD_REG_SET (bad_spill_regs);
3516
3517 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3518 potential_reload_regs[i] = -1;
3519
3520 /* Count number of uses of each hard reg by pseudo regs allocated to it
3521 and then order them by decreasing use. */
3522
3523 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3524 {
3525 hard_reg_n_uses[i].uses = 0;
3526 hard_reg_n_uses[i].regno = i;
3527 }
3528
3529 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3530 {
3531 int regno = reg_renumber[i];
3532 if (regno >= 0)
3533 {
3534 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3535 while (regno < lim)
3536 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3537 }
3538 large += reg_n_refs[i];
3539 }
3540
3541 /* Now fixed registers (which cannot safely be used for reloading)
3542 get a very high use count so they will be considered least desirable.
3543 Registers used explicitly in the rtl code are almost as bad. */
3544
3545 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3546 {
3547 if (fixed_regs[i])
3548 {
3549 hard_reg_n_uses[i].uses += 2 * large + 2;
3550 SET_HARD_REG_BIT (bad_spill_regs, i);
3551 }
3552 else if (regs_explicitly_used[i])
3553 {
3554 hard_reg_n_uses[i].uses += large + 1;
546b63fb 3555#ifndef SMALL_REGISTER_CLASSES
32131a9c
RK
3556 /* ??? We are doing this here because of the potential that
3557 bad code may be generated if a register explicitly used in
3558 an insn was used as a spill register for that insn. But
 3559	     not using these as spill registers may lose on some machine.
3560 We'll have to see how this works out. */
3561 SET_HARD_REG_BIT (bad_spill_regs, i);
546b63fb 3562#endif
32131a9c
RK
3563 }
3564 }
3ec2ea3e
DE
3565 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3566 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
32131a9c
RK
3567
3568#ifdef ELIMINABLE_REGS
3569 /* If registers other than the frame pointer are eliminable, mark them as
3570 poor choices. */
3571 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3572 {
3573 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3574 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3575 }
3576#endif
3577
3578 /* Prefer registers not so far used, for use in temporary loading.
3579 Among them, if REG_ALLOC_ORDER is defined, use that order.
3580 Otherwise, prefer registers not preserved by calls. */
3581
3582#ifdef REG_ALLOC_ORDER
3583 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3584 {
3585 int regno = reg_alloc_order[i];
3586
3587 if (hard_reg_n_uses[regno].uses == 0)
3588 potential_reload_regs[o++] = regno;
3589 }
3590#else
3591 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3592 {
3593 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3594 potential_reload_regs[o++] = i;
3595 }
3596 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3597 {
3598 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3599 potential_reload_regs[o++] = i;
3600 }
3601#endif
3602
3603 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3604 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3605
3606 /* Now add the regs that are already used,
3607 preferring those used less often. The fixed and otherwise forbidden
3608 registers will be at the end of this list. */
3609
3610 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3611 if (hard_reg_n_uses[i].uses != 0)
3612 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3613}
3614\f
a5339699
RK
3615/* Used in reload_as_needed to sort the spilled regs. */
3616static int
3617compare_spill_regs (r1, r2)
3618 short *r1, *r2;
3619{
3620 return *r1 < *r2 ? -1: 1;
3621}
3622
32131a9c
RK
3623/* Reload pseudo-registers into hard regs around each insn as needed.
3624 Additional register load insns are output before the insn that needs it
3625 and perhaps store insns after insns that modify the reloaded pseudo reg.
3626
3627 reg_last_reload_reg and reg_reloaded_contents keep track of
d08ea79f 3628 which registers are already available in reload registers.
32131a9c
RK
3629 We update these for the reloads that we perform,
3630 as the insns are scanned. */
3631
3632static void
3633reload_as_needed (first, live_known)
3634 rtx first;
3635 int live_known;
3636{
3637 register rtx insn;
3638 register int i;
3639 int this_block = 0;
3640 rtx x;
3641 rtx after_call = 0;
3642
3643 bzero (spill_reg_rtx, sizeof spill_reg_rtx);
c95c0732 3644 bzero (spill_reg_store, sizeof spill_reg_store);
32131a9c
RK
3645 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3646 bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
3647 reg_has_output_reload = (char *) alloca (max_regno);
3648 for (i = 0; i < n_spills; i++)
3649 {
3650 reg_reloaded_contents[i] = -1;
3651 reg_reloaded_insn[i] = 0;
3652 }
3653
3654 /* Reset all offsets on eliminable registers to their initial values. */
3655#ifdef ELIMINABLE_REGS
3656 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3657 {
3658 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
510dd77e 3659 reg_eliminate[i].initial_offset);
32131a9c
RK
3660 reg_eliminate[i].previous_offset
3661 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3662 }
3663#else
3664 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3665 reg_eliminate[0].previous_offset
3666 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3667#endif
3668
3669 num_not_at_initial_offset = 0;
3670
a5339699
RK
3671 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
3672 pack registers with group needs. */
3673 if (n_spills > 1)
5f40cc2d
RK
3674 {
3675 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3676 for (i = 0; i < n_spills; i++)
3677 spill_reg_order[spill_regs[i]] = i;
3678 }
a5339699 3679
32131a9c
RK
3680 for (insn = first; insn;)
3681 {
3682 register rtx next = NEXT_INSN (insn);
3683
3684 /* Notice when we move to a new basic block. */
aa2c50d6 3685 if (live_known && this_block + 1 < n_basic_blocks
32131a9c
RK
3686 && insn == basic_block_head[this_block+1])
3687 ++this_block;
3688
3689 /* If we pass a label, copy the offsets from the label information
3690 into the current offsets of each elimination. */
3691 if (GET_CODE (insn) == CODE_LABEL)
2a4b5f3b
RK
3692 {
3693 num_not_at_initial_offset = 0;
3694 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3695 {
3696 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3697 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
1d0d98f3
RK
3698 if (reg_eliminate[i].can_eliminate
3699 && (reg_eliminate[i].offset
3700 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
3701 num_not_at_initial_offset++;
3702 }
3703 }
32131a9c
RK
3704
3705 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3706 {
3707 rtx avoid_return_reg = 0;
3708
3709#ifdef SMALL_REGISTER_CLASSES
3710 /* Set avoid_return_reg if this is an insn
3711 that might use the value of a function call. */
3712 if (GET_CODE (insn) == CALL_INSN)
3713 {
3714 if (GET_CODE (PATTERN (insn)) == SET)
3715 after_call = SET_DEST (PATTERN (insn));
3716 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3717 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3718 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3719 else
3720 after_call = 0;
3721 }
3722 else if (after_call != 0
3723 && !(GET_CODE (PATTERN (insn)) == SET
3724 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3725 {
2b979c57 3726 if (reg_referenced_p (after_call, PATTERN (insn)))
32131a9c
RK
3727 avoid_return_reg = after_call;
3728 after_call = 0;
3729 }
3730#endif /* SMALL_REGISTER_CLASSES */
3731
2758481d
RS
 3732	  /* If this is a USE or CLOBBER of a MEM, ensure that any
3733 references to eliminable registers have been removed. */
3734
3735 if ((GET_CODE (PATTERN (insn)) == USE
3736 || GET_CODE (PATTERN (insn)) == CLOBBER)
3737 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3738 XEXP (XEXP (PATTERN (insn), 0), 0)
3739 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
fb3821f7 3740 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
2758481d 3741
32131a9c
RK
3742 /* If we need to do register elimination processing, do so.
3743 This might delete the insn, in which case we are done. */
3744 if (num_eliminable && GET_MODE (insn) == QImode)
3745 {
3746 eliminate_regs_in_insn (insn, 1);
3747 if (GET_CODE (insn) == NOTE)
3748 {
3749 insn = next;
3750 continue;
3751 }
3752 }
3753
3754 if (GET_MODE (insn) == VOIDmode)
3755 n_reloads = 0;
3756 /* First find the pseudo regs that must be reloaded for this insn.
3757 This info is returned in the tables reload_... (see reload.h).
3758 Also modify the body of INSN by substituting RELOAD
3759 rtx's for those pseudo regs. */
3760 else
3761 {
3762 bzero (reg_has_output_reload, max_regno);
3763 CLEAR_HARD_REG_SET (reg_is_output_reload);
3764
3765 find_reloads (insn, 1, spill_indirect_levels, live_known,
3766 spill_reg_order);
3767 }
3768
3769 if (n_reloads > 0)
3770 {
3c3eeea6
RK
3771 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3772 rtx p;
32131a9c
RK
3773 int class;
3774
3775 /* If this block has not had spilling done for a
546b63fb
RK
 3776	 particular class and we have any non-optionals that need a
3777 spill reg in that class, abort. */
32131a9c
RK
3778
3779 for (class = 0; class < N_REG_CLASSES; class++)
3780 if (basic_block_needs[class] != 0
3781 && basic_block_needs[class][this_block] == 0)
3782 for (i = 0; i < n_reloads; i++)
546b63fb
RK
3783 if (class == (int) reload_reg_class[i]
3784 && reload_reg_rtx[i] == 0
3785 && ! reload_optional[i]
3786 && (reload_in[i] != 0 || reload_out[i] != 0
3787 || reload_secondary_p[i] != 0))
3788 abort ();
32131a9c
RK
3789
3790 /* Now compute which reload regs to reload them into. Perhaps
3791 reusing reload regs from previous insns, or else output
3792 load insns to reload them. Maybe output store insns too.
3793 Record the choices of reload reg in reload_reg_rtx. */
3794 choose_reload_regs (insn, avoid_return_reg);
3795
546b63fb
RK
3796#ifdef SMALL_REGISTER_CLASSES
3797 /* Merge any reloads that we didn't combine for fear of
3798 increasing the number of spill registers needed but now
3799 discover can be safely merged. */
3800 merge_assigned_reloads (insn);
3801#endif
3802
32131a9c
RK
3803 /* Generate the insns to reload operands into or out of
3804 their reload regs. */
3805 emit_reload_insns (insn);
3806
3807 /* Substitute the chosen reload regs from reload_reg_rtx
3808 into the insn's body (or perhaps into the bodies of other
3809 load and store insn that we just made for reloading
3810 and that we moved the structure into). */
3811 subst_reloads ();
3c3eeea6
RK
3812
3813 /* If this was an ASM, make sure that all the reload insns
3814 we have generated are valid. If not, give an error
3815 and delete them. */
3816
3817 if (asm_noperands (PATTERN (insn)) >= 0)
3818 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3819 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3820 && (recog_memoized (p) < 0
3821 || (insn_extract (p),
3822 ! constrain_operands (INSN_CODE (p), 1))))
3823 {
3824 error_for_asm (insn,
3825 "`asm' operand requires impossible reload");
3826 PUT_CODE (p, NOTE);
3827 NOTE_SOURCE_FILE (p) = 0;
3828 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3829 }
32131a9c
RK
3830 }
3831 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3832 is no longer validly lying around to save a future reload.
3833 Note that this does not detect pseudos that were reloaded
3834 for this insn in order to be stored in
3835 (obeying register constraints). That is correct; such reload
3836 registers ARE still valid. */
3837 note_stores (PATTERN (insn), forget_old_reloads_1);
3838
3839 /* There may have been CLOBBER insns placed after INSN. So scan
3840 between INSN and NEXT and use them to forget old reloads. */
3841 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3842 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3843 note_stores (PATTERN (x), forget_old_reloads_1);
3844
3845#ifdef AUTO_INC_DEC
3846 /* Likewise for regs altered by auto-increment in this insn.
3847 But note that the reg-notes are not changed by reloading:
3848 they still contain the pseudo-regs, not the spill regs. */
3849 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3850 if (REG_NOTE_KIND (x) == REG_INC)
3851 {
3852 /* See if this pseudo reg was reloaded in this insn.
3853 If so, its last-reload info is still valid
3854 because it is based on this insn's reload. */
3855 for (i = 0; i < n_reloads; i++)
3856 if (reload_out[i] == XEXP (x, 0))
3857 break;
3858
08fb99fa 3859 if (i == n_reloads)
9a881562 3860 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
32131a9c
RK
3861 }
3862#endif
3863 }
3864 /* A reload reg's contents are unknown after a label. */
3865 if (GET_CODE (insn) == CODE_LABEL)
3866 for (i = 0; i < n_spills; i++)
3867 {
3868 reg_reloaded_contents[i] = -1;
3869 reg_reloaded_insn[i] = 0;
3870 }
3871
3872 /* Don't assume a reload reg is still good after a call insn
3873 if it is a call-used reg. */
546b63fb 3874 else if (GET_CODE (insn) == CALL_INSN)
32131a9c
RK
3875 for (i = 0; i < n_spills; i++)
3876 if (call_used_regs[spill_regs[i]])
3877 {
3878 reg_reloaded_contents[i] = -1;
3879 reg_reloaded_insn[i] = 0;
3880 }
3881
3882 /* In case registers overlap, allow certain insns to invalidate
3883 particular hard registers. */
3884
3885#ifdef INSN_CLOBBERS_REGNO_P
3886 for (i = 0 ; i < n_spills ; i++)
3887 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3888 {
3889 reg_reloaded_contents[i] = -1;
3890 reg_reloaded_insn[i] = 0;
3891 }
3892#endif
3893
3894 insn = next;
3895
3896#ifdef USE_C_ALLOCA
3897 alloca (0);
3898#endif
3899 }
3900}
3901
3902/* Discard all record of any value reloaded from X,
3903 or reloaded in X from someplace else;
3904 unless X is an output reload reg of the current insn.
3905
3906 X may be a hard reg (the reload reg)
3907 or it may be a pseudo reg that was reloaded from. */
3908
3909static void
9a881562 3910forget_old_reloads_1 (x, ignored)
32131a9c 3911 rtx x;
9a881562 3912 rtx ignored;
32131a9c
RK
3913{
3914 register int regno;
3915 int nr;
0a2e51a9
RS
3916 int offset = 0;
3917
3918 /* note_stores does give us subregs of hard regs. */
3919 while (GET_CODE (x) == SUBREG)
3920 {
3921 offset += SUBREG_WORD (x);
3922 x = SUBREG_REG (x);
3923 }
32131a9c
RK
3924
3925 if (GET_CODE (x) != REG)
3926 return;
3927
0a2e51a9 3928 regno = REGNO (x) + offset;
32131a9c
RK
3929
3930 if (regno >= FIRST_PSEUDO_REGISTER)
3931 nr = 1;
3932 else
3933 {
3934 int i;
3935 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3936 /* Storing into a spilled-reg invalidates its contents.
3937 This can happen if a block-local pseudo is allocated to that reg
3938 and it wasn't spilled because this block's total need is 0.
3939 Then some insn might have an optional reload and use this reg. */
3940 for (i = 0; i < nr; i++)
3941 if (spill_reg_order[regno + i] >= 0
3942 /* But don't do this if the reg actually serves as an output
3943 reload reg in the current instruction. */
3944 && (n_reloads == 0
3945 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3946 {
3947 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3948 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3949 }
3950 }
3951
3952 /* Since value of X has changed,
3953 forget any value previously copied from it. */
3954
3955 while (nr-- > 0)
3956 /* But don't forget a copy if this is the output reload
3957 that establishes the copy's validity. */
3958 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3959 reg_last_reload_reg[regno + nr] = 0;
3960}
3961\f
3962/* For each reload, the mode of the reload register. */
3963static enum machine_mode reload_mode[MAX_RELOADS];
3964
3965/* For each reload, the largest number of registers it will require. */
3966static int reload_nregs[MAX_RELOADS];
3967
3968/* Comparison function for qsort to decide which of two reloads
3969 should be handled first. *P1 and *P2 are the reload numbers. */
3970
3971static int
3972reload_reg_class_lower (p1, p2)
3973 short *p1, *p2;
3974{
3975 register int r1 = *p1, r2 = *p2;
3976 register int t;
a8fdc208 3977
32131a9c
RK
3978 /* Consider required reloads before optional ones. */
3979 t = reload_optional[r1] - reload_optional[r2];
3980 if (t != 0)
3981 return t;
3982
3983 /* Count all solitary classes before non-solitary ones. */
3984 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3985 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3986 if (t != 0)
3987 return t;
3988
3989 /* Aside from solitaires, consider all multi-reg groups first. */
3990 t = reload_nregs[r2] - reload_nregs[r1];
3991 if (t != 0)
3992 return t;
3993
3994 /* Consider reloads in order of increasing reg-class number. */
3995 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3996 if (t != 0)
3997 return t;
3998
3999 /* If reloads are equally urgent, sort by reload number,
4000 so that the results of qsort leave nothing to chance. */
4001 return r1 - r2;
4002}
4003\f
4004/* The following HARD_REG_SETs indicate when each hard register is
4005 used for a reload of various parts of the current insn. */
4006
4007/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4008static HARD_REG_SET reload_reg_used;
546b63fb
RK
4009/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4010static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4011/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4012static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4013/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4014static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4015/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4016static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c
RK
4017/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4018static HARD_REG_SET reload_reg_used_in_op_addr;
893bc853
RK
4019/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4020static HARD_REG_SET reload_reg_used_in_op_addr_reload;
546b63fb
RK
4021/* If reg is in use for a RELOAD_FOR_INSN reload. */
4022static HARD_REG_SET reload_reg_used_in_insn;
4023/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4024static HARD_REG_SET reload_reg_used_in_other_addr;
32131a9c
RK
4025
4026/* If reg is in use as a reload reg for any sort of reload. */
4027static HARD_REG_SET reload_reg_used_at_all;
4028
be7ae2a4
RK
 4029/* If reg is used as an inherited reload.  We just mark the first register
4030 in the group. */
4031static HARD_REG_SET reload_reg_used_for_inherit;
4032
546b63fb
RK
4033/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4034 TYPE. MODE is used to indicate how many consecutive regs are
4035 actually used. */
32131a9c
RK
4036
4037static void
546b63fb 4038mark_reload_reg_in_use (regno, opnum, type, mode)
32131a9c 4039 int regno;
546b63fb
RK
4040 int opnum;
4041 enum reload_type type;
32131a9c
RK
4042 enum machine_mode mode;
4043{
4044 int nregs = HARD_REGNO_NREGS (regno, mode);
4045 int i;
4046
4047 for (i = regno; i < nregs + regno; i++)
4048 {
546b63fb 4049 switch (type)
32131a9c
RK
4050 {
4051 case RELOAD_OTHER:
4052 SET_HARD_REG_BIT (reload_reg_used, i);
4053 break;
4054
546b63fb
RK
4055 case RELOAD_FOR_INPUT_ADDRESS:
4056 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
32131a9c
RK
4057 break;
4058
546b63fb
RK
4059 case RELOAD_FOR_OUTPUT_ADDRESS:
4060 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
32131a9c
RK
4061 break;
4062
4063 case RELOAD_FOR_OPERAND_ADDRESS:
4064 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4065 break;
4066
893bc853
RK
4067 case RELOAD_FOR_OPADDR_ADDR:
4068 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4069 break;
4070
546b63fb
RK
4071 case RELOAD_FOR_OTHER_ADDRESS:
4072 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4073 break;
4074
32131a9c 4075 case RELOAD_FOR_INPUT:
546b63fb 4076 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
32131a9c
RK
4077 break;
4078
4079 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4080 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4081 break;
4082
4083 case RELOAD_FOR_INSN:
4084 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
32131a9c
RK
4085 break;
4086 }
4087
4088 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4089 }
4090}
4091
be7ae2a4
RK
4092/* Similarly, but show REGNO is no longer in use for a reload. */
4093
4094static void
4095clear_reload_reg_in_use (regno, opnum, type, mode)
4096 int regno;
4097 int opnum;
4098 enum reload_type type;
4099 enum machine_mode mode;
4100{
4101 int nregs = HARD_REGNO_NREGS (regno, mode);
4102 int i;
4103
4104 for (i = regno; i < nregs + regno; i++)
4105 {
4106 switch (type)
4107 {
4108 case RELOAD_OTHER:
4109 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4110 break;
4111
4112 case RELOAD_FOR_INPUT_ADDRESS:
4113 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4114 break;
4115
4116 case RELOAD_FOR_OUTPUT_ADDRESS:
4117 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4118 break;
4119
4120 case RELOAD_FOR_OPERAND_ADDRESS:
4121 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4122 break;
4123
893bc853
RK
4124 case RELOAD_FOR_OPADDR_ADDR:
4125 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4126 break;
4127
be7ae2a4
RK
4128 case RELOAD_FOR_OTHER_ADDRESS:
4129 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4130 break;
4131
4132 case RELOAD_FOR_INPUT:
4133 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4134 break;
4135
4136 case RELOAD_FOR_OUTPUT:
4137 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4138 break;
4139
4140 case RELOAD_FOR_INSN:
4141 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4142 break;
4143 }
4144 }
4145}
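/* Illustrative sketch, not part of reload1.c: mark_reload_reg_in_use and
   clear_reload_reg_in_use maintain one hard-register set per kind of use,
   which the reload_reg_free_p family then queries.  A standalone model using
   plain bitmasks instead of HARD_REG_SET, with hypothetical names and a cut
   down list of use kinds: */
#include <stdio.h>

enum use_kind { USE_OTHER, USE_INPUT, USE_OUTPUT, NUM_USE_KINDS };

static unsigned int used[NUM_USE_KINDS];   /* one bit per hard register */

static void mark_in_use (enum use_kind u, int regno)  { used[u] |= 1u << regno; }
static void clear_in_use (enum use_kind u, int regno) { used[u] &= ~(1u << regno); }
static int  in_use_p (enum use_kind u, int regno)     { return (used[u] >> regno) & 1; }

int
main (void)
{
  mark_in_use (USE_INPUT, 3);
  printf ("%d %d\n", in_use_p (USE_INPUT, 3), in_use_p (USE_OUTPUT, 3));
  clear_in_use (USE_INPUT, 3);
  printf ("%d\n", in_use_p (USE_INPUT, 3));
  return 0;
}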
4146
32131a9c 4147/* 1 if reg REGNO is free as a reload reg for a reload of the sort
546b63fb 4148 specified by OPNUM and TYPE. */
32131a9c
RK
4149
4150static int
546b63fb 4151reload_reg_free_p (regno, opnum, type)
32131a9c 4152 int regno;
546b63fb
RK
4153 int opnum;
4154 enum reload_type type;
32131a9c 4155{
546b63fb
RK
4156 int i;
4157
4158 /* In use for a RELOAD_OTHER means it's not available for anything except
4159 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4160 to be used only for inputs. */
4161
4162 if (type != RELOAD_FOR_OTHER_ADDRESS
4163 && TEST_HARD_REG_BIT (reload_reg_used, regno))
32131a9c 4164 return 0;
546b63fb
RK
4165
4166 switch (type)
32131a9c
RK
4167 {
4168 case RELOAD_OTHER:
4169 /* In use for anything means not available for a RELOAD_OTHER. */
4170 return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);
4171
4172 /* The other kinds of use can sometimes share a register. */
4173 case RELOAD_FOR_INPUT:
546b63fb
RK
4174 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4175 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4176 return 0;
4177
893bc853
RK
4178 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4179 return 0;
4180
546b63fb
RK
4181 /* If it is used for some other input, can't use it. */
4182 for (i = 0; i < reload_n_operands; i++)
4183 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4184 return 0;
4185
4186 /* If it is used in a later operand's address, can't use it. */
4187 for (i = opnum + 1; i < reload_n_operands; i++)
4188 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4189 return 0;
4190
4191 return 1;
4192
4193 case RELOAD_FOR_INPUT_ADDRESS:
4194 /* Can't use a register if it is used for an input address for this
4195 operand or used as an input in an earlier one. */
4196 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4197 return 0;
4198
4199 for (i = 0; i < opnum; i++)
4200 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4201 return 0;
4202
4203 return 1;
4204
4205 case RELOAD_FOR_OUTPUT_ADDRESS:
4206 /* Can't use a register if it is used for an output address for this
4207 operand or used as an output in this or a later operand. */
4208 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4209 return 0;
4210
4211 for (i = opnum; i < reload_n_operands; i++)
4212 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4213 return 0;
4214
4215 return 1;
4216
32131a9c 4217 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4218 for (i = 0; i < reload_n_operands; i++)
4219 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4220 return 0;
4221
4222 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4223 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4224
893bc853
RK
4225 case RELOAD_FOR_OPADDR_ADDR:
4226 for (i = 0; i < reload_n_operands; i++)
4227 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4228 return 0;
4229
4230 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4231
32131a9c 4232 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4233 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4234 outputs, or an operand address for this or an earlier output. */
4235 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4236 return 0;
4237
4238 for (i = 0; i < reload_n_operands; i++)
4239 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4240 return 0;
4241
4242 for (i = 0; i <= opnum; i++)
4243 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4244 return 0;
4245
4246 return 1;
4247
4248 case RELOAD_FOR_INSN:
4249 for (i = 0; i < reload_n_operands; i++)
4250 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4251 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4252 return 0;
4253
4254 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4255 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4256
4257 case RELOAD_FOR_OTHER_ADDRESS:
4258 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4259 }
4260 abort ();
4261}
4262
4263/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4264 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4265 is not in use for a reload in any prior part of the insn.
4266
4267 We can assume that the reload reg was already tested for availability
4268 at the time it is needed, and we should not check this again,
4269 in case the reg has already been marked in use. */
4270
4271static int
546b63fb 4272reload_reg_free_before_p (regno, opnum, type)
32131a9c 4273 int regno;
546b63fb
RK
4274 int opnum;
4275 enum reload_type type;
32131a9c 4276{
546b63fb
RK
4277 int i;
4278
4279 switch (type)
32131a9c 4280 {
546b63fb
RK
4281 case RELOAD_FOR_OTHER_ADDRESS:
4282 /* These always come first. */
32131a9c
RK
4283 return 1;
4284
546b63fb
RK
4285 case RELOAD_OTHER:
4286 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4287
32131a9c 4288 /* If this use is for part of the insn,
546b63fb
RK
4289 check the reg is not in use for any prior part. It is tempting
 4291	     to try to do this by falling through from objects that occur
4291 later in the insn to ones that occur earlier, but that will not
4292 correctly take into account the fact that here we MUST ignore
4293 things that would prevent the register from being allocated in
4294 the first place, since we know that it was allocated. */
4295
4296 case RELOAD_FOR_OUTPUT_ADDRESS:
4297 /* Earlier reloads are for earlier outputs or their addresses,
4298 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4299 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
 4300	 RELOAD_OTHER).  */
4301 for (i = 0; i < opnum; i++)
4302 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4303 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4304 return 0;
4305
4306 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
32131a9c 4307 return 0;
546b63fb
RK
4308
4309 for (i = 0; i < reload_n_operands; i++)
4310 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4311 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4312 return 0;
4313
4314 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4315 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4316 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4317
32131a9c 4318 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4319 /* This can't be used in the output address for this operand and
4320 anything that can't be used for it, except that we've already
4321 tested for RELOAD_FOR_INSN objects. */
4322
4323 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
32131a9c 4324 return 0;
546b63fb
RK
4325
4326 for (i = 0; i < opnum; i++)
4327 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4328 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4329 return 0;
4330
4331 for (i = 0; i < reload_n_operands; i++)
4332 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4333 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4334 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4335 return 0;
4336
4337 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4338
32131a9c 4339 case RELOAD_FOR_OPERAND_ADDRESS:
893bc853 4340 case RELOAD_FOR_OPADDR_ADDR:
546b63fb
RK
4341 case RELOAD_FOR_INSN:
4342 /* These can't conflict with inputs, or each other, so all we have to
4343 test is input addresses and the addresses of OTHER items. */
4344
4345 for (i = 0; i < reload_n_operands; i++)
4346 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4347 return 0;
4348
4349 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4350
32131a9c 4351 case RELOAD_FOR_INPUT:
546b63fb
RK
4352 /* The only things earlier are the address for this and
4353 earlier inputs, other inputs (which we know we don't conflict
4354 with), and addresses of RELOAD_OTHER objects. */
4355
4356 for (i = 0; i <= opnum; i++)
4357 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4358 return 0;
4359
4360 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4361
4362 case RELOAD_FOR_INPUT_ADDRESS:
4363 /* Similarly, all we have to check is for use in earlier inputs'
4364 addresses. */
4365 for (i = 0; i < opnum; i++)
4366 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4367 return 0;
4368
4369 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4370 }
4371 abort ();
4372}
4373
4374/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4375 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4376 is still available in REGNO at the end of the insn.
4377
4378 We can assume that the reload reg was already tested for availability
4379 at the time it is needed, and we should not check this again,
4380 in case the reg has already been marked in use. */
4381
4382static int
546b63fb 4383reload_reg_reaches_end_p (regno, opnum, type)
32131a9c 4384 int regno;
546b63fb
RK
4385 int opnum;
4386 enum reload_type type;
32131a9c 4387{
546b63fb
RK
4388 int i;
4389
4390 switch (type)
32131a9c
RK
4391 {
4392 case RELOAD_OTHER:
4393 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4394 its value must reach the end. */
4395 return 1;
4396
4397 /* If this use is for part of the insn,
546b63fb
RK
4398 its value reaches if no subsequent part uses the same register.
4399 Just like the above function, don't try to do this with lots
4400 of fallthroughs. */
4401
4402 case RELOAD_FOR_OTHER_ADDRESS:
4403 /* Here we check for everything else, since these don't conflict
4404 with anything else and everything comes later. */
4405
4406 for (i = 0; i < reload_n_operands; i++)
4407 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4408 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4409 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4410 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4411 return 0;
4412
4413 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4414 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4415 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4416
4417 case RELOAD_FOR_INPUT_ADDRESS:
4418 /* Similar, except that we check only for this and subsequent inputs
4419 and the address of only subsequent inputs and we do not need
4420 to check for RELOAD_OTHER objects since they are known not to
4421 conflict. */
4422
4423 for (i = opnum; i < reload_n_operands; i++)
4424 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4425 return 0;
4426
4427 for (i = opnum + 1; i < reload_n_operands; i++)
4428 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4429 return 0;
4430
4431 for (i = 0; i < reload_n_operands; i++)
4432 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4433 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4434 return 0;
4435
893bc853
RK
4436 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4437 return 0;
4438
546b63fb
RK
4439 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4440 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4441
32131a9c 4442 case RELOAD_FOR_INPUT:
546b63fb
RK
4443 /* Similar to input address, except we start at the next operand for
4444 both input and input address and we do not check for
4445 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4446 would conflict. */
4447
4448 for (i = opnum + 1; i < reload_n_operands; i++)
4449 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4450 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4451 return 0;
4452
4453 /* ... fall through ... */
4454
32131a9c 4455 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4456 /* Check outputs and their addresses. */
4457
4458 for (i = 0; i < reload_n_operands; i++)
4459 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4460 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4461 return 0;
4462
4463 return 1;
4464
893bc853
RK
4465 case RELOAD_FOR_OPADDR_ADDR:
4466 for (i = 0; i < reload_n_operands; i++)
4467 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4468 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4469 return 0;
4470
4471 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4472 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4473
546b63fb 4474 case RELOAD_FOR_INSN:
893bc853 4475 /* These conflict with other outputs with RELOAD_OTHER. So
546b63fb
RK
4476 we need only check for output addresses. */
4477
4478 opnum = -1;
4479
4480 /* ... fall through ... */
4481
32131a9c 4482 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4483 case RELOAD_FOR_OUTPUT_ADDRESS:
4484 /* We already know these can't conflict with a later output. So the
4485 only thing to check are later output addresses. */
4486 for (i = opnum + 1; i < reload_n_operands; i++)
4487 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4488 return 0;
4489
32131a9c
RK
4490 return 1;
4491 }
546b63fb 4492
32131a9c
RK
4493 abort ();
4494}
4495\f
351aa1c1
RK
4496/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4497 Return 0 otherwise.
4498
4499 This function uses the same algorithm as reload_reg_free_p above. */
4500
4501static int
4502reloads_conflict (r1, r2)
4503 int r1, r2;
4504{
4505 enum reload_type r1_type = reload_when_needed[r1];
4506 enum reload_type r2_type = reload_when_needed[r2];
4507 int r1_opnum = reload_opnum[r1];
4508 int r2_opnum = reload_opnum[r2];
4509
4510 /* RELOAD_OTHER conflicts with everything except
4511 RELOAD_FOR_OTHER_ADDRESS. */
4512
4513 if ((r1_type == RELOAD_OTHER && r2_type != RELOAD_FOR_OTHER_ADDRESS)
4514 || (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS))
4515 return 1;
4516
4517 /* Otherwise, check conflicts differently for each type. */
4518
4519 switch (r1_type)
4520 {
4521 case RELOAD_FOR_INPUT:
4522 return (r2_type == RELOAD_FOR_INSN
4523 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
893bc853 4524 || r2_type == RELOAD_FOR_OPADDR_ADDR
351aa1c1
RK
4525 || r2_type == RELOAD_FOR_INPUT
4526 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4527
4528 case RELOAD_FOR_INPUT_ADDRESS:
4529 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4530 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4531
4532 case RELOAD_FOR_OUTPUT_ADDRESS:
4533 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4534 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4535
4536 case RELOAD_FOR_OPERAND_ADDRESS:
4537 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4538 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4539
893bc853
RK
4540 case RELOAD_FOR_OPADDR_ADDR:
4541 return (r2_type == RELOAD_FOR_INPUT
4542 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4543
351aa1c1
RK
4544 case RELOAD_FOR_OUTPUT:
4545 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
0aaf4a58 4546 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
351aa1c1
RK
4547 && r2_opnum >= r1_opnum));
4548
4549 case RELOAD_FOR_INSN:
4550 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4551 || r2_type == RELOAD_FOR_INSN
4552 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4553
4554 case RELOAD_FOR_OTHER_ADDRESS:
4555 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4556
4557 default:
4558 abort ();
4559 }
4560}
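/* Illustrative sketch, not part of reload1.c: two of the rules above say that
   an input reload conflicts with the address reload of any LATER operand,
   while an input-address reload conflicts with the input of any EARLIER
   operand -- the same relation seen from either side.  A standalone model of
   just that pair of cases, with hypothetical names: */
#include <stdio.h>

enum kind { INPUT, INPUT_ADDRESS };

struct rl { enum kind type; int opnum; };

static int
conflict_p (struct rl a, struct rl b)
{
  if (a.type == INPUT && b.type == INPUT_ADDRESS)
    return b.opnum > a.opnum;
  if (a.type == INPUT_ADDRESS && b.type == INPUT)
    return a.opnum > b.opnum;       /* mirror image of the case above */
  if (a.type == INPUT && b.type == INPUT)
    return 1;                       /* two inputs never share a register */
  return a.opnum == b.opnum;        /* two addresses of the same operand */
}

int
main (void)
{
  struct rl in0 = { INPUT, 0 }, addr1 = { INPUT_ADDRESS, 1 };

  /* Prints 1: operand 0's input must still be live when operand 1's address
     is being reloaded, so the two cannot share a register.  */
  printf ("%d\n", conflict_p (in0, addr1));
  return 0;
}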
4561\f
32131a9c
RK
4562/* Vector of reload-numbers showing the order in which the reloads should
4563 be processed. */
4564short reload_order[MAX_RELOADS];
4565
4566/* Indexed by reload number, 1 if incoming value
4567 inherited from previous insns. */
4568char reload_inherited[MAX_RELOADS];
4569
4570/* For an inherited reload, this is the insn the reload was inherited from,
4571 if we know it. Otherwise, this is 0. */
4572rtx reload_inheritance_insn[MAX_RELOADS];
4573
4574/* If non-zero, this is a place to get the value of the reload,
4575 rather than using reload_in. */
4576rtx reload_override_in[MAX_RELOADS];
4577
4578/* For each reload, the index in spill_regs of the spill register used,
4579 or -1 if we did not need one of the spill registers for this reload. */
4580int reload_spill_index[MAX_RELOADS];
4581
4582/* Index of last register assigned as a spill register. We allocate in
 4583	   a round-robin fashion.  */
4584
1d2310f3 4585static int last_spill_reg = 0;
32131a9c
RK
4586
4587/* Find a spill register to use as a reload register for reload R.
4588 LAST_RELOAD is non-zero if this is the last reload for the insn being
4589 processed.
4590
4591 Set reload_reg_rtx[R] to the register allocated.
4592
4593 If NOERROR is nonzero, we return 1 if successful,
4594 or 0 if we couldn't find a spill reg and we didn't change anything. */
4595
4596static int
4597allocate_reload_reg (r, insn, last_reload, noerror)
4598 int r;
4599 rtx insn;
4600 int last_reload;
4601 int noerror;
4602{
4603 int i;
4604 int pass;
4605 int count;
4606 rtx new;
4607 int regno;
4608
4609 /* If we put this reload ahead, thinking it is a group,
4610 then insist on finding a group. Otherwise we can grab a
a8fdc208 4611 reg that some other reload needs.
32131a9c
RK
4612 (That can happen when we have a 68000 DATA_OR_FP_REG
4613 which is a group of data regs or one fp reg.)
4614 We need not be so restrictive if there are no more reloads
4615 for this insn.
4616
4617 ??? Really it would be nicer to have smarter handling
4618 for that kind of reg class, where a problem like this is normal.
4619 Perhaps those classes should be avoided for reloading
4620 by use of more alternatives. */
4621
4622 int force_group = reload_nregs[r] > 1 && ! last_reload;
4623
4624 /* If we want a single register and haven't yet found one,
4625 take any reg in the right class and not in use.
4626 If we want a consecutive group, here is where we look for it.
4627
4628 We use two passes so we can first look for reload regs to
4629 reuse, which are already in use for other reloads in this insn,
4630 and only then use additional registers.
4631 I think that maximizing reuse is needed to make sure we don't
4632 run out of reload regs. Suppose we have three reloads, and
4633 reloads A and B can share regs. These need two regs.
4634 Suppose A and B are given different regs.
4635 That leaves none for C. */
4636 for (pass = 0; pass < 2; pass++)
4637 {
4638 /* I is the index in spill_regs.
4639 We advance it round-robin between insns to use all spill regs
4640 equally, so that inherited reloads have a chance
a5339699
RK
4641 of leapfrogging each other. Don't do this, however, when we have
4642 group needs and failure would be fatal; if we only have a relatively
4643 small number of spill registers, and more than one of them has
4644 group needs, then by starting in the middle, we may end up
4645 allocating the first one in such a way that we are not left with
4646 sufficient groups to handle the rest. */
4647
4648 if (noerror || ! force_group)
4649 i = last_spill_reg;
4650 else
4651 i = -1;
4652
4653 for (count = 0; count < n_spills; count++)
32131a9c
RK
4654 {
4655 int class = (int) reload_reg_class[r];
4656
4657 i = (i + 1) % n_spills;
4658
546b63fb
RK
4659 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4660 reload_when_needed[r])
32131a9c
RK
4661 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4662 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
be7ae2a4
RK
4663 /* Look first for regs to share, then for unshared. But
4664 don't share regs used for inherited reloads; they are
4665 the ones we want to preserve. */
4666 && (pass
4667 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4668 spill_regs[i])
4669 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4670 spill_regs[i]))))
32131a9c
RK
4671 {
4672 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4673 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4674 (on 68000) got us two FP regs. If NR is 1,
4675 we would reject both of them. */
4676 if (force_group)
4677 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4678 /* If we need only one reg, we have already won. */
4679 if (nr == 1)
4680 {
4681 /* But reject a single reg if we demand a group. */
4682 if (force_group)
4683 continue;
4684 break;
4685 }
4686 /* Otherwise check that as many consecutive regs as we need
4687 are available here.
4688 Also, don't use for a group registers that are
4689 needed for nongroups. */
4690 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4691 while (nr > 1)
4692 {
4693 regno = spill_regs[i] + nr - 1;
4694 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4695 && spill_reg_order[regno] >= 0
546b63fb
RK
4696 && reload_reg_free_p (regno, reload_opnum[r],
4697 reload_when_needed[r])
32131a9c
RK
4698 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4699 regno)))
4700 break;
4701 nr--;
4702 }
4703 if (nr == 1)
4704 break;
4705 }
4706 }
4707
4708 /* If we found something on pass 1, omit pass 2. */
4709 if (count < n_spills)
4710 break;
4711 }
4712
4713 /* We should have found a spill register by now. */
4714 if (count == n_spills)
4715 {
4716 if (noerror)
4717 return 0;
139fc12e 4718 goto failure;
32131a9c
RK
4719 }
4720
be7ae2a4
RK
4721 /* I is the index in SPILL_REG_RTX of the reload register we are to
4722 allocate. Get an rtx for it and find its register number. */
32131a9c
RK
4723
4724 new = spill_reg_rtx[i];
4725
4726 if (new == 0 || GET_MODE (new) != reload_mode[r])
be7ae2a4
RK
4727 spill_reg_rtx[i] = new
4728 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4729
32131a9c
RK
4730 regno = true_regnum (new);
4731
4732 /* Detect when the reload reg can't hold the reload mode.
 4733 This used to be one `if', but the Sequent compiler can't handle that. */
4734 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4735 {
4736 enum machine_mode test_mode = VOIDmode;
4737 if (reload_in[r])
4738 test_mode = GET_MODE (reload_in[r]);
4739 /* If reload_in[r] has VOIDmode, it means we will load it
4740 in whatever mode the reload reg has: to wit, reload_mode[r].
4741 We have already tested that for validity. */
4742 /* Aside from that, we need to test that the expressions
4743 to reload from or into have modes which are valid for this
4744 reload register. Otherwise the reload insns would be invalid. */
4745 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4746 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4747 if (! (reload_out[r] != 0
4748 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
be7ae2a4
RK
4749 {
4750 /* The reg is OK. */
4751 last_spill_reg = i;
4752
4753 /* Mark as in use for this insn the reload regs we use
4754 for this. */
4755 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4756 reload_when_needed[r], reload_mode[r]);
4757
4758 reload_reg_rtx[r] = new;
4759 reload_spill_index[r] = i;
4760 return 1;
4761 }
32131a9c
RK
4762 }
4763
4764 /* The reg is not OK. */
4765 if (noerror)
4766 return 0;
4767
139fc12e 4768 failure:
32131a9c
RK
4769 if (asm_noperands (PATTERN (insn)) < 0)
4770 /* It's the compiler's fault. */
4771 abort ();
4772
4773 /* It's the user's fault; the operand's mode and constraint
4774 don't match. Disable this reload so we don't crash in final. */
4775 error_for_asm (insn,
4776 "`asm' operand constraint incompatible with operand size");
4777 reload_in[r] = 0;
4778 reload_out[r] = 0;
4779 reload_reg_rtx[r] = 0;
4780 reload_optional[r] = 1;
4781 reload_secondary_p[r] = 1;
4782
4783 return 1;
4784}
4785\f
4786/* Assign hard reg targets for the pseudo-registers we must reload
4787 into hard regs for this insn.
4788 Also output the instructions to copy them in and out of the hard regs.
4789
4790 For machines with register classes, we are responsible for
4791 finding a reload reg in the proper class. */
4792
4793static void
4794choose_reload_regs (insn, avoid_return_reg)
4795 rtx insn;
32131a9c
RK
4796 rtx avoid_return_reg;
4797{
4798 register int i, j;
4799 int max_group_size = 1;
4800 enum reg_class group_class = NO_REGS;
4801 int inheritance;
4802
4803 rtx save_reload_reg_rtx[MAX_RELOADS];
4804 char save_reload_inherited[MAX_RELOADS];
4805 rtx save_reload_inheritance_insn[MAX_RELOADS];
4806 rtx save_reload_override_in[MAX_RELOADS];
4807 int save_reload_spill_index[MAX_RELOADS];
4808 HARD_REG_SET save_reload_reg_used;
546b63fb
RK
4809 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4810 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4811 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4812 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c 4813 HARD_REG_SET save_reload_reg_used_in_op_addr;
893bc853 4814 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
546b63fb
RK
4815 HARD_REG_SET save_reload_reg_used_in_insn;
4816 HARD_REG_SET save_reload_reg_used_in_other_addr;
32131a9c
RK
4817 HARD_REG_SET save_reload_reg_used_at_all;
4818
4819 bzero (reload_inherited, MAX_RELOADS);
4820 bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4821 bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));
4822
4823 CLEAR_HARD_REG_SET (reload_reg_used);
4824 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
32131a9c 4825 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
893bc853 4826 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
546b63fb
RK
4827 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4828 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
32131a9c 4829
546b63fb
RK
4830 for (i = 0; i < reload_n_operands; i++)
4831 {
4832 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4833 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4834 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4835 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4836 }
32131a9c
RK
4837
4838#ifdef SMALL_REGISTER_CLASSES
4839 /* Don't bother with avoiding the return reg
4840 if we have no mandatory reload that could use it. */
4841 if (avoid_return_reg)
4842 {
4843 int do_avoid = 0;
4844 int regno = REGNO (avoid_return_reg);
4845 int nregs
4846 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4847 int r;
4848
4849 for (r = regno; r < regno + nregs; r++)
4850 if (spill_reg_order[r] >= 0)
4851 for (j = 0; j < n_reloads; j++)
4852 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4853 && (reload_in[j] != 0 || reload_out[j] != 0
4854 || reload_secondary_p[j])
4855 &&
4856 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4857 do_avoid = 1;
4858 if (!do_avoid)
4859 avoid_return_reg = 0;
4860 }
4861#endif /* SMALL_REGISTER_CLASSES */
4862
4863#if 0 /* Not needed, now that we can always retry without inheritance. */
4864 /* See if we have more mandatory reloads than spill regs.
4865 If so, then we cannot risk optimizations that could prevent
a8fdc208 4866 reloads from sharing one spill register.
32131a9c
RK
4867
4868 Since we will try finding a better register than reload_reg_rtx
4869 unless it is equal to reload_in or reload_out, count such reloads. */
4870
4871 {
4872 int tem = 0;
4873#ifdef SMALL_REGISTER_CLASSES
4874 int tem = (avoid_return_reg != 0);
a8fdc208 4875#endif
32131a9c
RK
4876 for (j = 0; j < n_reloads; j++)
4877 if (! reload_optional[j]
4878 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4879 && (reload_reg_rtx[j] == 0
4880 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4881 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4882 tem++;
4883 if (tem > n_spills)
4884 must_reuse = 1;
4885 }
4886#endif
4887
4888#ifdef SMALL_REGISTER_CLASSES
4889 /* Don't use the subroutine call return reg for a reload
4890 if we are supposed to avoid it. */
4891 if (avoid_return_reg)
4892 {
4893 int regno = REGNO (avoid_return_reg);
4894 int nregs
4895 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4896 int r;
4897
4898 for (r = regno; r < regno + nregs; r++)
4899 if (spill_reg_order[r] >= 0)
4900 SET_HARD_REG_BIT (reload_reg_used, r);
4901 }
4902#endif /* SMALL_REGISTER_CLASSES */
4903
4904 /* In order to be certain of getting the registers we need,
4905 we must sort the reloads into order of increasing register class.
4906 Then our grabbing of reload registers will parallel the process
a8fdc208 4907 that provided the reload registers.
32131a9c
RK
4908
4909 Also note whether any of the reloads wants a consecutive group of regs.
4910 If so, record the maximum size of the group desired and what
4911 register class contains all the groups needed by this insn. */
4912
4913 for (j = 0; j < n_reloads; j++)
4914 {
4915 reload_order[j] = j;
4916 reload_spill_index[j] = -1;
4917
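      /* Pick the mode for this reload: the wider of the input and output
	 modes (a VOIDmode input counts as narrowest), so that a single
	 reload register can hold the value in either direction.  */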
4918 reload_mode[j]
546b63fb
RK
4919 = (reload_inmode[j] == VOIDmode
4920 || (GET_MODE_SIZE (reload_outmode[j])
4921 > GET_MODE_SIZE (reload_inmode[j])))
4922 ? reload_outmode[j] : reload_inmode[j];
32131a9c
RK
4923
4924 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4925
4926 if (reload_nregs[j] > 1)
4927 {
4928 max_group_size = MAX (reload_nregs[j], max_group_size);
4929 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4930 }
4931
4932 /* If we have already decided to use a certain register,
4933 don't use it in another way. */
4934 if (reload_reg_rtx[j])
546b63fb 4935 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
32131a9c
RK
4936 reload_when_needed[j], reload_mode[j]);
4937 }
4938
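      /* Sort reload_order so the reloads are processed in order of
	 increasing register class, as described above;
	 reload_reg_class_lower is the comparison function used below.  */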
4939 if (n_reloads > 1)
4940 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
4941
4942 bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
4943 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4944 bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
4945 sizeof reload_inheritance_insn);
4946 bcopy (reload_override_in, save_reload_override_in,
4947 sizeof reload_override_in);
4948 bcopy (reload_spill_index, save_reload_spill_index,
4949 sizeof reload_spill_index);
4950 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4951 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
32131a9c
RK
4952 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4953 reload_reg_used_in_op_addr);
893bc853
RK
4954
4955 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
4956 reload_reg_used_in_op_addr_reload);
4957
546b63fb
RK
4958 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
4959 reload_reg_used_in_insn);
4960 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
4961 reload_reg_used_in_other_addr);
4962
4963 for (i = 0; i < reload_n_operands; i++)
4964 {
4965 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
4966 reload_reg_used_in_output[i]);
4967 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
4968 reload_reg_used_in_input[i]);
4969 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
4970 reload_reg_used_in_input_addr[i]);
4971 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
4972 reload_reg_used_in_output_addr[i]);
4973 }
32131a9c 4974
58b1581b
RS
4975 /* If -O, try first with inheritance, then turning it off.
4976 If not -O, don't do inheritance.
4977 Using inheritance when not optimizing leads to paradoxes
4978 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
4979 because one side of the comparison might be inherited. */
32131a9c 4980
58b1581b 4981 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
32131a9c
RK
4982 {
4983 /* Process the reloads in order of preference just found.
4984 Beyond this point, subregs can be found in reload_reg_rtx.
4985
4986 This used to look for an existing reloaded home for all
4987 of the reloads, and only then perform any new reloads.
4988 But that could lose if the reloads were done out of reg-class order
4989 because a later reload with a looser constraint might have an old
4990 home in a register needed by an earlier reload with a tighter constraint.
4991
4992 To solve this, we make two passes over the reloads, in the order
4993 described above. In the first pass we try to inherit a reload
4994 from a previous insn. If there is a later reload that needs a
4995 class that is a proper subset of the class being processed, we must
4996 also allocate a spill register during the first pass.
4997
4998 Then make a second pass over the reloads to allocate any reloads
4999 that haven't been given registers yet. */
5000
be7ae2a4
RK
5001 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5002
32131a9c
RK
5003 for (j = 0; j < n_reloads; j++)
5004 {
5005 register int r = reload_order[j];
5006
5007 /* Ignore reloads that got marked inoperative. */
5008 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5009 continue;
5010
 5011	  /* If find_reloads chose to use reload_in or reload_out as a reload
 5012	     register, we don't need to choose one. Otherwise, try even if it found
5013 one since we might save an insn if we find the value lying around. */
5014 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5015 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5016 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5017 continue;
5018
5019#if 0 /* No longer needed for correct operation.
5020 It might give better code, or might not; worth an experiment? */
5021 /* If this is an optional reload, we can't inherit from earlier insns
5022 until we are sure that any non-optional reloads have been allocated.
5023 The following code takes advantage of the fact that optional reloads
5024 are at the end of reload_order. */
5025 if (reload_optional[r] != 0)
5026 for (i = 0; i < j; i++)
5027 if ((reload_out[reload_order[i]] != 0
5028 || reload_in[reload_order[i]] != 0
5029 || reload_secondary_p[reload_order[i]])
5030 && ! reload_optional[reload_order[i]]
5031 && reload_reg_rtx[reload_order[i]] == 0)
5032 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5033#endif
5034
5035 /* First see if this pseudo is already available as reloaded
5036 for a previous insn. We cannot try to inherit for reloads
5037 that are smaller than the maximum number of registers needed
5038 for groups unless the register we would allocate cannot be used
5039 for the groups.
5040
5041 We could check here to see if this is a secondary reload for
5042 an object that is already in a register of the desired class.
5043 This would avoid the need for the secondary reload register.
5044 But this is complex because we can't easily determine what
5045 objects might want to be loaded via this reload. So let a register
5046 be allocated here. In `emit_reload_insns' we suppress one of the
5047 loads in the case described above. */
5048
5049 if (inheritance)
5050 {
5051 register int regno = -1;
db660765 5052 enum machine_mode mode;
32131a9c
RK
5053
5054 if (reload_in[r] == 0)
5055 ;
5056 else if (GET_CODE (reload_in[r]) == REG)
db660765
TW
5057 {
5058 regno = REGNO (reload_in[r]);
5059 mode = GET_MODE (reload_in[r]);
5060 }
32131a9c 5061 else if (GET_CODE (reload_in_reg[r]) == REG)
db660765
TW
5062 {
5063 regno = REGNO (reload_in_reg[r]);
5064 mode = GET_MODE (reload_in_reg[r]);
5065 }
32131a9c
RK
5066#if 0
5067 /* This won't work, since REGNO can be a pseudo reg number.
5068 Also, it takes much more hair to keep track of all the things
5069 that can invalidate an inherited reload of part of a pseudoreg. */
5070 else if (GET_CODE (reload_in[r]) == SUBREG
5071 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5072 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5073#endif
5074
5075 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5076 {
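	      /* I indexes spill_regs for the hard register that last held
		 a reload of this pseudo; the tests below verify that it
		 still contains that value and may be reused here.  */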
5077 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5078
5079 if (reg_reloaded_contents[i] == regno
db660765
TW
5080 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5081 >= GET_MODE_SIZE (mode))
32131a9c
RK
5082 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5083 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5084 spill_regs[i])
5085 && (reload_nregs[r] == max_group_size
5086 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5087 spill_regs[i]))
546b63fb
RK
5088 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5089 reload_when_needed[r])
32131a9c 5090 && reload_reg_free_before_p (spill_regs[i],
546b63fb 5091 reload_opnum[r],
32131a9c
RK
5092 reload_when_needed[r]))
5093 {
5094 /* If a group is needed, verify that all the subsequent
5095 registers still have their values intact. */
5096 int nr
5097 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5098 int k;
5099
5100 for (k = 1; k < nr; k++)
5101 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5102 != regno)
5103 break;
5104
5105 if (k == nr)
5106 {
c74fa651
RS
5107 int i1;
5108
5109 /* We found a register that contains the
5110 value we need. If this register is the
5111 same as an `earlyclobber' operand of the
5112 current insn, just mark it as a place to
5113 reload from since we can't use it as the
5114 reload register itself. */
5115
5116 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5117 if (reg_overlap_mentioned_for_reload_p
5118 (reg_last_reload_reg[regno],
5119 reload_earlyclobbers[i1]))
5120 break;
5121
8908158d
RS
5122 if (i1 != n_earlyclobbers
5123 /* Don't really use the inherited spill reg
5124 if we need it wider than we've got it. */
5125 || (GET_MODE_SIZE (reload_mode[r])
5126 > GET_MODE_SIZE (mode)))
c74fa651
RS
5127 reload_override_in[r] = reg_last_reload_reg[regno];
5128 else
5129 {
54c40e68 5130 int k;
c74fa651
RS
5131 /* We can use this as a reload reg. */
5132 /* Mark the register as in use for this part of
5133 the insn. */
5134 mark_reload_reg_in_use (spill_regs[i],
5135 reload_opnum[r],
5136 reload_when_needed[r],
5137 reload_mode[r]);
5138 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5139 reload_inherited[r] = 1;
5140 reload_inheritance_insn[r]
5141 = reg_reloaded_insn[i];
5142 reload_spill_index[r] = i;
54c40e68
RS
5143 for (k = 0; k < nr; k++)
5144 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5145 spill_regs[i + k]);
c74fa651 5146 }
32131a9c
RK
5147 }
5148 }
5149 }
5150 }
5151
5152 /* Here's another way to see if the value is already lying around. */
5153 if (inheritance
5154 && reload_in[r] != 0
5155 && ! reload_inherited[r]
5156 && reload_out[r] == 0
5157 && (CONSTANT_P (reload_in[r])
5158 || GET_CODE (reload_in[r]) == PLUS
5159 || GET_CODE (reload_in[r]) == REG
5160 || GET_CODE (reload_in[r]) == MEM)
5161 && (reload_nregs[r] == max_group_size
5162 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5163 {
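	      /* Scan backward from INSN for a register (or suitable rtx)
		 that already holds this value; if one is found we may be
		 able to use it directly instead of loading it again.  */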
5164 register rtx equiv
5165 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
fb3821f7 5166 -1, NULL_PTR, 0, reload_mode[r]);
32131a9c
RK
5167 int regno;
5168
5169 if (equiv != 0)
5170 {
5171 if (GET_CODE (equiv) == REG)
5172 regno = REGNO (equiv);
5173 else if (GET_CODE (equiv) == SUBREG)
5174 {
f8a9e02b
RK
5175 /* This must be a SUBREG of a hard register.
5176 Make a new REG since this might be used in an
5177 address and not all machines support SUBREGs
5178 there. */
5179 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5180 equiv = gen_rtx (REG, reload_mode[r], regno);
32131a9c
RK
5181 }
5182 else
5183 abort ();
5184 }
5185
5186 /* If we found a spill reg, reject it unless it is free
5187 and of the desired class. */
5188 if (equiv != 0
5189 && ((spill_reg_order[regno] >= 0
546b63fb 5190 && ! reload_reg_free_before_p (regno, reload_opnum[r],
32131a9c
RK
5191 reload_when_needed[r]))
5192 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5193 regno)))
5194 equiv = 0;
5195
5196 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5197 equiv = 0;
5198
5199 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5200 equiv = 0;
5201
5202 /* We found a register that contains the value we need.
5203 If this register is the same as an `earlyclobber' operand
5204 of the current insn, just mark it as a place to reload from
5205 since we can't use it as the reload register itself. */
5206
5207 if (equiv != 0)
5208 for (i = 0; i < n_earlyclobbers; i++)
bfa30b22
RK
5209 if (reg_overlap_mentioned_for_reload_p (equiv,
5210 reload_earlyclobbers[i]))
32131a9c
RK
5211 {
5212 reload_override_in[r] = equiv;
5213 equiv = 0;
5214 break;
5215 }
5216
5217 /* JRV: If the equiv register we have found is explicitly
5218 clobbered in the current insn, mark but don't use, as above. */
5219
5220 if (equiv != 0 && regno_clobbered_p (regno, insn))
5221 {
5222 reload_override_in[r] = equiv;
5223 equiv = 0;
5224 }
5225
5226 /* If we found an equivalent reg, say no code need be generated
5227 to load it, and use it as our reload reg. */
3ec2ea3e 5228 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
32131a9c
RK
5229 {
5230 reload_reg_rtx[r] = equiv;
5231 reload_inherited[r] = 1;
5232 /* If it is a spill reg,
5233 mark the spill reg as in use for this insn. */
5234 i = spill_reg_order[regno];
5235 if (i >= 0)
be7ae2a4 5236 {
54c40e68
RS
5237 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5238 int k;
be7ae2a4
RK
5239 mark_reload_reg_in_use (regno, reload_opnum[r],
5240 reload_when_needed[r],
5241 reload_mode[r]);
54c40e68
RS
5242 for (k = 0; k < nr; k++)
5243 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
be7ae2a4 5244 }
32131a9c
RK
5245 }
5246 }
5247
5248 /* If we found a register to use already, or if this is an optional
5249 reload, we are done. */
5250 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5251 continue;
5252
5253#if 0 /* No longer needed for correct operation. Might or might not
5254 give better code on the average. Want to experiment? */
5255
5256 /* See if there is a later reload that has a class different from our
5257 class that intersects our class or that requires less register
5258 than our reload. If so, we must allocate a register to this
5259 reload now, since that reload might inherit a previous reload
5260 and take the only available register in our class. Don't do this
5261 for optional reloads since they will force all previous reloads
5262 to be allocated. Also don't do this for reloads that have been
5263 turned off. */
5264
5265 for (i = j + 1; i < n_reloads; i++)
5266 {
5267 int s = reload_order[i];
5268
d45cf215
RS
5269 if ((reload_in[s] == 0 && reload_out[s] == 0
5270 && ! reload_secondary_p[s])
32131a9c
RK
5271 || reload_optional[s])
5272 continue;
5273
5274 if ((reload_reg_class[s] != reload_reg_class[r]
5275 && reg_classes_intersect_p (reload_reg_class[r],
5276 reload_reg_class[s]))
5277 || reload_nregs[s] < reload_nregs[r])
5278 break;
5279 }
5280
5281 if (i == n_reloads)
5282 continue;
5283
5284 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5285#endif
5286 }
5287
5288 /* Now allocate reload registers for anything non-optional that
5289 didn't get one yet. */
5290 for (j = 0; j < n_reloads; j++)
5291 {
5292 register int r = reload_order[j];
5293
5294 /* Ignore reloads that got marked inoperative. */
5295 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5296 continue;
5297
5298 /* Skip reloads that already have a register allocated or are
5299 optional. */
5300 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5301 continue;
5302
5303 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5304 break;
5305 }
5306
5307 /* If that loop got all the way, we have won. */
5308 if (j == n_reloads)
5309 break;
5310
5311 fail:
5312 /* Loop around and try without any inheritance. */
5313 /* First undo everything done by the failed attempt
5314 to allocate with inheritance. */
5315 bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
5316 bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
5317 bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
5318 sizeof reload_inheritance_insn);
5319 bcopy (save_reload_override_in, reload_override_in,
5320 sizeof reload_override_in);
5321 bcopy (save_reload_spill_index, reload_spill_index,
5322 sizeof reload_spill_index);
5323 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5324 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
32131a9c
RK
5325 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5326 save_reload_reg_used_in_op_addr);
893bc853
RK
5327 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5328 save_reload_reg_used_in_op_addr_reload);
546b63fb
RK
5329 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5330 save_reload_reg_used_in_insn);
5331 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5332 save_reload_reg_used_in_other_addr);
5333
5334 for (i = 0; i < reload_n_operands; i++)
5335 {
5336 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5337 save_reload_reg_used_in_input[i]);
5338 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5339 save_reload_reg_used_in_output[i]);
5340 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5341 save_reload_reg_used_in_input_addr[i]);
5342 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5343 save_reload_reg_used_in_output_addr[i]);
5344 }
32131a9c
RK
5345 }
5346
5347 /* If we thought we could inherit a reload, because it seemed that
5348 nothing else wanted the same reload register earlier in the insn,
5349 verify that assumption, now that all reloads have been assigned. */
5350
5351 for (j = 0; j < n_reloads; j++)
5352 {
5353 register int r = reload_order[j];
5354
5355 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5356 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
546b63fb 5357 reload_opnum[r],
32131a9c
RK
5358 reload_when_needed[r]))
5359 reload_inherited[r] = 0;
5360
5361 /* If we found a better place to reload from,
5362 validate it in the same fashion, if it is a reload reg. */
5363 if (reload_override_in[r]
5364 && (GET_CODE (reload_override_in[r]) == REG
5365 || GET_CODE (reload_override_in[r]) == SUBREG))
5366 {
5367 int regno = true_regnum (reload_override_in[r]);
5368 if (spill_reg_order[regno] >= 0
546b63fb
RK
5369 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5370 reload_when_needed[r]))
32131a9c
RK
5371 reload_override_in[r] = 0;
5372 }
5373 }
5374
5375 /* Now that reload_override_in is known valid,
5376 actually override reload_in. */
5377 for (j = 0; j < n_reloads; j++)
5378 if (reload_override_in[j])
5379 reload_in[j] = reload_override_in[j];
5380
5381 /* If this reload won't be done because it has been cancelled or is
5382 optional and not inherited, clear reload_reg_rtx so other
5383 routines (such as subst_reloads) don't get confused. */
5384 for (j = 0; j < n_reloads; j++)
be7ae2a4
RK
5385 if (reload_reg_rtx[j] != 0
5386 && ((reload_optional[j] && ! reload_inherited[j])
5387 || (reload_in[j] == 0 && reload_out[j] == 0
5388 && ! reload_secondary_p[j])))
5389 {
5390 int regno = true_regnum (reload_reg_rtx[j]);
5391
5392 if (spill_reg_order[regno] >= 0)
5393 clear_reload_reg_in_use (regno, reload_opnum[j],
5394 reload_when_needed[j], reload_mode[j]);
5395 reload_reg_rtx[j] = 0;
5396 }
32131a9c
RK
5397
5398 /* Record which pseudos and which spill regs have output reloads. */
5399 for (j = 0; j < n_reloads; j++)
5400 {
5401 register int r = reload_order[j];
5402
5403 i = reload_spill_index[r];
5404
5405 /* I is nonneg if this reload used one of the spill regs.
5406 If reload_reg_rtx[r] is 0, this is an optional reload
5407 that we opted to ignore. */
5408 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5409 && reload_reg_rtx[r] != 0)
5410 {
5411 register int nregno = REGNO (reload_out[r]);
372e033b
RS
5412 int nr = 1;
5413
5414 if (nregno < FIRST_PSEUDO_REGISTER)
5415 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
32131a9c
RK
5416
5417 while (--nr >= 0)
372e033b
RS
5418 reg_has_output_reload[nregno + nr] = 1;
5419
5420 if (i >= 0)
32131a9c 5421 {
372e033b
RS
5422 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5423 while (--nr >= 0)
32131a9c
RK
5424 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5425 }
5426
5427 if (reload_when_needed[r] != RELOAD_OTHER
546b63fb
RK
5428 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5429 && reload_when_needed[r] != RELOAD_FOR_INSN)
32131a9c
RK
5430 abort ();
5431 }
5432 }
5433}
5434\f
546b63fb
RK
5435/* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5436 reloads of the same item for fear that we might not have enough reload
5437 registers. However, normally they will get the same reload register
5438 and hence actually need not be loaded twice.
5439
5440 Here we check for the most common case of this phenomenon: when we have
5441 a number of reloads for the same object, each of which were allocated
5442 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5443 reload, and is not modified in the insn itself. If we find such,
5444 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5445 This will not increase the number of spill registers needed and will
5446 prevent redundant code. */
5447
5448#ifdef SMALL_REGISTER_CLASSES
5449
5450static void
5451merge_assigned_reloads (insn)
5452 rtx insn;
5453{
5454 int i, j;
5455
5456 /* Scan all the reloads looking for ones that only load values and
5457 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5458 assigned and not modified by INSN. */
5459
5460 for (i = 0; i < n_reloads; i++)
5461 {
5462 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5463 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5464 || reg_set_p (reload_reg_rtx[i], insn))
5465 continue;
5466
5467 /* Look at all other reloads. Ensure that the only use of this
5468 reload_reg_rtx is in a reload that just loads the same value
5469 as we do. Note that any secondary reloads must be of the identical
5470 class since the values, modes, and result registers are the
5471 same, so we need not do anything with any secondary reloads. */
5472
5473 for (j = 0; j < n_reloads; j++)
5474 {
5475 if (i == j || reload_reg_rtx[j] == 0
5476 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5477 reload_reg_rtx[i]))
5478 continue;
5479
 5480	  /* If the reload regs aren't exactly the same (e.g., different modes)
5481 or if the values are different, we can't merge anything with this
5482 reload register. */
5483
5484 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5485 || reload_out[j] != 0 || reload_in[j] == 0
5486 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5487 break;
5488 }
5489
5490 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5491 we, in fact, found any matching reloads. */
5492
5493 if (j == n_reloads)
5494 {
5495 for (j = 0; j < n_reloads; j++)
5496 if (i != j && reload_reg_rtx[j] != 0
5497 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5498 {
5499 reload_when_needed[i] = RELOAD_OTHER;
5500 reload_in[j] = 0;
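		/* transfer_replacements presumably re-points the recorded
		   replacements of reload J at reload I, so the merged
		   reload substitutes everywhere J would have.  */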
5501 transfer_replacements (i, j);
5502 }
5503
5504 /* If this is now RELOAD_OTHER, look for any reloads that load
5505 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5506 if they were for inputs, RELOAD_OTHER for outputs. Note that
5507 this test is equivalent to looking for reloads for this operand
5508 number. */
5509
5510 if (reload_when_needed[i] == RELOAD_OTHER)
5511 for (j = 0; j < n_reloads; j++)
5512 if (reload_in[j] != 0
 5513		  && reload_when_needed[j] != RELOAD_OTHER
5514 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5515 reload_in[i]))
5516 reload_when_needed[j]
5517 = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5518 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5519 }
5520 }
5521}
 5522#endif /* SMALL_REGISTER_CLASSES */
5523\f
32131a9c
RK
5524/* Output insns to reload values in and out of the chosen reload regs. */
5525
5526static void
5527emit_reload_insns (insn)
5528 rtx insn;
5529{
5530 register int j;
546b63fb
RK
5531 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5532 rtx other_input_address_reload_insns = 0;
5533 rtx other_input_reload_insns = 0;
5534 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5535 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5536 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5537 rtx operand_reload_insns = 0;
893bc853 5538 rtx other_operand_reload_insns = 0;
32131a9c 5539 rtx following_insn = NEXT_INSN (insn);
a8efe40d 5540 rtx before_insn = insn;
32131a9c
RK
5541 int special;
5542 /* Values to be put in spill_reg_store are put here first. */
5543 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5544
546b63fb
RK
5545 for (j = 0; j < reload_n_operands; j++)
5546 input_reload_insns[j] = input_address_reload_insns[j]
5547 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5548
32131a9c
RK
5549 /* Now output the instructions to copy the data into and out of the
5550 reload registers. Do these in the order that the reloads were reported,
5551 since reloads of base and index registers precede reloads of operands
5552 and the operands may need the base and index registers reloaded. */
5553
5554 for (j = 0; j < n_reloads; j++)
5555 {
5556 register rtx old;
5557 rtx oldequiv_reg = 0;
32131a9c
RK
5558 rtx store_insn = 0;
5559
5560 old = reload_in[j];
5561 if (old != 0 && ! reload_inherited[j]
5562 && ! rtx_equal_p (reload_reg_rtx[j], old)
5563 && reload_reg_rtx[j] != 0)
5564 {
5565 register rtx reloadreg = reload_reg_rtx[j];
5566 rtx oldequiv = 0;
5567 enum machine_mode mode;
546b63fb 5568 rtx *where;
32131a9c
RK
5569
5570 /* Determine the mode to reload in.
5571 This is very tricky because we have three to choose from.
5572 There is the mode the insn operand wants (reload_inmode[J]).
5573 There is the mode of the reload register RELOADREG.
5574 There is the intrinsic mode of the operand, which we could find
5575 by stripping some SUBREGs.
5576 It turns out that RELOADREG's mode is irrelevant:
5577 we can change that arbitrarily.
5578
5579 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5580 then the reload reg may not support QImode moves, so use SImode.
5581 If foo is in memory due to spilling a pseudo reg, this is safe,
5582 because the QImode value is in the least significant part of a
5583 slot big enough for a SImode. If foo is some other sort of
5584 memory reference, then it is impossible to reload this case,
5585 so previous passes had better make sure this never happens.
5586
5587 Then consider a one-word union which has SImode and one of its
5588 members is a float, being fetched as (SUBREG:SF union:SI).
5589 We must fetch that as SFmode because we could be loading into
5590 a float-only register. In this case OLD's mode is correct.
5591
5592 Consider an immediate integer: it has VOIDmode. Here we need
5593 to get a mode from something else.
5594
5595 In some cases, there is a fourth mode, the operand's
5596 containing mode. If the insn specifies a containing mode for
5597 this operand, it overrides all others.
5598
5599 I am not sure whether the algorithm here is always right,
5600 but it does the right things in those cases. */
5601
5602 mode = GET_MODE (old);
5603 if (mode == VOIDmode)
5604 mode = reload_inmode[j];
32131a9c
RK
5605
5606#ifdef SECONDARY_INPUT_RELOAD_CLASS
5607 /* If we need a secondary register for this operation, see if
5608 the value is already in a register in that class. Don't
5609 do this if the secondary register will be used as a scratch
5610 register. */
5611
b80bba27
RK
5612 if (reload_secondary_in_reload[j] >= 0
5613 && reload_secondary_in_icode[j] == CODE_FOR_nothing
58b1581b 5614 && optimize)
32131a9c
RK
5615 oldequiv
5616 = find_equiv_reg (old, insn,
b80bba27 5617 reload_reg_class[reload_secondary_in_reload[j]],
fb3821f7 5618 -1, NULL_PTR, 0, mode);
32131a9c
RK
5619#endif
5620
5621 /* If reloading from memory, see if there is a register
5622 that already holds the same value. If so, reload from there.
5623 We can pass 0 as the reload_reg_p argument because
5624 any other reload has either already been emitted,
5625 in which case find_equiv_reg will see the reload-insn,
5626 or has yet to be emitted, in which case it doesn't matter
5627 because we will use this equiv reg right away. */
5628
58b1581b 5629 if (oldequiv == 0 && optimize
32131a9c
RK
5630 && (GET_CODE (old) == MEM
5631 || (GET_CODE (old) == REG
5632 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5633 && reg_renumber[REGNO (old)] < 0)))
546b63fb 5634 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
fb3821f7 5635 -1, NULL_PTR, 0, mode);
32131a9c
RK
5636
5637 if (oldequiv)
5638 {
5639 int regno = true_regnum (oldequiv);
5640
5641 /* If OLDEQUIV is a spill register, don't use it for this
5642 if any other reload needs it at an earlier stage of this insn
a8fdc208 5643 or at this stage. */
32131a9c 5644 if (spill_reg_order[regno] >= 0
546b63fb
RK
5645 && (! reload_reg_free_p (regno, reload_opnum[j],
5646 reload_when_needed[j])
5647 || ! reload_reg_free_before_p (regno, reload_opnum[j],
32131a9c
RK
5648 reload_when_needed[j])))
5649 oldequiv = 0;
5650
5651 /* If OLDEQUIV is not a spill register,
5652 don't use it if any other reload wants it. */
5653 if (spill_reg_order[regno] < 0)
5654 {
5655 int k;
5656 for (k = 0; k < n_reloads; k++)
5657 if (reload_reg_rtx[k] != 0 && k != j
bfa30b22
RK
5658 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5659 oldequiv))
32131a9c
RK
5660 {
5661 oldequiv = 0;
5662 break;
5663 }
5664 }
546b63fb
RK
5665
5666 /* If it is no cheaper to copy from OLDEQUIV into the
5667 reload register than it would be to move from memory,
5668 don't use it. Likewise, if we need a secondary register
5669 or memory. */
5670
5671 if (oldequiv != 0
5672 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5673 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5674 reload_reg_class[j])
5675 >= MEMORY_MOVE_COST (mode)))
5676#ifdef SECONDARY_INPUT_RELOAD_CLASS
5677 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5678 mode, oldequiv)
5679 != NO_REGS)
5680#endif
5681#ifdef SECONDARY_MEMORY_NEEDED
5682 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5683 REGNO_REG_CLASS (regno),
5684 mode)
5685#endif
5686 ))
5687 oldequiv = 0;
32131a9c
RK
5688 }
5689
5690 if (oldequiv == 0)
5691 oldequiv = old;
5692 else if (GET_CODE (oldequiv) == REG)
5693 oldequiv_reg = oldequiv;
5694 else if (GET_CODE (oldequiv) == SUBREG)
5695 oldequiv_reg = SUBREG_REG (oldequiv);
5696
76182796
RK
5697 /* If we are reloading from a register that was recently stored in
5698 with an output-reload, see if we can prove there was
5699 actually no need to store the old value in it. */
5700
5701 if (optimize && GET_CODE (oldequiv) == REG
5702 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5703 && spill_reg_order[REGNO (oldequiv)] >= 0
c95c0732 5704 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
8aea655f 5705 && find_reg_note (insn, REG_DEAD, reload_in[j])
76182796 5706 /* This is unsafe if operand occurs more than once in current
b87b7ecd 5707 insn. Perhaps some occurrences weren't reloaded. */
c95c0732 5708 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
76182796
RK
5709 delete_output_reload
5710 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5711
32131a9c 5712 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
3abe6f90
RK
5713 then load RELOADREG from OLDEQUIV. Note that we cannot use
5714 gen_lowpart_common since it can do the wrong thing when
5715 RELOADREG has a multi-word mode. Note that RELOADREG
5716 must always be a REG here. */
32131a9c
RK
5717
5718 if (GET_MODE (reloadreg) != mode)
3abe6f90 5719 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
32131a9c
RK
5720 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5721 oldequiv = SUBREG_REG (oldequiv);
5722 if (GET_MODE (oldequiv) != VOIDmode
5723 && mode != GET_MODE (oldequiv))
5724 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5725
546b63fb 5726 /* Switch to the right place to emit the reload insns. */
32131a9c
RK
5727 switch (reload_when_needed[j])
5728 {
32131a9c 5729 case RELOAD_OTHER:
546b63fb
RK
5730 where = &other_input_reload_insns;
5731 break;
5732 case RELOAD_FOR_INPUT:
5733 where = &input_reload_insns[reload_opnum[j]];
32131a9c 5734 break;
546b63fb
RK
5735 case RELOAD_FOR_INPUT_ADDRESS:
5736 where = &input_address_reload_insns[reload_opnum[j]];
32131a9c 5737 break;
546b63fb
RK
5738 case RELOAD_FOR_OUTPUT_ADDRESS:
5739 where = &output_address_reload_insns[reload_opnum[j]];
32131a9c
RK
5740 break;
5741 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
5742 where = &operand_reload_insns;
5743 break;
893bc853
RK
5744 case RELOAD_FOR_OPADDR_ADDR:
5745 where = &other_operand_reload_insns;
5746 break;
546b63fb
RK
5747 case RELOAD_FOR_OTHER_ADDRESS:
5748 where = &other_input_address_reload_insns;
5749 break;
5750 default:
5751 abort ();
32131a9c
RK
5752 }
5753
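	  /* Emit the reload insns for this case into the sequence chosen
	     above; the sequence is captured at the matching end_sequence
	     below and presumably spliced around INSN once all reloads
	     for this insn have been processed.  */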
546b63fb 5754 push_to_sequence (*where);
32131a9c
RK
5755 special = 0;
5756
5757 /* Auto-increment addresses must be reloaded in a special way. */
5758 if (GET_CODE (oldequiv) == POST_INC
5759 || GET_CODE (oldequiv) == POST_DEC
5760 || GET_CODE (oldequiv) == PRE_INC
5761 || GET_CODE (oldequiv) == PRE_DEC)
5762 {
 5763	      /* We are not going to bother supporting the case where an
5764 incremented register can't be copied directly from
5765 OLDEQUIV since this seems highly unlikely. */
b80bba27 5766 if (reload_secondary_in_reload[j] >= 0)
32131a9c
RK
5767 abort ();
5768 /* Prevent normal processing of this reload. */
5769 special = 1;
5770 /* Output a special code sequence for this case. */
546b63fb 5771 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
32131a9c
RK
5772 }
5773
5774 /* If we are reloading a pseudo-register that was set by the previous
5775 insn, see if we can get rid of that pseudo-register entirely
5776 by redirecting the previous insn into our reload register. */
5777
5778 else if (optimize && GET_CODE (old) == REG
5779 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5780 && dead_or_set_p (insn, old)
5781 /* This is unsafe if some other reload
5782 uses the same reg first. */
546b63fb
RK
5783 && reload_reg_free_before_p (REGNO (reloadreg),
5784 reload_opnum[j],
5785 reload_when_needed[j]))
32131a9c
RK
5786 {
5787 rtx temp = PREV_INSN (insn);
5788 while (temp && GET_CODE (temp) == NOTE)
5789 temp = PREV_INSN (temp);
5790 if (temp
5791 && GET_CODE (temp) == INSN
5792 && GET_CODE (PATTERN (temp)) == SET
5793 && SET_DEST (PATTERN (temp)) == old
5794 /* Make sure we can access insn_operand_constraint. */
5795 && asm_noperands (PATTERN (temp)) < 0
5796 /* This is unsafe if prev insn rejects our reload reg. */
5797 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5798 reloadreg)
5799 /* This is unsafe if operand occurs more than once in current
5800 insn. Perhaps some occurrences aren't reloaded. */
5801 && count_occurrences (PATTERN (insn), old) == 1
5802 /* Don't risk splitting a matching pair of operands. */
5803 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5804 {
5805 /* Store into the reload register instead of the pseudo. */
5806 SET_DEST (PATTERN (temp)) = reloadreg;
5807 /* If these are the only uses of the pseudo reg,
5808 pretend for GDB it lives in the reload reg we used. */
5809 if (reg_n_deaths[REGNO (old)] == 1
5810 && reg_n_sets[REGNO (old)] == 1)
5811 {
5812 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5813 alter_reg (REGNO (old), -1);
5814 }
5815 special = 1;
5816 }
5817 }
5818
546b63fb
RK
5819 /* We can't do that, so output an insn to load RELOADREG. */
5820
32131a9c
RK
5821 if (! special)
5822 {
5823#ifdef SECONDARY_INPUT_RELOAD_CLASS
5824 rtx second_reload_reg = 0;
5825 enum insn_code icode;
5826
5827 /* If we have a secondary reload, pick up the secondary register
d445b551
RK
5828 and icode, if any. If OLDEQUIV and OLD are different or
5829 if this is an in-out reload, recompute whether or not we
5830 still need a secondary register and what the icode should
5831 be. If we still need a secondary register and the class or
5832 icode is different, go back to reloading from OLD if using
5833 OLDEQUIV means that we got the wrong type of register. We
5834 cannot have different class or icode due to an in-out reload
5835 because we don't make such reloads when both the input and
5836 output need secondary reload registers. */
32131a9c 5837
b80bba27 5838 if (reload_secondary_in_reload[j] >= 0)
32131a9c 5839 {
b80bba27 5840 int secondary_reload = reload_secondary_in_reload[j];
1554c2c6
RK
5841 rtx real_oldequiv = oldequiv;
5842 rtx real_old = old;
5843
5844 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5845 and similarly for OLD.
b80bba27 5846 See comments in get_secondary_reload in reload.c. */
1554c2c6
RK
5847 if (GET_CODE (oldequiv) == REG
5848 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5849 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5850 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5851
5852 if (GET_CODE (old) == REG
5853 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5854 && reg_equiv_mem[REGNO (old)] != 0)
5855 real_old = reg_equiv_mem[REGNO (old)];
5856
32131a9c 5857 second_reload_reg = reload_reg_rtx[secondary_reload];
b80bba27 5858 icode = reload_secondary_in_icode[j];
32131a9c 5859
d445b551
RK
5860 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5861 || (reload_in[j] != 0 && reload_out[j] != 0))
32131a9c
RK
5862 {
5863 enum reg_class new_class
5864 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
1554c2c6 5865 mode, real_oldequiv);
32131a9c
RK
5866
5867 if (new_class == NO_REGS)
5868 second_reload_reg = 0;
5869 else
5870 {
5871 enum insn_code new_icode;
5872 enum machine_mode new_mode;
5873
5874 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5875 REGNO (second_reload_reg)))
1554c2c6 5876 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
5877 else
5878 {
5879 new_icode = reload_in_optab[(int) mode];
5880 if (new_icode != CODE_FOR_nothing
5881 && ((insn_operand_predicate[(int) new_icode][0]
a8fdc208 5882 && ! ((*insn_operand_predicate[(int) new_icode][0])
32131a9c 5883 (reloadreg, mode)))
a8fdc208
RS
5884 || (insn_operand_predicate[(int) new_icode][1]
5885 && ! ((*insn_operand_predicate[(int) new_icode][1])
1554c2c6 5886 (real_oldequiv, mode)))))
32131a9c
RK
5887 new_icode = CODE_FOR_nothing;
5888
5889 if (new_icode == CODE_FOR_nothing)
5890 new_mode = mode;
5891 else
196ddf8a 5892 new_mode = insn_operand_mode[(int) new_icode][2];
32131a9c
RK
5893
5894 if (GET_MODE (second_reload_reg) != new_mode)
5895 {
5896 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5897 new_mode))
1554c2c6 5898 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
5899 else
5900 second_reload_reg
3aaa90c7
MM
5901 = gen_rtx (REG, new_mode,
5902 REGNO (second_reload_reg));
32131a9c
RK
5903 }
5904 }
5905 }
5906 }
5907
5908 /* If we still need a secondary reload register, check
5909 to see if it is being used as a scratch or intermediate
1554c2c6
RK
5910 register and generate code appropriately. If we need
5911 a scratch register, use REAL_OLDEQUIV since the form of
5912 the insn may depend on the actual address if it is
5913 a MEM. */
32131a9c
RK
5914
5915 if (second_reload_reg)
5916 {
5917 if (icode != CODE_FOR_nothing)
5918 {
546b63fb
RK
5919 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
5920 second_reload_reg));
32131a9c
RK
5921 special = 1;
5922 }
5923 else
5924 {
5925 /* See if we need a scratch register to load the
5926 intermediate register (a tertiary reload). */
5927 enum insn_code tertiary_icode
b80bba27 5928 = reload_secondary_in_icode[secondary_reload];
32131a9c
RK
5929
5930 if (tertiary_icode != CODE_FOR_nothing)
5931 {
5932 rtx third_reload_reg
b80bba27 5933 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
32131a9c 5934
546b63fb
RK
5935 emit_insn ((GEN_FCN (tertiary_icode)
5936 (second_reload_reg, real_oldequiv,
5937 third_reload_reg)));
32131a9c
RK
5938 }
5939 else
546b63fb
RK
5940 gen_input_reload (second_reload_reg, oldequiv,
5941 reload_opnum[j],
5942 reload_when_needed[j]);
5943
5944 oldequiv = second_reload_reg;
32131a9c
RK
5945 }
5946 }
5947 }
5948#endif
5949
2d182c6f 5950 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
546b63fb
RK
5951 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
5952 reload_when_needed[j]);
32131a9c
RK
5953
5954#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5955 /* We may have to make a REG_DEAD note for the secondary reload
5956 register in the insns we just made. Find the last insn that
5957 mentioned the register. */
5958 if (! special && second_reload_reg
5959 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5960 {
5961 rtx prev;
5962
546b63fb 5963 for (prev = get_last_insn (); prev;
32131a9c
RK
5964 prev = PREV_INSN (prev))
 5965	      if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
bfa30b22
RK
5966 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5967 PATTERN (prev)))
32131a9c
RK
5968 {
5969 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5970 second_reload_reg,
5971 REG_NOTES (prev));
5972 break;
5973 }
5974 }
5975#endif
5976 }
5977
546b63fb
RK
5978 /* End this sequence. */
5979 *where = get_insns ();
5980 end_sequence ();
32131a9c
RK
5981 }
5982
5983 /* Add a note saying the input reload reg
5984 dies in this insn, if anyone cares. */
5985#ifdef PRESERVE_DEATH_INFO_REGNO_P
5986 if (old != 0
5987 && reload_reg_rtx[j] != old
5988 && reload_reg_rtx[j] != 0
5989 && reload_out[j] == 0
5990 && ! reload_inherited[j]
5991 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
5992 {
5993 register rtx reloadreg = reload_reg_rtx[j];
5994
a8fdc208 5995#if 0
32131a9c
RK
5996 /* We can't abort here because we need to support this for sched.c.
5997 It's not terrible to miss a REG_DEAD note, but we should try
5998 to figure out how to do this correctly. */
5999 /* The code below is incorrect for address-only reloads. */
6000 if (reload_when_needed[j] != RELOAD_OTHER
6001 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6002 abort ();
6003#endif
6004
6005 /* Add a death note to this insn, for an input reload. */
6006
6007 if ((reload_when_needed[j] == RELOAD_OTHER
6008 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6009 && ! dead_or_set_p (insn, reloadreg))
6010 REG_NOTES (insn)
6011 = gen_rtx (EXPR_LIST, REG_DEAD,
6012 reloadreg, REG_NOTES (insn));
6013 }
6014
6015 /* When we inherit a reload, the last marked death of the reload reg
6016 may no longer really be a death. */
6017 if (reload_reg_rtx[j] != 0
6018 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6019 && reload_inherited[j])
6020 {
6021 /* Handle inheriting an output reload.
6022 Remove the death note from the output reload insn. */
6023 if (reload_spill_index[j] >= 0
6024 && GET_CODE (reload_in[j]) == REG
6025 && spill_reg_store[reload_spill_index[j]] != 0
6026 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6027 REG_DEAD, REGNO (reload_reg_rtx[j])))
6028 remove_death (REGNO (reload_reg_rtx[j]),
6029 spill_reg_store[reload_spill_index[j]]);
6030 /* Likewise for input reloads that were inherited. */
6031 else if (reload_spill_index[j] >= 0
6032 && GET_CODE (reload_in[j]) == REG
6033 && spill_reg_store[reload_spill_index[j]] == 0
6034 && reload_inheritance_insn[j] != 0
a8fdc208 6035 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
32131a9c
RK
6036 REGNO (reload_reg_rtx[j])))
6037 remove_death (REGNO (reload_reg_rtx[j]),
6038 reload_inheritance_insn[j]);
6039 else
6040 {
6041 rtx prev;
6042
6043 /* We got this register from find_equiv_reg.
6044 Search back for its last death note and get rid of it.
6045 But don't search back too far.
6046 Don't go past a place where this reg is set,
6047 since a death note before that remains valid. */
6048 for (prev = PREV_INSN (insn);
6049 prev && GET_CODE (prev) != CODE_LABEL;
6050 prev = PREV_INSN (prev))
6051 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6052 && dead_or_set_p (prev, reload_reg_rtx[j]))
6053 {
6054 if (find_regno_note (prev, REG_DEAD,
6055 REGNO (reload_reg_rtx[j])))
6056 remove_death (REGNO (reload_reg_rtx[j]), prev);
6057 break;
6058 }
6059 }
6060 }
6061
6062 /* We might have used find_equiv_reg above to choose an alternate
6063 place from which to reload. If so, and it died, we need to remove
6064 that death and move it to one of the insns we just made. */
6065
6066 if (oldequiv_reg != 0
6067 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6068 {
6069 rtx prev, prev1;
6070
6071 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6072 prev = PREV_INSN (prev))
6073 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6074 && dead_or_set_p (prev, oldequiv_reg))
6075 {
6076 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6077 {
6078 for (prev1 = this_reload_insn;
6079 prev1; prev1 = PREV_INSN (prev1))
 6080		  if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
bfa30b22
RK
6081 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6082 PATTERN (prev1)))
32131a9c
RK
6083 {
6084 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6085 oldequiv_reg,
6086 REG_NOTES (prev1));
6087 break;
6088 }
6089 remove_death (REGNO (oldequiv_reg), prev);
6090 }
6091 break;
6092 }
6093 }
6094#endif
6095
6096 /* If we are reloading a register that was recently stored in with an
6097 output-reload, see if we can prove there was
6098 actually no need to store the old value in it. */
6099
6100 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
546b63fb 6101 && reload_in[j] != 0
32131a9c
RK
6102 && GET_CODE (reload_in[j]) == REG
6103#if 0
6104 /* There doesn't seem to be any reason to restrict this to pseudos
6105 and doing so loses in the case where we are copying from a
6106 register of the wrong class. */
6107 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6108#endif
6109 && spill_reg_store[reload_spill_index[j]] != 0
546b63fb
RK
6110 /* This is unsafe if some other reload uses the same reg first. */
6111 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6112 reload_opnum[j], reload_when_needed[j])
32131a9c
RK
6113 && dead_or_set_p (insn, reload_in[j])
6114 /* This is unsafe if operand occurs more than once in current
6115 insn. Perhaps some occurrences weren't reloaded. */
6116 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6117 delete_output_reload (insn, j,
6118 spill_reg_store[reload_spill_index[j]]);
6119
6120 /* Input-reloading is done. Now do output-reloading,
6121 storing the value from the reload-register after the main insn
6122 if reload_out[j] is nonzero.
6123
6124 ??? At some point we need to support handling output reloads of
6125 JUMP_INSNs or insns that set cc0. */
6126 old = reload_out[j];
6127 if (old != 0
6128 && reload_reg_rtx[j] != old
6129 && reload_reg_rtx[j] != 0)
6130 {
6131 register rtx reloadreg = reload_reg_rtx[j];
6132 register rtx second_reloadreg = 0;
32131a9c
RK
6133 rtx note, p;
6134 enum machine_mode mode;
6135 int special = 0;
6136
6137 /* An output operand that dies right away does need a reload,
6138 but need not be copied from it. Show the new location in the
6139 REG_UNUSED note. */
6140 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6141 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6142 {
6143 XEXP (note, 0) = reload_reg_rtx[j];
6144 continue;
6145 }
6146 else if (GET_CODE (old) == SCRATCH)
6147 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6148 but we don't want to make an output reload. */
6149 continue;
6150
6151#if 0
6152 /* Strip off of OLD any size-increasing SUBREGs such as
6153 (SUBREG:SI foo:QI 0). */
6154
6155 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6156 && (GET_MODE_SIZE (GET_MODE (old))
6157 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6158 old = SUBREG_REG (old);
6159#endif
6160
 6161	  /* If INSN is a JUMP_INSN, we can't support output reloads yet. */
6162 if (GET_CODE (insn) == JUMP_INSN)
6163 abort ();
6164
546b63fb
RK
6165 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6166
6167 /* Determine the mode to reload in.
6168 See comments above (for input reloading). */
6169
6170 mode = GET_MODE (old);
6171 if (mode == VOIDmode)
6172 {
6173 /* VOIDmode should never happen for an output. */
6174 if (asm_noperands (PATTERN (insn)) < 0)
6175 /* It's the compiler's fault. */
6176 abort ();
6177 error_for_asm (insn, "output operand is constant in `asm'");
6178 /* Prevent crash--use something we know is valid. */
6179 mode = word_mode;
6180 old = gen_rtx (REG, mode, REGNO (reloadreg));
6181 }
32131a9c 6182
32131a9c 6183 if (GET_MODE (reloadreg) != mode)
3abe6f90 6184 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6185
6186#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6187
6188 /* If we need two reload regs, set RELOADREG to the intermediate
6189 one, since it will be stored into OUT. We might need a secondary
6190 register only for an input reload, so check again here. */
6191
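 /* A rough picture of the two cases handled below (illustrative only): when
 the target supplies an insn code for the secondary output reload, RELOADREG
 acts as a scratch and a single insn stores SECOND_RELOADREG into the real
 output; otherwise RELOADREG is a true intermediate and the value travels
 second_reloadreg -> reloadreg -> OUT, the last step being emitted further
 down where the final reload insn is output.  */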
b80bba27 6192 if (reload_secondary_out_reload[j] >= 0)
32131a9c 6193 {
1554c2c6 6194 rtx real_old = old;
32131a9c 6195
6196 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6197 && reg_equiv_mem[REGNO (old)] != 0)
6198 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 6199
6200 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6201 mode, real_old)
6202 != NO_REGS))
6203 {
6204 second_reloadreg = reloadreg;
b80bba27 6205 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
32131a9c 6206
6207 /* See if RELOADREG is to be used as a scratch register
6208 or as an intermediate register. */
b80bba27 6209 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
32131a9c 6210 {
b80bba27 6211 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
546b63fb 6212 (real_old, second_reloadreg, reloadreg)));
1554c2c6 6213 special = 1;
6214 }
6215 else
6216 {
6217 /* See if we need both a scratch and intermediate reload
6218 register. */
b80bba27 6219 int secondary_reload = reload_secondary_out_reload[j];
1554c2c6 6220 enum insn_code tertiary_icode
b80bba27 6221 = reload_secondary_out_icode[secondary_reload];
1554c2c6 6222 rtx pat;
32131a9c 6223
6224 if (GET_MODE (reloadreg) != mode)
6225 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6226
6227 if (tertiary_icode != CODE_FOR_nothing)
6228 {
6229 rtx third_reloadreg
b80bba27 6230 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6231 pat = (GEN_FCN (tertiary_icode)
6232 (reloadreg, second_reloadreg, third_reloadreg));
6233 }
6234#ifdef SECONDARY_MEMORY_NEEDED
6235 /* If we need a memory location to do the move, do it that way. */
6236 else if (GET_CODE (reloadreg) == REG
6237 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
6238 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
6239 REGNO_REG_CLASS (REGNO (second_reloadreg)),
6240 GET_MODE (second_reloadreg)))
6241 {
6242 /* Get the memory to use and rewrite both registers
6243 to its mode. */
6244 rtx loc
6245 = get_secondary_mem (reloadreg,
6246 GET_MODE (second_reloadreg),
6247 reload_opnum[j],
6248 reload_when_needed[j]);
6249 rtx tmp_reloadreg;
6250
6251 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6252 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6253 REGNO (second_reloadreg));
6254
6255 if (GET_MODE (loc) != GET_MODE (reloadreg))
6256 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6257 REGNO (reloadreg));
6258 else
6259 tmp_reloadreg = reloadreg;
6260
546b63fb 6261 emit_move_insn (loc, second_reloadreg);
6262 pat = gen_move_insn (tmp_reloadreg, loc);
6263 }
6264#endif
6265 else
6266 pat = gen_move_insn (reloadreg, second_reloadreg);
6267
546b63fb 6268 emit_insn (pat);
1554c2c6 6269 }
6270 }
6271 }
6272#endif
6273
6274 /* Output the last reload insn. */
6275 if (! special)
6276 {
6277#ifdef SECONDARY_MEMORY_NEEDED
6278 /* If we need a memory location to do the move, do it that way. */
6279 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6280 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6281 REGNO_REG_CLASS (REGNO (reloadreg)),
6282 GET_MODE (reloadreg)))
6283 {
6284 /* Get the memory to use and rewrite both registers to
6285 its mode. */
6286 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6287 reload_opnum[j],
6288 reload_when_needed[j]);
6289
6290 if (GET_MODE (loc) != GET_MODE (reloadreg))
6291 reloadreg = gen_rtx (REG, GET_MODE (loc),
6292 REGNO (reloadreg));
6293
6294 if (GET_MODE (loc) != GET_MODE (old))
6295 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6296
6297 emit_insn (gen_move_insn (loc, reloadreg));
6298 emit_insn (gen_move_insn (old, loc));
6299 }
6300 else
6301#endif
546b63fb 6302 emit_insn (gen_move_insn (old, reloadreg));
0dadecf6 6303 }
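 /* Note (an informal aside): on targets that define SECONDARY_MEMORY_NEEDED
 for moves between the classes of OLD and RELOADREG, the store just above
 goes through a stack slot obtained from get_secondary_mem, i.e.
 reloadreg -> memory -> OLD, rather than as a direct register move.  */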
6304
6305#ifdef PRESERVE_DEATH_INFO_REGNO_P
6306 /* If final will look at death notes for this reg,
6307 put one on the last output-reload insn to use it. Similarly
6308 for any secondary register. */
6309 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
546b63fb 6310 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6311 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6312 && reg_overlap_mentioned_for_reload_p (reloadreg,
6313 PATTERN (p)))
6314 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6315 reloadreg, REG_NOTES (p));
6316
6317#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6318 if (! special
6319 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
546b63fb 6320 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6321 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6322 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6323 PATTERN (p)))
6324 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6325 second_reloadreg, REG_NOTES (p));
6326#endif
6327#endif
6328 /* Look at all insns we emitted, just to be safe. */
546b63fb 6329 for (p = get_insns (); p; p = NEXT_INSN (p))
6330 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6331 {
6332 /* If this output reload doesn't come from a spill reg,
6333 clear any memory of reloaded copies of the pseudo reg.
6334 If this output reload comes from a spill reg,
6335 reg_has_output_reload will make this do nothing. */
6336 note_stores (PATTERN (p), forget_old_reloads_1);
6337
6338 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6339 store_insn = p;
6340 }
6341
6342 output_reload_insns[reload_opnum[j]] = get_insns ();
6343 end_sequence ();
6344
6345 }
6346
6347 if (reload_spill_index[j] >= 0)
6348 new_spill_reg_store[reload_spill_index[j]] = store_insn;
6349 }
6350
6351 /* Now write all the insns we made for reloads in the order expected by
6352 the allocation functions. Prior to the insn being reloaded, we write
6353 the following reloads:
6354
6355 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6356
6357 RELOAD_OTHER reloads.
6358
6359 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6360 the RELOAD_FOR_INPUT reload for the operand.
6361
6362 RELOAD_FOR_OPADDR_ADDRS reloads.
6363
6364 RELOAD_FOR_OPERAND_ADDRESS reloads.
6365
6366 After the insn being reloaded, we write the following:
6367
6368 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6369 the RELOAD_FOR_OUTPUT reload for that operand. */
6370
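 /* A concrete (hypothetical) illustration of the resulting order: for an
 insn whose operand 0 needs an input address reload plus an input reload and
 whose operand 1 needs an output reload, the stream around INSN becomes:
 other-address reloads, RELOAD_OTHER reloads, operand 0 address reloads,
 operand 0 input reload, operand-address reloads, INSN, operand 1 output
 address reloads, operand 1 output reload -- matching the emit calls below.  */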
6371 emit_insns_before (other_input_address_reload_insns, before_insn);
6372 emit_insns_before (other_input_reload_insns, before_insn);
6373
6374 for (j = 0; j < reload_n_operands; j++)
6375 {
6376 emit_insns_before (input_address_reload_insns[j], before_insn);
6377 emit_insns_before (input_reload_insns[j], before_insn);
6378 }
6379
893bc853 6380 emit_insns_before (other_operand_reload_insns, before_insn);
6381 emit_insns_before (operand_reload_insns, before_insn);
6382
6383 for (j = 0; j < reload_n_operands; j++)
6384 {
6385 emit_insns_before (output_address_reload_insns[j], following_insn);
6386 emit_insns_before (output_reload_insns[j], following_insn);
6387 }
6388
6389 /* Move death notes from INSN
6390 to output-operand-address and output reload insns. */
6391#ifdef PRESERVE_DEATH_INFO_REGNO_P
6392 {
6393 rtx insn1;
6394 /* Loop over those insns, last ones first. */
6395 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6396 insn1 = PREV_INSN (insn1))
6397 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6398 {
6399 rtx source = SET_SRC (PATTERN (insn1));
6400 rtx dest = SET_DEST (PATTERN (insn1));
6401
6402 /* The note we will examine next. */
6403 rtx reg_notes = REG_NOTES (insn);
6404 /* The place that pointed to this note. */
6405 rtx *prev_reg_note = &REG_NOTES (insn);
6406
6407 /* If the note is for something used in the source of this
6408 reload insn, or in the output address, move the note. */
6409 while (reg_notes)
6410 {
6411 rtx next_reg_notes = XEXP (reg_notes, 1);
6412 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6413 && GET_CODE (XEXP (reg_notes, 0)) == REG
6414 && ((GET_CODE (dest) != REG
6415 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6416 dest))
6417 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6418 source)))
6419 {
6420 *prev_reg_note = next_reg_notes;
6421 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6422 REG_NOTES (insn1) = reg_notes;
6423 }
6424 else
6425 prev_reg_note = &XEXP (reg_notes, 1);
6426
6427 reg_notes = next_reg_notes;
6428 }
6429 }
6430 }
6431#endif
6432
6433 /* For all the spill regs newly reloaded in this instruction,
6434 record what they were reloaded from, so subsequent instructions
6435 can inherit the reloads.
6436
6437 Update spill_reg_store for the reloads of this insn.
e9e79d69 6438 Copy the elements that were updated in the loop above. */
6439
6440 for (j = 0; j < n_reloads; j++)
6441 {
6442 register int r = reload_order[j];
6443 register int i = reload_spill_index[r];
6444
6445 /* I is nonneg if this reload used one of the spill regs.
6446 If reload_reg_rtx[r] is 0, this is an optional reload
6447 that we opted to ignore.
6448
6449 Also ignore reloads that don't reach the end of the insn,
6450 since we will eventually see the one that does. */
d445b551 6451
6452 if (i >= 0 && reload_reg_rtx[r] != 0
6453 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6454 reload_when_needed[r]))
6455 {
6456 /* First, clear out memory of what used to be in this spill reg.
6457 If consecutive registers are used, clear them all. */
6458 int nr
6459 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6460 int k;
6461
6462 for (k = 0; k < nr; k++)
6463 {
6464 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6465 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6466 }
6467
6468 /* Maybe the spill reg contains a copy of reload_out. */
6469 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6470 {
6471 register int nregno = REGNO (reload_out[r]);
6472 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6473 : HARD_REGNO_NREGS (nregno,
6474 GET_MODE (reload_reg_rtx[r])));
6475
6476 spill_reg_store[i] = new_spill_reg_store[i];
32131a9c 6477 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 6478
6479 /* If NREGNO is a hard register, it may occupy more than
6480 one register. If it does, say what is in the
6481 rest of the registers assuming that both registers
6482 agree on how many words the object takes. If not,
6483 invalidate the subsequent registers. */
6484
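 /* For example (hypothetical), a DImode value reloaded through a pair of
 single-word hard registers: the loop below records the second half too,
 but only when the output and the reload register cover the same number of
 hard registers (nr == nnr); otherwise the stale entry is cleared.  */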
6485 if (nregno < FIRST_PSEUDO_REGISTER)
6486 for (k = 1; k < nnr; k++)
6487 reg_last_reload_reg[nregno + k]
6488 = (nr == nnr ? gen_rtx (REG,
6489 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6490 REGNO (reload_reg_rtx[r]) + k)
6491 : 0);
6492
6493 /* Now do the inverse operation. */
6494 for (k = 0; k < nr; k++)
6495 {
6496 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6497 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6498 : nregno + k);
6499 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6500 }
6501 }
d445b551 6502
6503 /* Maybe the spill reg contains a copy of reload_in. Only do
6504 something if there will not be an output reload for
6505 the register being reloaded. */
6506 else if (reload_out[r] == 0
6507 && reload_in[r] != 0
6508 && ((GET_CODE (reload_in[r]) == REG
6509 && ! reg_has_output_reload[REGNO (reload_in[r])]
6510 || (GET_CODE (reload_in_reg[r]) == REG
6511 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
6512 {
6513 register int nregno;
6514 int nnr;
6515
6516 if (GET_CODE (reload_in[r]) == REG)
6517 nregno = REGNO (reload_in[r]);
6518 else
6519 nregno = REGNO (reload_in_reg[r]);
6520
6521 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6522 : HARD_REGNO_NREGS (nregno,
6523 GET_MODE (reload_reg_rtx[r])));
6524
546b63fb 6525 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 6526
6527 if (nregno < FIRST_PSEUDO_REGISTER)
6528 for (k = 1; k < nnr; k++)
6529 reg_last_reload_reg[nregno + k]
6530 = (nr == nnr ? gen_rtx (REG,
6531 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6532 REGNO (reload_reg_rtx[r]) + k)
6533 : 0);
6534
6535 /* Unless we inherited this reload, show we haven't
6536 recently done a store. */
6537 if (! reload_inherited[r])
6538 spill_reg_store[i] = 0;
d445b551 6539
6540 for (k = 0; k < nr; k++)
6541 {
6542 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6543 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6544 : nregno + k);
6545 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6546 = insn;
6547 }
6548 }
6549 }
6550
6551 /* The following if-statement was #if 0'd in 1.34 (or before...).
6552 It's reenabled in 1.35 because supposedly nothing else
6553 deals with this problem. */
6554
6555 /* If a register gets output-reloaded from a non-spill register,
6556 that invalidates any previous reloaded copy of it.
6557 But forget_old_reloads_1 won't get to see it, because
6558 it thinks only about the original insn. So invalidate it here. */
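 /* For instance (hypothetical numbers): if this insn output-reloads pseudo
 42 directly from a hard register that is not one of the spill regs, any
 record that a reload register still holds pseudo 42 is stale, so the loop
 below clears the recorded reload copies for the register being stored.  */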
6559 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6560 {
6561 register int nregno = REGNO (reload_out[r]);
6562 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
6563
6564 while (num_regs-- > 0)
6565 reg_last_reload_reg[nregno + num_regs] = 0;
6566 }
6567 }
6568}
6569\f
6570/* Emit code to perform an input reload of IN to RELOADREG. IN is from
6571 operand OPNUM with reload type TYPE.
6572
3c3eeea6 6573 Returns first insn emitted. */
6574
6575rtx
546b63fb 6576gen_input_reload (reloadreg, in, opnum, type)
6577 rtx reloadreg;
6578 rtx in;
6579 int opnum;
6580 enum reload_type type;
32131a9c 6581{
546b63fb 6582 rtx last = get_last_insn ();
32131a9c 6583
a8fdc208 6584 /* How to do this reload can get quite tricky. Normally, we are being
6585 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6586 register that didn't get a hard register. In that case we can just
6587 call emit_move_insn.
6588
6589 We can also be asked to reload a PLUS that adds a register or a MEM to
6590 another register, constant or MEM. This can occur during frame pointer
6591 elimination and while reloading addresses. This case is handled by
6592 trying to emit a single insn to perform the add. If it is not valid,
6593 we use a two insn sequence.
6594
6595 Finally, we could be called to handle an 'o' constraint by putting
6596 an address into a register. In that case, we first try to do this
6597 with a named pattern of "reload_load_address". If no such pattern
6598 exists, we just emit a SET insn and hope for the best (it will normally
6599 be valid on machines that use 'o').
6600
6601 This entire process is made complex because reload will never
6602 process the insns we generate here and so we must ensure that
6603 they will fit their constraints and also by the fact that parts of
6604 IN might be being reloaded separately and replaced with spill registers.
6605 Because of this, we are, in some sense, just guessing the right approach
6606 here. The one listed above seems to work.
6607
6608 ??? At some point, this whole thing needs to be rethought. */
6609
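 /* An illustrative sketch (register numbers hypothetical): to reload
 (plus:SI (reg:SI fp) (const_int 8)) into reload register 3, the code below
 first tries the single insn (set (reg:SI 3) (plus:SI (reg:SI fp)
 (const_int 8))); if the target does not accept it, the fallback moves the
 constant into reg 3 with gen_move_insn and then adds the frame pointer with
 gen_add2_insn, exactly the two-insn sequence described above.  */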
6610 if (GET_CODE (in) == PLUS
6611 && (GET_CODE (XEXP (in, 0)) == REG
6612 || GET_CODE (XEXP (in, 0)) == MEM)
6613 && (GET_CODE (XEXP (in, 1)) == REG
6614 || CONSTANT_P (XEXP (in, 1))
6615 || GET_CODE (XEXP (in, 1)) == MEM))
32131a9c 6616 {
6617 /* We need to compute the sum of a register or a MEM and another
6618 register, constant, or MEM, and put it into the reload
6619 register. The best possible way of doing this is if the machine
6620 has a three-operand ADD insn that accepts the required operands.
6621
6622 The simplest approach is to try to generate such an insn and see if it
6623 is recognized and matches its constraints. If so, it can be used.
6624
6625 It might be better not to actually emit the insn unless it is valid,
0009eff2 6626 but we need to pass the insn as an operand to `recog' and
b36d7dd7 6627 `insn_extract' and it is simpler to emit and then delete the insn if
0009eff2 6628 not valid than to dummy things up. */
a8fdc208 6629
af929c62 6630 rtx op0, op1, tem, insn;
32131a9c 6631 int code;
a8fdc208 6632
6633 op0 = find_replacement (&XEXP (in, 0));
6634 op1 = find_replacement (&XEXP (in, 1));
6635
6636 /* Since constraint checking is strict, commutativity won't be
6637 checked, so we need to do that here to avoid spurious failure
6638 if the add instruction is two-address and the second operand
6639 of the add is the same as the reload reg, which is frequently
6640 the case. If the insn would be A = B + A, rearrange it so
6641 it will be A = A + B as constrain_operands expects. */
a8fdc208 6642
6643 if (GET_CODE (XEXP (in, 1)) == REG
6644 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
6645 tem = op0, op0 = op1, op1 = tem;
6646
6647 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6648 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
32131a9c 6649
546b63fb 6650 insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6651 code = recog_memoized (insn);
6652
6653 if (code >= 0)
6654 {
6655 insn_extract (insn);
6656 /* We want constrain operands to treat this insn strictly in
6657 its validity determination, i.e., the way it would after reload
6658 has completed. */
6659 if (constrain_operands (code, 1))
6660 return insn;
6661 }
6662
546b63fb 6663 delete_insns_since (last);
6664
6665 /* If that failed, we must use a conservative two-insn sequence.
6666 Use move to copy the constant, MEM, or pseudo register to the reload
6667 register, since "move" will be able to handle an arbitrary operand,
6668 unlike add which can't, in general. Then add the registers.
6669
6670 If there is another way to do this for a specific machine, a
6671 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6672 we emit below. */
6673
6674 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6675 || (GET_CODE (op1) == REG
6676 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6677 tem = op0, op0 = op1, op1 = tem;
32131a9c 6678
546b63fb 6679 emit_insn (gen_move_insn (reloadreg, op0));
6680
6681 /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
6682 This fixes a problem on the 32K where the stack pointer cannot
6683 be used as an operand of an add insn. */
6684
6685 if (rtx_equal_p (op0, op1))
6686 op1 = reloadreg;
6687
6688 insn = emit_insn (gen_add2_insn (reloadreg, op1));
6689
6690 /* If that failed, copy the address register to the reload register.
6691 Then add the constant to the reload register. */
6692
6693 code = recog_memoized (insn);
6694
6695 if (code >= 0)
6696 {
6697 insn_extract (insn);
6698 /* We want constrain operands to treat this insn strictly in
6699 its validity determination, i.e., the way it would after reload
6700 has completed. */
6701 if (constrain_operands (code, 1))
6702 return insn;
6703 }
6704
6705 delete_insns_since (last);
6706
6707 emit_insn (gen_move_insn (reloadreg, op1));
6708 emit_insn (gen_add2_insn (reloadreg, op0));
6709 }
6710
6711#ifdef SECONDARY_MEMORY_NEEDED
6712 /* If we need a memory location to do the move, do it that way. */
6713 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6714 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6715 REGNO_REG_CLASS (REGNO (reloadreg)),
6716 GET_MODE (reloadreg)))
6717 {
6718 /* Get the memory to use and rewrite both registers to its mode. */
546b63fb 6719 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);
6720
6721 if (GET_MODE (loc) != GET_MODE (reloadreg))
6722 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
6723
6724 if (GET_MODE (loc) != GET_MODE (in))
6725 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6726
6727 emit_insn (gen_move_insn (loc, in));
6728 emit_insn (gen_move_insn (reloadreg, loc));
6729 }
6730#endif
6731
6732 /* If IN is a simple operand, use gen_move_insn. */
6733 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
546b63fb 6734 emit_insn (gen_move_insn (reloadreg, in));
6735
6736#ifdef HAVE_reload_load_address
6737 else if (HAVE_reload_load_address)
546b63fb 6738 emit_insn (gen_reload_load_address (reloadreg, in));
6739#endif
6740
6741 /* Otherwise, just write (set RELOADREG IN) and hope for the best. */
6742 else
546b63fb 6743 emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6744
6745 /* Return the first insn emitted.
546b63fb 6746 We can not just return get_last_insn, because there may have
6747 been multiple instructions emitted. Also note that gen_move_insn may
6748 emit more than one insn itself, so we can not assume that there is one
6749 insn emitted per emit_insn_before call. */
6750
546b63fb 6751 return last ? NEXT_INSN (last) : get_insns ();
6752}
6753\f
6754/* Delete a previously made output-reload
6755 whose result we now believe is not needed.
6756 First we double-check.
6757
6758 INSN is the insn now being processed.
6759 OUTPUT_RELOAD_INSN is the insn of the output reload.
6760 J is the reload-number for this insn. */
6761
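/* Informally (a restatement, not from the original comment): the double-check
   below requires that no insn between the output reload and INSN mentions the
   pseudo, that no label or jump intervenes, and that the target does not
   forbid omitting the store (cannot_omit_stores).  Only if the pseudo
   additionally dies here and was confined to one basic block are its
   remaining stores and its stack slot discarded as well.  */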
6762static void
6763delete_output_reload (insn, j, output_reload_insn)
6764 rtx insn;
6765 int j;
6766 rtx output_reload_insn;
6767{
6768 register rtx i1;
6769
6770 /* Get the raw pseudo-register referred to. */
6771
6772 rtx reg = reload_in[j];
6773 while (GET_CODE (reg) == SUBREG)
6774 reg = SUBREG_REG (reg);
6775
6776 /* If the pseudo-reg we are reloading is no longer referenced
6777 anywhere between the store into it and here,
6778 and no jumps or labels intervene, then the value can get
6779 here through the reload reg alone.
6780 Otherwise, give up--return. */
6781 for (i1 = NEXT_INSN (output_reload_insn);
6782 i1 != insn; i1 = NEXT_INSN (i1))
6783 {
6784 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6785 return;
6786 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6787 && reg_mentioned_p (reg, PATTERN (i1)))
6788 return;
6789 }
6790
6791 if (cannot_omit_stores[REGNO (reg)])
6792 return;
6793
6794 /* If this insn will store in the pseudo again,
6795 the previous store can be removed. */
6796 if (reload_out[j] == reload_in[j])
6797 delete_insn (output_reload_insn);
6798
6799 /* See if the pseudo reg has been completely replaced
6800 with reload regs. If so, delete the store insn
6801 and forget we had a stack slot for the pseudo. */
6802 else if (reg_n_deaths[REGNO (reg)] == 1
6803 && reg_basic_block[REGNO (reg)] >= 0
6804 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6805 {
6806 rtx i2;
6807
6808 /* We know that it was used only between here
6809 and the beginning of the current basic block.
6810 (We also know that the last use before INSN was
6811 the output reload we are thinking of deleting, but never mind that.)
6812 Search that range; see if any ref remains. */
6813 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6814 {
6815 rtx set = single_set (i2);
6816
6817 /* Uses which just store in the pseudo don't count,
6818 since if they are the only uses, they are dead. */
d445b551 6819 if (set != 0 && SET_DEST (set) == reg)
6820 continue;
6821 if (GET_CODE (i2) == CODE_LABEL
6822 || GET_CODE (i2) == JUMP_INSN)
6823 break;
6824 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6825 && reg_mentioned_p (reg, PATTERN (i2)))
6826 /* Some other ref remains;
6827 we can't do anything. */
6828 return;
6829 }
6830
6831 /* Delete the now-dead stores into this pseudo. */
6832 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6833 {
6834 rtx set = single_set (i2);
6835
6836 if (set != 0 && SET_DEST (set) == reg)
6837 delete_insn (i2);
6838 if (GET_CODE (i2) == CODE_LABEL
6839 || GET_CODE (i2) == JUMP_INSN)
6840 break;
6841 }
6842
6843 /* For the debugging info,
6844 say the pseudo lives in this reload reg. */
6845 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6846 alter_reg (REGNO (reg), -1);
6847 }
6848}
32131a9c 6849\f
a8fdc208 6850/* Output reload-insns to reload VALUE into RELOADREG.
858a47b1 6851 VALUE is an autoincrement or autodecrement RTX whose operand
6852 is a register or memory location;
6853 so reloading involves incrementing that location.
6854
6855 INC_AMOUNT is the number to increment or decrement by (always positive).
546b63fb 6856 This cannot be deduced from VALUE. */
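/* An illustrative sketch (not part of the original comment): for
   (pre_dec (reg sp)) with INC_AMOUNT 4, either sp is decremented in place
   and the result copied into RELOADREG, or the fallback sequence
   reloadreg <- sp; reloadreg -= 4; sp <- reloadreg is emitted.  For the
   post-increment forms, INCLOC is copied into RELOADREG first, so RELOADREG
   keeps the original value for use as the address.  */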
32131a9c 6857
6858static void
6859inc_for_reload (reloadreg, value, inc_amount)
6860 rtx reloadreg;
6861 rtx value;
6862 int inc_amount;
6863{
6864 /* REG or MEM to be copied and incremented. */
6865 rtx incloc = XEXP (value, 0);
6866 /* Nonzero if increment after copying. */
6867 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
546b63fb 6868 rtx last;
6869 rtx inc;
6870 rtx add_insn;
6871 int code;
6872
6873 /* No hard register is equivalent to this register after
6874 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
6875 we could inc/dec that register as well (maybe even using it for
6876 the source), but I'm not sure it's worth worrying about. */
6877 if (GET_CODE (incloc) == REG)
6878 reg_last_reload_reg[REGNO (incloc)] = 0;
6879
6880 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6881 inc_amount = - inc_amount;
6882
fb3821f7 6883 inc = GEN_INT (inc_amount);
6884
6885 /* If this is post-increment, first copy the location to the reload reg. */
6886 if (post)
546b63fb 6887 emit_insn (gen_move_insn (reloadreg, incloc));
6888
6889 /* See if we can directly increment INCLOC. Use a method similar to that
6890 in gen_input_reload. */
6891
6892 last = get_last_insn ();
6893 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
6894 gen_rtx (PLUS, GET_MODE (incloc),
6895 incloc, inc)));
6896
6897 code = recog_memoized (add_insn);
6898 if (code >= 0)
32131a9c 6899 {
6900 insn_extract (add_insn);
6901 if (constrain_operands (code, 1))
32131a9c 6902 {
6903 /* If this is a pre-increment and we have incremented the value
6904 where it lives, copy the incremented value to RELOADREG to
6905 be used as an address. */
6906
6907 if (! post)
6908 emit_insn (gen_move_insn (reloadreg, incloc));
6909
6910 return;
6911 }
6912 }
0009eff2 6913
546b63fb 6914 delete_insns_since (last);
6915
6916 /* If couldn't do the increment directly, must increment in RELOADREG.
6917 The way we do this depends on whether this is pre- or post-increment.
6918 For pre-increment, copy INCLOC to the reload register, increment it
6919 there, then save back. */
6920
6921 if (! post)
6922 {
6923 emit_insn (gen_move_insn (reloadreg, incloc));
6924 emit_insn (gen_add2_insn (reloadreg, inc));
6925 emit_insn (gen_move_insn (incloc, reloadreg));
0009eff2 6926 }
6927 else
6928 {
6929 /* Postincrement.
6930 Because this might be a jump insn or a compare, and because RELOADREG
6931 may not be available after the insn in an input reload, we must do
6932 the incrementation before the insn being reloaded for.
6933
6934 We have already copied INCLOC to RELOADREG. Increment the copy in
6935 RELOADREG, save that back, then decrement RELOADREG so it has
6936 the original value. */
6937
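 /* E.g. (hypothetical) for (post_inc (reg 9)) with INC_AMOUNT 4 this emits:
 add 4 to RELOADREG, store RELOADREG back into reg 9, then subtract 4 from
 RELOADREG so it again holds the pre-increment value.  */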
6938 emit_insn (gen_add2_insn (reloadreg, inc));
6939 emit_insn (gen_move_insn (incloc, reloadreg));
6940 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
32131a9c 6941 }
0009eff2 6942
546b63fb 6943 return;
6944}
6945\f
6946/* Return 1 if we are certain that the constraint-string STRING allows
6947 the hard register REG. Return 0 if we can't be sure of this. */
6948
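/* For example (hypothetical constraint strings): if REG is a general hard
   register, "r" and "g,r" both yield 1, while "r,m" yields 0 because the
   second alternative could place the operand in memory rather than in a
   register.  */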
6949static int
6950constraint_accepts_reg_p (string, reg)
6951 char *string;
6952 rtx reg;
6953{
6954 int value = 0;
6955 int regno = true_regnum (reg);
6956 int c;
6957
6958 /* Initialize for first alternative. */
6959 value = 0;
6960 /* Check that each alternative contains `g' or `r'. */
6961 while (1)
6962 switch (c = *string++)
6963 {
6964 case 0:
6965 /* If an alternative lacks `g' or `r', we lose. */
6966 return value;
6967 case ',':
6968 /* If an alternative lacks `g' or `r', we lose. */
6969 if (value == 0)
6970 return 0;
6971 /* Initialize for next alternative. */
6972 value = 0;
6973 break;
6974 case 'g':
6975 case 'r':
6976 /* Any general reg wins for this alternative. */
6977 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
6978 value = 1;
6979 break;
6980 default:
6981 /* Any reg in specified class wins for this alternative. */
6982 {
0009eff2 6983 enum reg_class class = REG_CLASS_FROM_LETTER (c);
32131a9c 6984
0009eff2 6985 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
6986 value = 1;
6987 }
6988 }
6989}
6990\f
6991/* Return the number of places FIND appears within X, but don't count
6992 an occurrence if some SET_DEST is FIND. */
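/* For example (illustrative, assuming the usual sharing of REG rtxes): in
   (set (reg 65) (plus (reg 65) (reg 66))), count_occurrences for (reg 65)
   is 1 -- the SET_DEST use is skipped and only the use inside the source
   counts.  */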
6993
6994static int
6995count_occurrences (x, find)
6996 register rtx x, find;
6997{
6998 register int i, j;
6999 register enum rtx_code code;
7000 register char *format_ptr;
7001 int count;
7002
7003 if (x == find)
7004 return 1;
7005 if (x == 0)
7006 return 0;
7007
7008 code = GET_CODE (x);
7009
7010 switch (code)
7011 {
7012 case REG:
7013 case QUEUED:
7014 case CONST_INT:
7015 case CONST_DOUBLE:
7016 case SYMBOL_REF:
7017 case CODE_LABEL:
7018 case PC:
7019 case CC0:
7020 return 0;
7021
7022 case SET:
7023 if (SET_DEST (x) == find)
7024 return count_occurrences (SET_SRC (x), find);
7025 break;
7026 }
7027
7028 format_ptr = GET_RTX_FORMAT (code);
7029 count = 0;
7030
7031 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7032 {
7033 switch (*format_ptr++)
7034 {
7035 case 'e':
7036 count += count_occurrences (XEXP (x, i), find);
7037 break;
7038
7039 case 'E':
7040 if (XVEC (x, i) != NULL)
7041 {
7042 for (j = 0; j < XVECLEN (x, i); j++)
7043 count += count_occurrences (XVECEXP (x, i, j), find);
7044 }
7045 break;
7046 }
7047 }
7048 return count;
7049}