/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 88, 89, 92, 93, 1994 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include <stdio.h>
#include "config.h"
#include "rtl.h"
#include "obstack.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "flags.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "reload.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"

/* This file contains the reload pass of the compiler, which is
   run after register allocation has been done.  It checks that
   each insn is valid (operands required to be in registers really
   are in registers of the proper class) and fixes up invalid ones
   by copying values temporarily into registers for the insns
   that need them.

   The results of register allocation are described by the vector
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
   can be used to find which hard reg, if any, a pseudo reg is in.

   The technique we always use is to free up a few hard regs that are
   called ``reload regs'', and for each place where a pseudo reg
   must be in a hard reg, copy it temporarily into one of the reload regs.

   All the pseudos that were formerly allocated to the hard regs that
   are now in use as reload regs must be ``spilled''.  This means
   that they go to other hard regs, or to stack slots if no other
   available hard regs can be found.  Spilling can invalidate more
   insns, requiring additional need for reloads, so we must keep checking
   until the process stabilizes.

   For machines with different classes of registers, we must keep track
   of the register class needed for each reload, and make sure that
   we allocate enough reload registers of each class.

   The file reload.c contains the code that checks one insn for
   validity and reports the reloads that it needs.  This file
   is in charge of scanning the entire rtl code, accumulating the
   reload needs, spilling, assigning reload registers to use for
   fixing up each insn, and generating the new insns to copy values
   into the reload registers.  */
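
/* As a simplified illustration (a sketch, not actual compiler output):
   suppose pseudo 105 did not get a hard reg and lives in a stack slot,
   and an insn

        (set (reg 100) (plus (reg 100) (reg 105)))

   requires both operands to be in registers.  Reload picks a reload reg,
   say hard reg 2, loads the spilled value into it, and rewrites the insn:

        (set (reg 2) (mem (plus (reg fp) (const_int -8))))
        (set (reg 100) (plus (reg 100) (reg 2)))

   The stack address shown is hypothetical; the real slot and offset come
   from alter_reg and the elimination offsets computed below.  */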

#ifndef REGISTER_MOVE_COST
#define REGISTER_MOVE_COST(x, y) 2
#endif

#ifndef MEMORY_MOVE_COST
#define MEMORY_MOVE_COST(x) 4
#endif
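
/* With the defaults above, any register-register move costs 2 and any
   memory reference costs 4, i.e. memory is assumed to be twice as
   expensive as a register-register move.  A target that wants different
   costs overrides these macros in its description header, for example
   (purely hypothetical values, not taken from any real target):

        #define REGISTER_MOVE_COST(CLASS1, CLASS2) \
          ((CLASS1) == (CLASS2) ? 2 : 4)
        #define MEMORY_MOVE_COST(MODE) 6  */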
\f
/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine.)  */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;

/* Widest width in which each pseudo reg is referred to (via subreg).  */
static int *reg_max_ref_width;

/* Element N is the insn that initialized reg N from its equivalent
   constant or memory slot.  */
static rtx *reg_equiv_init;

/* During reload_as_needed, element N contains the last pseudo regno
   reloaded into the Nth reload register.  This vector is in parallel
   with spill_regs.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   the Nth reload register was last used.  This vector is in parallel
   with spill_regs, and its contents are significant only when
   reg_reloaded_contents is significant.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

/* This reg set indicates registers that may not be used for retrying global
   allocation.  The registers that may not be used include all spill registers
   and the frame pointer (if we are using one).  */
HARD_REG_SET forbidden_regs;

/* This reg set indicates registers that are not good for spill registers.
   They will not be used to complete groups of spill registers.  This includes
   all fixed registers, registers that may be eliminated, and, if
   SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.

   (spill_reg_order prevents these registers from being used to start a
   group.)  */
static HARD_REG_SET bad_spill_regs;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `spills' is the number of
   elements that are actually valid; new ones are added at the end.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* Describes order of preference for putting regs into spill_regs.
   Contains the numbers of all the hard regs, in order most preferred first.
   This order is different for each function.
   It is set up by order_regs_for_reload.
   Empty elements at the end contain -1.  */
static short potential_reload_regs[FIRST_PSEUDO_REGISTER];

/* 1 for a hard register that appears explicitly in the rtl
   (for example, function value registers, special registers
   used by insns, structure value pointer registers).  */
static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];

/* Indicates if a register was counted against the need for
   groups.  0 means it can count against max_nongroup instead.  */
static HARD_REG_SET counted_for_groups;

/* Indicates if a register was counted against the need for
   non-groups.  0 means it can become part of a new group.
   During choose_reload_regs, 1 here means don't use this reg
   as part of a group, even if it seems to be otherwise ok.  */
static HARD_REG_SET counted_for_nongroups;

/* Indexed by pseudo reg number N,
   says may not delete stores into the real (memory) home of pseudo N.
   This is set if we already substituted a memory equivalent in some uses,
   which happens when we have to eliminate the fp from it.  */
static char *cannot_omit_stores;

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */

static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */

char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */

char double_reg_address_ok;

/* Record the stack slot for each spilled hard register.  */

static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */

static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Indexed by register class and basic block number, nonzero if there is
   any need for a spill register of that class in that basic block.
   The pointer is 0 if we did stupid allocation and don't know
   the structure of basic blocks.  */

char *basic_block_needs[N_REG_CLASSES];

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */

int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */

int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  */

enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */

struct obstack reload_obstack;
char *reload_firstobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* List of labels that must never be deleted.  */
extern rtx forced_labels;
\f
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

static struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  int initial_offset;		/* Initial difference between values.  */
  int can_eliminate;		/* Non-zero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
				   insns made by reload.  */
  int offset;			/* Current offset between the two regs.  */
  int max_offset;		/* Maximum offset between the two regs.  */
  int previous_offset;		/* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
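
/* For instance, a machine whose argument pointer and frame pointer can
   both be replaced might supply (a hypothetical sample, not copied from
   any particular machine description):

        #define ELIMINABLE_REGS                                 \
         {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM},        \
          { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM},        \
          { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}}

   Each pair is tried in the order given; reg_eliminate above is
   initialized directly from this list.  */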

/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If non-zero, we use a new copy of each
   replacement result in any insns encountered.  */
static int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the number of the label.
   The first table is an array of flags that records whether we have yet
   encountered a label and the second table is an array of arrays, one
   entry in the latter array for each elimination.  */

static char *offsets_known_at;
static int (*offsets_at)[NUM_ELIMINABLE_REGS];
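
/* Both tables are indexed by label number: offsets_known_at[L] is nonzero
   once label L has been reached by some path, and offsets_at[L][I] is the
   offset of the I'th entry of reg_eliminate at that label.  (reload below
   biases both pointers by get_first_label_num () so that this indexing
   works directly.)  */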

/* Number of labels in the current function.  */

static int num_labels;

struct hard_reg_n_uses { int regno; int uses; };
\f
static int possible_group_p PROTO((int, int *));
static void count_possible_groups PROTO((int *, enum machine_mode *,
					  int *));
static int modes_equiv_for_class_p PROTO((enum machine_mode,
					  enum machine_mode,
					  enum reg_class));
static void spill_failure PROTO((rtx));
static int new_spill_reg PROTO((int, int, int *, int *, int,
				FILE *));
static void delete_dead_insn PROTO((rtx));
static void alter_reg PROTO((int, int));
static void mark_scratch_live PROTO((rtx));
static void set_label_offsets PROTO((rtx, rtx, int));
static int eliminate_regs_in_insn PROTO((rtx, int));
static void mark_not_eliminable PROTO((rtx, rtx));
static int spill_hard_reg PROTO((int, int, FILE *, int));
static void scan_paradoxical_subregs PROTO((rtx));
static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
				       struct hard_reg_n_uses *));
static void order_regs_for_reload PROTO((void));
static void reload_as_needed PROTO((rtx, int));
static void forget_old_reloads_1 PROTO((rtx, rtx));
static int reload_reg_class_lower PROTO((short *, short *));
static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
					  enum machine_mode));
static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
					   enum machine_mode));
static int reload_reg_free_p PROTO((int, int, enum reload_type));
static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
static int allocate_reload_reg PROTO((int, rtx, int, int));
static void choose_reload_regs PROTO((rtx, rtx));
static void merge_assigned_reloads PROTO((rtx));
static void emit_reload_insns PROTO((rtx));
static void delete_output_reload PROTO((rtx, int, rtx));
static void inc_for_reload PROTO((rtx, rtx, int));
static int constraint_accepts_reg_p PROTO((char *, rtx));
static int count_occurrences PROTO((rtx, rtx));
\f
546b63fb
RK
367/* Initialize the reload pass once per compilation. */
368
32131a9c
RK
369void
370init_reload ()
371{
372 register int i;
373
374 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
375 Set spill_indirect_levels to the number of levels such addressing is
376 permitted, zero if it is not permitted at all. */
377
378 register rtx tem
379 = gen_rtx (MEM, Pmode,
380 gen_rtx (PLUS, Pmode,
381 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
fb3821f7 382 GEN_INT (4)));
32131a9c
RK
383 spill_indirect_levels = 0;
384
385 while (memory_address_p (QImode, tem))
386 {
387 spill_indirect_levels++;
388 tem = gen_rtx (MEM, Pmode, tem);
389 }
390
391 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
392
393 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
394 indirect_symref_ok = memory_address_p (QImode, tem);
395
396 /* See if reg+reg is a valid (and offsettable) address. */
397
65701fd2 398 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
57caa638
RS
399 {
400 tem = gen_rtx (PLUS, Pmode,
3ec2ea3e 401 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
57caa638
RS
402 gen_rtx (REG, Pmode, i));
403 /* This way, we make sure that reg+reg is an offsettable address. */
404 tem = plus_constant (tem, 4);
405
406 if (memory_address_p (QImode, tem))
407 {
408 double_reg_address_ok = 1;
409 break;
410 }
411 }
32131a9c
RK
412
413 /* Initialize obstack for our rtl allocation. */
414 gcc_obstack_init (&reload_obstack);
415 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
32131a9c
RK
416}

/* Main entry point for the reload pass.

   FIRST is the first insn of the function being compiled.

   GLOBAL nonzero means we were called from global_alloc
   and should attempt to reallocate any pseudoregs that we
   displace from hard regs we will use for reloads.
   If GLOBAL is zero, we do not have enough information to do that,
   so any pseudo reg that is spilled must go to the stack.

   DUMPFILE is the global-reg debugging dump file stream, or 0.
   If it is nonzero, messages are written to it to describe
   which registers are seized as reload regs, which pseudo regs
   are spilled from them, and where the pseudo regs are reallocated to.

   Return value is nonzero if reload failed
   and we must not do any more for this function.  */

int
reload (first, global, dumpfile)
     rtx first;
     int global;
     FILE *dumpfile;
{
  register int class;
  register int i, j;
  register rtx insn;
  register struct elim_table *ep;

  int something_changed;
  int something_needs_reloads;
  int something_needs_elimination;
  int new_basic_block_needs;
  enum reg_class caller_save_spill_class = NO_REGS;
  int caller_save_group_size = 1;

  /* Nonzero means we couldn't get enough spill regs.  */
  int failure = 0;

  /* The basic block number currently being processed for INSN.  */
  int this_block;

  /* Make sure even insns with volatile mem refs are recognizable.  */
  init_recog ();

  /* Enable find_equiv_reg to distinguish insns made by reload.  */
  reload_first_uid = get_max_uid ();

  for (i = 0; i < N_REG_CLASSES; i++)
    basic_block_needs[i] = 0;

#ifdef SECONDARY_MEMORY_NEEDED
  /* Initialize the secondary memory table.  */
  clear_secondary_mem ();
#endif

  /* Remember which hard regs appear explicitly
     before we merge into `regs_ever_live' the ones in which
     pseudo regs have been allocated.  */
  bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);

  /* We don't have a stack slot for any spill reg yet.  */
  bzero (spill_stack_slot, sizeof spill_stack_slot);
  bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);

  /* Initialize the save area information for caller-save, in case some
     are needed.  */
  init_save_areas ();

  /* Compute which hard registers are now in use
     as homes for pseudo registers.
     This is done here rather than (eg) in global_alloc
     because this point is reached even if not optimizing.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    mark_home_live (i);

  for (i = 0; i < scratch_list_length; i++)
    if (scratch_list[i])
      mark_scratch_live (scratch_list[i]);

  /* Make sure that the last insn in the chain
     is not something that needs reloading.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Find all the pseudo registers that didn't get hard regs
     but do have known equivalent constants or memory slots.
     These include parameters (known equivalent to parameter slots)
     and cse'd or loop-moved constant memory addresses.

     Record constant equivalents in reg_equiv_constant
     so they will be substituted by find_reloads.
     Record memory equivalents in reg_mem_equiv so they can
     be substituted eventually by altering the REG-rtx's.  */

  reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_constant, max_regno * sizeof (rtx));
  reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
  reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_mem, max_regno * sizeof (rtx));
  reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_init, max_regno * sizeof (rtx));
  reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_address, max_regno * sizeof (rtx));
  reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
  bzero (reg_max_ref_width, max_regno * sizeof (int));
  cannot_omit_stores = (char *) alloca (max_regno);
  bzero (cannot_omit_stores, max_regno);

  /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
     Also find all paradoxical subregs
     and find largest such for each pseudo.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  if (note
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
		  || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
#endif
	      )
	    {
	      rtx x = XEXP (note, 0);
	      i = REGNO (SET_DEST (set));
	      if (i > LAST_VIRTUAL_REGISTER)
		{
		  if (GET_CODE (x) == MEM)
		    reg_equiv_memory_loc[i] = x;
		  else if (CONSTANT_P (x))
		    {
		      if (LEGITIMATE_CONSTANT_P (x))
			reg_equiv_constant[i] = x;
		      else
			reg_equiv_memory_loc[i]
			  = force_const_mem (GET_MODE (SET_DEST (set)), x);
		    }
		  else
		    continue;

		  /* If this register is being made equivalent to a MEM
		     and the MEM is not SET_SRC, the equivalencing insn
		     is one with the MEM as a SET_DEST and it occurs later.
		     So don't mark this insn now.  */
		  if (GET_CODE (x) != MEM
		      || rtx_equal_p (SET_SRC (set), x))
		    reg_equiv_init[i] = insn;
		}
	    }
	}

      /* If this insn is setting a MEM from a register equivalent to it,
	 this is the equivalencing insn.  */
      else if (set && GET_CODE (SET_DEST (set)) == MEM
	       && GET_CODE (SET_SRC (set)) == REG
	       && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
	       && rtx_equal_p (SET_DEST (set),
			       reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
	reg_equiv_init[REGNO (SET_SRC (set))] = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	scan_paradoxical_subregs (PATTERN (insn));
    }

  /* Does this function require a frame pointer?  */

  frame_pointer_needed = (! flag_omit_frame_pointer
#ifdef EXIT_IGNORE_STACK
			  /* ?? If EXIT_IGNORE_STACK is set, we will not save
			     and restore sp for alloca.  So we can't eliminate
			     the frame pointer in that case.  At some point,
			     we should improve this by emitting the
			     sp-adjusting insns for this case.  */
			  || (current_function_calls_alloca
			      && EXIT_IGNORE_STACK)
#endif
			  || FRAME_POINTER_REQUIRED);

  num_eliminable = 0;

  /* Initialize the table of registers to eliminate.  The way we do this
     depends on how the eliminable registers were defined.  */
#ifdef ELIMINABLE_REGS
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      ep->can_eliminate = ep->can_eliminate_previous
	= (CAN_ELIMINATE (ep->from, ep->to)
	   && (ep->from != HARD_FRAME_POINTER_REGNUM
	       || ! frame_pointer_needed));
    }
#else
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
    = ! frame_pointer_needed;
#endif

  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx will cause, e.g.,
     gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
      ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
    }

  num_labels = max_label_num () - get_first_label_num ();

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = (char *) alloca (num_labels);
  offsets_at
    = (int (*)[NUM_ELIMINABLE_REGS])
      alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));

  offsets_known_at -= get_first_label_num ();
  offsets_at -= get_first_label_num ();

  /* Alter each pseudo-reg rtx to contain its hard reg number.
     Assign stack slots to the pseudos that lack hard regs or equivalents.
     Do not touch virtual registers.  */

  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
    alter_reg (i, -1);

  /* Round size of stack frame to BIGGEST_ALIGNMENT.  This must be done here
     because the stack size may be a part of the offset computation for
     register elimination.  */
  assign_stack_local (BLKmode, 0, 0);

  /* If we have some registers we think can be eliminated, scan all insns to
     see if there is an insn that sets one of these registers to something
     other than itself plus a constant.  If so, the register cannot be
     eliminated.  Doing this scan here eliminates an extra pass through the
     main reload loop in the most common case where register elimination
     cannot be done.  */
  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
	|| GET_CODE (insn) == CALL_INSN)
      note_stores (PATTERN (insn), mark_not_eliminable);

#ifndef REGISTER_CONSTRAINTS
  /* If all the pseudo regs have hard regs,
     except for those that are never referenced,
     we know that no reloads are needed.  */
  /* But that is not true if there are register constraints, since
     in that case some pseudos might be in the wrong kind of hard reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
      break;

  if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
    return;
#endif

  /* Compute the order of preference for hard registers to spill.
     Store them by decreasing preference in potential_reload_regs.  */

  order_regs_for_reload ();

  /* So far, no hard regs have been spilled.  */
  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    spill_reg_order[i] = -1;

  /* On most machines, we can't use any register explicitly used in the
     rtl as a spill register.  But on some, we have to.  Those will have
     taken care to keep the life of hard regs as short as possible.  */

#ifdef SMALL_REGISTER_CLASSES
  CLEAR_HARD_REG_SET (forbidden_regs);
#else
  COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
#endif

  /* Spill any hard regs that we know we can't eliminate.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if (! ep->can_eliminate)
      {
	spill_hard_reg (ep->from, global, dumpfile, 1);
	regs_ever_live[ep->from] = 1;
      }

  if (global)
    for (i = 0; i < N_REG_CLASSES; i++)
      {
	basic_block_needs[i] = (char *) alloca (n_basic_blocks);
	bzero (basic_block_needs[i], n_basic_blocks);
      }

  /* From now on, we need to emit any moves without making new pseudos.  */
  reload_in_progress = 1;

  /* This loop scans the entire function each go-round
     and repeats until one repetition spills no additional hard regs.  */

  /* This flag is set when a pseudo reg is spilled,
     to require another pass.  Note that getting an additional reload
     reg does not necessarily imply any pseudo reg was spilled;
     sometimes we find a reload reg that no pseudo reg was allocated in.  */
  something_changed = 1;
  /* This flag is set if there are any insns that require reloading.  */
  something_needs_reloads = 0;
  /* This flag is set if there are any insns that require register
     eliminations.  */
  something_needs_elimination = 0;
  while (something_changed)
    {
      rtx after_call = 0;

      /* For each class, number of reload regs needed in that class.
	 This is the maximum over all insns of the needs in that class
	 of the individual insn.  */
      int max_needs[N_REG_CLASSES];
      /* For each class, size of group of consecutive regs
	 that is needed for the reloads of this class.  */
      int group_size[N_REG_CLASSES];
      /* For each class, max number of consecutive groups needed.
	 (Each group contains group_size[CLASS] consecutive registers.)  */
      int max_groups[N_REG_CLASSES];
      /* For each class, max number needed of regs that don't belong
	 to any of the groups.  */
      int max_nongroups[N_REG_CLASSES];
      /* For each class, the machine mode which requires consecutive
	 groups of regs of that class.
	 If two different modes ever require groups of one class,
	 they must be the same size and equally restrictive for that class,
	 otherwise we can't handle the complexity.  */
      enum machine_mode group_mode[N_REG_CLASSES];
      /* Record the insn where each maximum need is first found.  */
      rtx max_needs_insn[N_REG_CLASSES];
      rtx max_groups_insn[N_REG_CLASSES];
      rtx max_nongroups_insn[N_REG_CLASSES];
      rtx x;
      int starting_frame_size = get_frame_size ();
      static char *reg_class_names[] = REG_CLASS_NAMES;

      something_changed = 0;
      bzero (max_needs, sizeof max_needs);
      bzero (max_groups, sizeof max_groups);
      bzero (max_nongroups, sizeof max_nongroups);
      bzero (max_needs_insn, sizeof max_needs_insn);
      bzero (max_groups_insn, sizeof max_groups_insn);
      bzero (max_nongroups_insn, sizeof max_nongroups_insn);
      bzero (group_size, sizeof group_size);
      for (i = 0; i < N_REG_CLASSES; i++)
	group_mode[i] = VOIDmode;

      /* Keep track of which basic blocks are needing the reloads.  */
      this_block = 0;

      /* Remember whether any element of basic_block_needs
	 changes from 0 to 1 in this pass.  */
      new_basic_block_needs = 0;

      /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	{
	  INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
	  ep->previous_offset = ep->offset
	    = ep->max_offset = ep->initial_offset;
	}
#else
#ifdef INITIAL_FRAME_POINTER_OFFSET
      INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
#else
      if (!FRAME_POINTER_REQUIRED)
	abort ();
      reg_eliminate[0].initial_offset = 0;
#endif
      reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
	= reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

      num_not_at_initial_offset = 0;

      bzero (&offsets_known_at[get_first_label_num ()], num_labels);

      /* Set a known offset for each forced label to be at the initial offset
	 of each elimination.  We do this because we assume that all
	 computed jumps occur from a location where each elimination is
	 at its initial offset.  */

      for (x = forced_labels; x; x = XEXP (x, 1))
	if (XEXP (x, 0))
	  set_label_offsets (XEXP (x, 0), NULL_RTX, 1);

      /* For each pseudo register that has an equivalent location defined,
	 try to eliminate any eliminable registers (such as the frame pointer)
	 assuming initial offsets for the replacement register, which
	 is the normal case.

	 If the resulting location is directly addressable, substitute
	 the MEM we just got directly for the old REG.

	 If it is not addressable but is a constant or the sum of a hard reg
	 and constant, it is probably not addressable because the constant is
	 out of range, in that case record the address; we will generate
	 hairy code to compute the address in a register each time it is
	 needed.  Similarly if it is a hard register, but one that is not
	 valid as an address register.

	 If the location is not addressable, but does not have one of the
	 above forms, assign a stack slot.  We have to do this to avoid the
	 potential of producing lots of reloads if, e.g., a location involves
	 a pseudo that didn't get a hard register and has an equivalent memory
	 location that also involves a pseudo that didn't get a hard register.

	 Perhaps at some point we will improve reload_when_needed handling
	 so this problem goes away.  But that's very hairy.  */

      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
	if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
	  {
	    rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);

	    if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
					 XEXP (x, 0)))
	      reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
	    else if (CONSTANT_P (XEXP (x, 0))
		     || (GET_CODE (XEXP (x, 0)) == REG
			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
		     || (GET_CODE (XEXP (x, 0)) == PLUS
			 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
			 && (REGNO (XEXP (XEXP (x, 0), 0))
			     < FIRST_PSEUDO_REGISTER)
			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
	      reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
	    else
	      {
		/* Make a new stack slot.  Then indicate that something
		   changed so we go back and recompute offsets for
		   eliminable registers because the allocation of memory
		   below might change some offset.  reg_equiv_{mem,address}
		   will be set up for this pseudo on the next pass around
		   the loop.  */
		reg_equiv_memory_loc[i] = 0;
		reg_equiv_init[i] = 0;
		alter_reg (i, -1);
		something_changed = 1;
	      }
	  }

      /* If we allocated another pseudo to the stack, redo elimination
	 bookkeeping.  */
      if (something_changed)
	continue;

      /* If caller-saves needs a group, initialize the group to include
	 the size and mode required for caller-saves.  */

      if (caller_save_group_size > 1)
	{
	  group_mode[(int) caller_save_spill_class] = Pmode;
	  group_size[(int) caller_save_spill_class] = caller_save_group_size;
	}

      /* Compute the most additional registers needed by any instruction.
	 Collect information separately for each class of regs.  */

      for (insn = first; insn; insn = NEXT_INSN (insn))
	{
	  if (global && this_block + 1 < n_basic_blocks
	      && insn == basic_block_head[this_block+1])
	    ++this_block;

	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
	     might include REG_LABEL), we need to see what effects this
	     has on the known offsets at labels.  */

	  if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
	      || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
		  && REG_NOTES (insn) != 0))
	    set_label_offsets (insn, insn, 0);

	  if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	    {
	      /* Nonzero means don't use a reload reg that overlaps
		 the place where a function value can be returned.  */
	      rtx avoid_return_reg = 0;

	      rtx old_body = PATTERN (insn);
	      int old_code = INSN_CODE (insn);
	      rtx old_notes = REG_NOTES (insn);
	      int did_elimination = 0;
	      int max_total_input_groups = 0, max_total_output_groups = 0;

	      /* To compute the number of reload registers of each class
		 needed for an insn, we must simulate what choose_reload_regs
		 can do.  We do this by splitting an insn into an "input" and
		 an "output" part.  RELOAD_OTHER reloads are used in both.
		 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
		 which must be live over the entire input section of reloads,
		 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
		 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
		 inputs.

		 The registers needed for output are RELOAD_OTHER and
		 RELOAD_FOR_OUTPUT, which are live for the entire output
		 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
		 reloads for each operand.

		 The total number of registers needed is the maximum of the
		 inputs and outputs.  */
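
	      /* A small worked example (hypothetical counts): if an insn has
		 two RELOAD_FOR_INPUT reloads, one RELOAD_FOR_OPERAND_ADDRESS
		 reload and one RELOAD_FOR_OUTPUT reload, all in the same
		 class, the input part needs 2 + 1 = 3 registers, the output
		 part needs 1, and the insn as a whole needs MAX (3, 1) = 3
		 registers of that class; that is what the merging code
		 further down computes.  */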

	      /* These just count RELOAD_OTHER.  */
	      int insn_needs[N_REG_CLASSES];
	      int insn_groups[N_REG_CLASSES];
	      int insn_total_groups = 0;

	      /* Count RELOAD_FOR_INPUT reloads.  */
	      int insn_needs_for_inputs[N_REG_CLASSES];
	      int insn_groups_for_inputs[N_REG_CLASSES];
	      int insn_total_groups_for_inputs = 0;

	      /* Count RELOAD_FOR_OUTPUT reloads.  */
	      int insn_needs_for_outputs[N_REG_CLASSES];
	      int insn_groups_for_outputs[N_REG_CLASSES];
	      int insn_total_groups_for_outputs = 0;

	      /* Count RELOAD_FOR_INSN reloads.  */
	      int insn_needs_for_insn[N_REG_CLASSES];
	      int insn_groups_for_insn[N_REG_CLASSES];
	      int insn_total_groups_for_insn = 0;

	      /* Count RELOAD_FOR_OTHER_ADDRESS reloads.  */
	      int insn_needs_for_other_addr[N_REG_CLASSES];
	      int insn_groups_for_other_addr[N_REG_CLASSES];
	      int insn_total_groups_for_other_addr = 0;

	      /* Count RELOAD_FOR_INPUT_ADDRESS reloads.  */
	      int insn_needs_for_in_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
	      int insn_groups_for_in_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
	      int insn_total_groups_for_in_addr[MAX_RECOG_OPERANDS];

	      /* Count RELOAD_FOR_OUTPUT_ADDRESS reloads.  */
	      int insn_needs_for_out_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
	      int insn_groups_for_out_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
	      int insn_total_groups_for_out_addr[MAX_RECOG_OPERANDS];

	      /* Count RELOAD_FOR_OPERAND_ADDRESS reloads.  */
	      int insn_needs_for_op_addr[N_REG_CLASSES];
	      int insn_groups_for_op_addr[N_REG_CLASSES];
	      int insn_total_groups_for_op_addr = 0;

#if 0  /* This wouldn't work nowadays, since optimize_bit_field
	  looks for non-strict memory addresses.  */
	      /* Optimization: a bit-field instruction whose field
		 happens to be a byte or halfword in memory
		 can be changed to a move instruction.  */

	      if (GET_CODE (PATTERN (insn)) == SET)
		{
		  rtx dest = SET_DEST (PATTERN (insn));
		  rtx src = SET_SRC (PATTERN (insn));

		  if (GET_CODE (dest) == ZERO_EXTRACT
		      || GET_CODE (dest) == SIGN_EXTRACT)
		    optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
		  if (GET_CODE (src) == ZERO_EXTRACT
		      || GET_CODE (src) == SIGN_EXTRACT)
		    optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
		}
#endif

	      /* If needed, eliminate any eliminable registers.  */
	      if (num_eliminable)
		did_elimination = eliminate_regs_in_insn (insn, 0);

#ifdef SMALL_REGISTER_CLASSES
	      /* Set avoid_return_reg if this is an insn
		 that might use the value of a function call.  */
	      if (GET_CODE (insn) == CALL_INSN)
		{
		  if (GET_CODE (PATTERN (insn)) == SET)
		    after_call = SET_DEST (PATTERN (insn));
		  else if (GET_CODE (PATTERN (insn)) == PARALLEL
			   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		    after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
		  else
		    after_call = 0;
		}
	      else if (after_call != 0
		       && !(GET_CODE (PATTERN (insn)) == SET
			    && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
		{
		  if (reg_mentioned_p (after_call, PATTERN (insn)))
		    avoid_return_reg = after_call;
		  after_call = 0;
		}
#endif /* SMALL_REGISTER_CLASSES */

	      /* Analyze the instruction.  */
	      find_reloads (insn, 0, spill_indirect_levels, global,
			    spill_reg_order);

	      /* Remember for later shortcuts which insns had any reloads or
		 register eliminations.

		 One might think that it would be worthwhile to mark insns
		 that need register replacements but not reloads, but this is
		 not safe because find_reloads may do some manipulation of
		 the insn (such as swapping commutative operands), which would
		 be lost when we restore the old pattern after register
		 replacement.  So the actions of find_reloads must be redone
		 in subsequent passes or in reload_as_needed.

		 However, it is safe to mark insns that need reloads
		 but not register replacement.  */

	      PUT_MODE (insn, (did_elimination ? QImode
			       : n_reloads ? HImode
			       : GET_MODE (insn) == DImode ? DImode
			       : VOIDmode));

	      /* Discard any register replacements done.  */
	      if (did_elimination)
		{
		  obstack_free (&reload_obstack, reload_firstobj);
		  PATTERN (insn) = old_body;
		  INSN_CODE (insn) = old_code;
		  REG_NOTES (insn) = old_notes;
		  something_needs_elimination = 1;
		}

	      /* If this insn has no reloads, we need not do anything except
		 in the case of a CALL_INSN when we have caller-saves and
		 caller-save needs reloads.  */

	      if (n_reloads == 0
		  && ! (GET_CODE (insn) == CALL_INSN
			&& caller_save_spill_class != NO_REGS))
		continue;

	      something_needs_reloads = 1;

	      for (i = 0; i < N_REG_CLASSES; i++)
		{
		  insn_needs[i] = 0, insn_groups[i] = 0;
		  insn_needs_for_inputs[i] = 0, insn_groups_for_inputs[i] = 0;
		  insn_needs_for_outputs[i] = 0, insn_groups_for_outputs[i] = 0;
		  insn_needs_for_insn[i] = 0, insn_groups_for_insn[i] = 0;
		  insn_needs_for_op_addr[i] = 0, insn_groups_for_op_addr[i] = 0;
		  insn_needs_for_other_addr[i] = 0;
		  insn_groups_for_other_addr[i] = 0;
		}

	      for (i = 0; i < reload_n_operands; i++)
		{
		  insn_total_groups_for_in_addr[i] = 0;
		  insn_total_groups_for_out_addr[i] = 0;

		  for (j = 0; j < N_REG_CLASSES; j++)
		    {
		      insn_needs_for_in_addr[i][j] = 0;
		      insn_needs_for_out_addr[i][j] = 0;
		      insn_groups_for_in_addr[i][j] = 0;
		      insn_groups_for_out_addr[i][j] = 0;
		    }
		}

	      /* Count each reload once in every class
		 containing the reload's own class.  */

	      for (i = 0; i < n_reloads; i++)
		{
		  register enum reg_class *p;
		  enum reg_class class = reload_reg_class[i];
		  int size;
		  enum machine_mode mode;
		  int *this_groups;
		  int *this_needs;
		  int *this_total_groups;

		  /* Don't count the dummy reloads, for which one of the
		     regs mentioned in the insn can be used for reloading.
		     Don't count optional reloads.
		     Don't count reloads that got combined with others.  */
		  if (reload_reg_rtx[i] != 0
		      || reload_optional[i] != 0
		      || (reload_out[i] == 0 && reload_in[i] == 0
			  && ! reload_secondary_p[i]))
		    continue;

		  /* Show that a reload register of this class is needed
		     in this basic block.  We do not use insn_needs and
		     insn_groups because they are overly conservative for
		     this purpose.  */
		  if (global && ! basic_block_needs[(int) class][this_block])
		    {
		      basic_block_needs[(int) class][this_block] = 1;
		      new_basic_block_needs = 1;
		    }

		  /* Decide which time-of-use to count this reload for.  */
		  switch (reload_when_needed[i])
		    {
		    case RELOAD_OTHER:
		      this_needs = insn_needs;
		      this_groups = insn_groups;
		      this_total_groups = &insn_total_groups;
		      break;

		    case RELOAD_FOR_INPUT:
		      this_needs = insn_needs_for_inputs;
		      this_groups = insn_groups_for_inputs;
		      this_total_groups = &insn_total_groups_for_inputs;
		      break;

		    case RELOAD_FOR_OUTPUT:
		      this_needs = insn_needs_for_outputs;
		      this_groups = insn_groups_for_outputs;
		      this_total_groups = &insn_total_groups_for_outputs;
		      break;

		    case RELOAD_FOR_INSN:
		      this_needs = insn_needs_for_insn;
		      this_groups = insn_groups_for_insn;
		      this_total_groups = &insn_total_groups_for_insn;
		      break;

		    case RELOAD_FOR_OTHER_ADDRESS:
		      this_needs = insn_needs_for_other_addr;
		      this_groups = insn_groups_for_other_addr;
		      this_total_groups = &insn_total_groups_for_other_addr;
		      break;

		    case RELOAD_FOR_INPUT_ADDRESS:
		      this_needs = insn_needs_for_in_addr[reload_opnum[i]];
		      this_groups = insn_groups_for_in_addr[reload_opnum[i]];
		      this_total_groups
			= &insn_total_groups_for_in_addr[reload_opnum[i]];
		      break;

		    case RELOAD_FOR_OUTPUT_ADDRESS:
		      this_needs = insn_needs_for_out_addr[reload_opnum[i]];
		      this_groups = insn_groups_for_out_addr[reload_opnum[i]];
		      this_total_groups
			= &insn_total_groups_for_out_addr[reload_opnum[i]];
		      break;

		    case RELOAD_FOR_OPERAND_ADDRESS:
		      this_needs = insn_needs_for_op_addr;
		      this_groups = insn_groups_for_op_addr;
		      this_total_groups = &insn_total_groups_for_op_addr;
		      break;
		    }

		  mode = reload_inmode[i];
		  if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
		    mode = reload_outmode[i];
		  size = CLASS_MAX_NREGS (class, mode);
		  if (size > 1)
		    {
		      enum machine_mode other_mode, allocate_mode;

		      /* Count number of groups needed separately from
			 number of individual regs needed.  */
		      this_groups[(int) class]++;
		      p = reg_class_superclasses[(int) class];
		      while (*p != LIM_REG_CLASSES)
			this_groups[(int) *p++]++;
		      (*this_total_groups)++;

		      /* Record size and mode of a group of this class.  */
		      /* If more than one size group is needed,
			 make all groups the largest needed size.  */
		      if (group_size[(int) class] < size)
			{
			  other_mode = group_mode[(int) class];
			  allocate_mode = mode;

			  group_size[(int) class] = size;
			  group_mode[(int) class] = mode;
			}
		      else
			{
			  other_mode = mode;
			  allocate_mode = group_mode[(int) class];
			}

		      /* Crash if two dissimilar machine modes both need
			 groups of consecutive regs of the same class.  */

		      if (other_mode != VOIDmode
			  && other_mode != allocate_mode
			  && ! modes_equiv_for_class_p (allocate_mode,
							other_mode,
							class))
			abort ();
		    }
		  else if (size == 1)
		    {
		      this_needs[(int) class] += 1;
		      p = reg_class_superclasses[(int) class];
		      while (*p != LIM_REG_CLASSES)
			this_needs[(int) *p++] += 1;
		    }
		  else
		    abort ();
		}

	      /* All reloads have been counted for this insn;
		 now merge the various times of use.
		 This sets insn_needs, etc., to the maximum total number
		 of registers needed at any point in this insn.  */

	      for (i = 0; i < N_REG_CLASSES; i++)
		{
		  int in_max, out_max;

		  for (in_max = 0, out_max = 0, j = 0;
		       j < reload_n_operands; j++)
		    {
		      in_max = MAX (in_max, insn_needs_for_in_addr[j][i]);
		      out_max = MAX (out_max, insn_needs_for_out_addr[j][i]);
		    }

		  /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
		     and operand addresses but not things used to reload them.
		     Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads don't
		     conflict with things needed to reload inputs or
		     outputs.  */

		  in_max = MAX (in_max, insn_needs_for_op_addr[i]);
		  out_max = MAX (out_max, insn_needs_for_insn[i]);

		  insn_needs_for_inputs[i]
		    = MAX (insn_needs_for_inputs[i]
			   + insn_needs_for_op_addr[i]
			   + insn_needs_for_insn[i],
			   in_max + insn_needs_for_inputs[i]);

		  insn_needs_for_outputs[i] += out_max;
		  insn_needs[i] += MAX (MAX (insn_needs_for_inputs[i],
					     insn_needs_for_outputs[i]),
					insn_needs_for_other_addr[i]);

		  for (in_max = 0, out_max = 0, j = 0;
		       j < reload_n_operands; j++)
		    {
		      in_max = MAX (in_max, insn_groups_for_in_addr[j][i]);
		      out_max = MAX (out_max, insn_groups_for_out_addr[j][i]);
		    }

		  in_max = MAX (in_max, insn_groups_for_op_addr[i]);
		  out_max = MAX (out_max, insn_groups_for_insn[i]);

		  insn_groups_for_inputs[i]
		    = MAX (insn_groups_for_inputs[i]
			   + insn_groups_for_op_addr[i]
			   + insn_groups_for_insn[i],
			   in_max + insn_groups_for_inputs[i]);

		  insn_groups_for_outputs[i] += out_max;
		  insn_groups[i] += MAX (MAX (insn_groups_for_inputs[i],
					      insn_groups_for_outputs[i]),
					 insn_groups_for_other_addr[i]);
		}

	      for (i = 0; i < reload_n_operands; i++)
		{
		  max_total_input_groups
		    = MAX (max_total_input_groups,
			   insn_total_groups_for_in_addr[i]);
		  max_total_output_groups
		    = MAX (max_total_output_groups,
			   insn_total_groups_for_out_addr[i]);
		}

	      max_total_input_groups = MAX (max_total_input_groups,
					    insn_total_groups_for_op_addr);
	      max_total_output_groups = MAX (max_total_output_groups,
					     insn_total_groups_for_insn);

	      insn_total_groups_for_inputs
		= MAX (max_total_input_groups + insn_total_groups_for_op_addr
		       + insn_total_groups_for_insn,
		       max_total_input_groups + insn_total_groups_for_inputs);

	      insn_total_groups_for_outputs += max_total_output_groups;

	      insn_total_groups += MAX (MAX (insn_total_groups_for_outputs,
					     insn_total_groups_for_inputs),
					insn_total_groups_for_other_addr);

	      /* If this is a CALL_INSN and caller-saves will need
		 a spill register, act as if the spill register is
		 needed for this insn.  However, the spill register
		 can be used by any reload of this insn, so we only
		 need do something if no need for that class has
		 been recorded.

		 The assumption that every CALL_INSN will trigger a
		 caller-save is highly conservative; however, the number
		 of cases where caller-saves will need a spill register but
		 a block containing a CALL_INSN won't need a spill register
		 of that class should be quite rare.

		 If a group is needed, the size and mode of the group will
		 have been set up at the beginning of this loop.  */

	      if (GET_CODE (insn) == CALL_INSN
		  && caller_save_spill_class != NO_REGS)
		{
		  int *caller_save_needs
		    = (caller_save_group_size > 1 ? insn_groups : insn_needs);

		  if (caller_save_needs[(int) caller_save_spill_class] == 0)
		    {
		      register enum reg_class *p
			= reg_class_superclasses[(int) caller_save_spill_class];

		      caller_save_needs[(int) caller_save_spill_class]++;

		      while (*p != LIM_REG_CLASSES)
			caller_save_needs[(int) *p++] += 1;
		    }

		  if (caller_save_group_size > 1)
		    insn_total_groups = MAX (insn_total_groups, 1);

		  /* Show that this basic block will need a register of
		     this class.  */

		  if (global
		      && ! (basic_block_needs[(int) caller_save_spill_class]
			    [this_block]))
		    {
		      basic_block_needs[(int) caller_save_spill_class]
			[this_block] = 1;
		      new_basic_block_needs = 1;
		    }
		}

#ifdef SMALL_REGISTER_CLASSES
	      /* If this insn stores the value of a function call,
		 and that value is in a register that has been spilled,
		 and if the insn needs a reload in a class
		 that might use that register as the reload register,
		 then add an extra need in that class.
		 This makes sure we have a register available that does
		 not overlap the return value.  */
	      if (avoid_return_reg)
		{
		  int regno = REGNO (avoid_return_reg);
		  int nregs
		    = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
		  int r;
		  int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];

		  /* First compute the "basic needs", which counts a
		     need only in the smallest class in which it
		     is required.  */

		  bcopy (insn_needs, basic_needs, sizeof basic_needs);
		  bcopy (insn_groups, basic_groups, sizeof basic_groups);

		  for (i = 0; i < N_REG_CLASSES; i++)
		    {
		      enum reg_class *p;

		      if (basic_needs[i] >= 0)
			for (p = reg_class_superclasses[i];
			     *p != LIM_REG_CLASSES; p++)
			  basic_needs[(int) *p] -= basic_needs[i];

		      if (basic_groups[i] >= 0)
			for (p = reg_class_superclasses[i];
			     *p != LIM_REG_CLASSES; p++)
			  basic_groups[(int) *p] -= basic_groups[i];
		    }

		  /* Now count extra regs if there might be a conflict with
		     the return value register.

		     ??? This is not quite correct because we don't properly
		     handle the case of groups, but if we end up doing
		     something wrong, it either will end up not mattering or
		     we will abort elsewhere.  */

		  for (r = regno; r < regno + nregs; r++)
		    if (spill_reg_order[r] >= 0)
		      for (i = 0; i < N_REG_CLASSES; i++)
			if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
			  {
			    if (basic_needs[i] > 0 || basic_groups[i] > 0)
			      {
				enum reg_class *p;

				insn_needs[i]++;
				p = reg_class_superclasses[i];
				while (*p != LIM_REG_CLASSES)
				  insn_needs[(int) *p++]++;
			      }
			  }
		}
#endif /* SMALL_REGISTER_CLASSES */

	      /* For each class, collect maximum need of any insn.  */

	      for (i = 0; i < N_REG_CLASSES; i++)
		{
		  if (max_needs[i] < insn_needs[i])
		    {
		      max_needs[i] = insn_needs[i];
		      max_needs_insn[i] = insn;
		    }
		  if (max_groups[i] < insn_groups[i])
		    {
		      max_groups[i] = insn_groups[i];
		      max_groups_insn[i] = insn;
		    }
		  if (insn_total_groups > 0)
		    if (max_nongroups[i] < insn_needs[i])
		      {
			max_nongroups[i] = insn_needs[i];
			max_nongroups_insn[i] = insn;
		      }
		}
	    }
	  /* Note that there is a continue statement above.  */
	}

      /* If we allocated any new memory locations, make another pass
	 since it might have changed elimination offsets.  */
      if (starting_frame_size != get_frame_size ())
	something_changed = 1;

      if (dumpfile)
	for (i = 0; i < N_REG_CLASSES; i++)
	  {
	    if (max_needs[i] > 0)
	      fprintf (dumpfile,
		       ";; Need %d reg%s of class %s (for insn %d).\n",
		       max_needs[i], max_needs[i] == 1 ? "" : "s",
		       reg_class_names[i], INSN_UID (max_needs_insn[i]));
	    if (max_nongroups[i] > 0)
	      fprintf (dumpfile,
		       ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
		       max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
		       reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
	    if (max_groups[i] > 0)
	      fprintf (dumpfile,
		       ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
		       max_groups[i], max_groups[i] == 1 ? "" : "s",
		       mode_name[(int) group_mode[i]],
		       reg_class_names[i], INSN_UID (max_groups_insn[i]));
	  }

d445b551 1475 /* If we have caller-saves, set up the save areas and see if caller-save
a8efe40d 1476 will need a spill register. */
32131a9c 1477
d445b551 1478 if (caller_save_needed
a8efe40d
RK
1479 && ! setup_save_areas (&something_changed)
1480 && caller_save_spill_class == NO_REGS)
32131a9c 1481 {
a8efe40d
RK
1482 /* The class we will need depends on whether the machine
1483 supports the sum of two registers for an address; see
1484 find_address_reloads for details. */
1485
a8fdc208 1486 caller_save_spill_class
a8efe40d
RK
1487 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1488 caller_save_group_size
1489 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1490 something_changed = 1;
32131a9c
RK
1491 }
1492
5c23c401
RK
1493 /* See if anything that happened changes which eliminations are valid.
1494 For example, on the Sparc, whether or not the frame pointer can
1495 be eliminated can depend on what registers have been used. We need
1496 not check some conditions again (such as flag_omit_frame_pointer)
1497 since they can't have changed. */
1498
1499 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3ec2ea3e 1500 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
5c23c401
RK
1501#ifdef ELIMINABLE_REGS
1502 || ! CAN_ELIMINATE (ep->from, ep->to)
1503#endif
1504 )
1505 ep->can_eliminate = 0;
1506
32131a9c
RK
1507 /* Look for the case where we have discovered that we can't replace
1508 register A with register B and that means that we will now be
1509 trying to replace register A with register C. This means we can
1510 no longer replace register C with register B and we need to disable
1511 such an elimination, if it exists. This occurs often with A == ap,
1512 B == sp, and C == fp. */
a8fdc208 1513
32131a9c
RK
1514 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1515 {
1516 struct elim_table *op;
1517 register int new_to = -1;
1518
1519 if (! ep->can_eliminate && ep->can_eliminate_previous)
1520 {
1521 /* Find the current elimination for ep->from, if there is a
1522 new one. */
1523 for (op = reg_eliminate;
1524 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1525 if (op->from == ep->from && op->can_eliminate)
1526 {
1527 new_to = op->to;
1528 break;
1529 }
1530
1531 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1532 disable it. */
1533 for (op = reg_eliminate;
1534 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1535 if (op->from == new_to && op->to == ep->to)
1536 op->can_eliminate = 0;
1537 }
1538 }
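      /* Editor's example (not from the original sources): with the common
	 triple mentioned above, A == arg pointer, B == stack pointer,
	 C == frame pointer, suppose ap -> sp has just become impossible.
	 The first inner loop finds the surviving elimination ap -> fp, so
	 NEW_TO is the frame pointer; the second inner loop then disables
	 fp -> sp, because fp must now stay around to stand in for ap.
	 Spelled out for that one case (uncompiled sketch using its own
	 local):  */
#if 0
      {
	struct elim_table *op2;

	/* NEW_TO found above was the frame pointer, so kill fp -> sp.  */
	for (op2 = reg_eliminate; op2 < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     op2++)
	  if (op2->from == FRAME_POINTER_REGNUM
	      && op2->to == STACK_POINTER_REGNUM)
	    op2->can_eliminate = 0;
      }
#endif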
1539
1540 /* See if any registers that we thought we could eliminate the previous
1541 time are no longer eliminable. If so, something has changed and we
1542 must spill the register. Also, recompute the number of eliminable
1543 registers and see if the frame pointer is needed; it is if there is
1544 no elimination of the frame pointer that we can perform. */
1545
1546 frame_pointer_needed = 1;
1547 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1548 {
3ec2ea3e
DE
1549 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1550 && ep->to != HARD_FRAME_POINTER_REGNUM)
32131a9c
RK
1551 frame_pointer_needed = 0;
1552
1553 if (! ep->can_eliminate && ep->can_eliminate_previous)
1554 {
1555 ep->can_eliminate_previous = 0;
1556 spill_hard_reg (ep->from, global, dumpfile, 1);
1557 regs_ever_live[ep->from] = 1;
1558 something_changed = 1;
1559 num_eliminable--;
1560 }
1561 }
1562
1563 /* If all needs are met, we win. */
1564
1565 for (i = 0; i < N_REG_CLASSES; i++)
1566 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1567 break;
1568 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1569 break;
1570
546b63fb
RK
1571 /* Not all needs are met; must spill some hard regs. */
1572
1573 /* Put all registers spilled so far back in potential_reload_regs, but
1574 put them at the front, since we've already spilled most of the
1575 	 pseudos in them (we might have left some pseudos unspilled if they
1576 	 were in a block that didn't need any spill registers of a conflicting
1577 	 class).  We used to try to mark off the need for those registers,
1578 but doing so properly is very complex and reallocating them is the
1579 simpler approach. First, "pack" potential_reload_regs by pushing
1580 any nonnegative entries towards the end. That will leave room
1581 for the registers we already spilled.
1582
1583 Also, undo the marking of the spill registers from the last time
1584 	 around in FORBIDDEN_REGS since we will probably be allocating
1585 them again below.
1586
1587 ??? It is theoretically possible that we might end up not using one
1588 of our previously-spilled registers in this allocation, even though
1589 they are at the head of the list. It's not clear what to do about
1590 this, but it was no better before, when we marked off the needs met
1591 by the previously-spilled registers. With the current code, globals
1592 can be allocated into these registers, but locals cannot. */
1593
1594 if (n_spills)
1595 {
1596 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1597 if (potential_reload_regs[i] != -1)
1598 potential_reload_regs[j--] = potential_reload_regs[i];
32131a9c 1599
546b63fb
RK
1600 for (i = 0; i < n_spills; i++)
1601 {
1602 potential_reload_regs[i] = spill_regs[i];
1603 spill_reg_order[spill_regs[i]] = -1;
1604 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1605 }
32131a9c 1606
546b63fb
RK
1607 n_spills = 0;
1608 }
32131a9c
RK
1609
1610       /* Now find more reload regs to satisfy the remaining need.
1611 Do it by ascending class number, since otherwise a reg
1612 might be spilled for a big class and might fail to count
1613 for a smaller class even though it belongs to that class.
1614
1615 Count spilled regs in `spills', and add entries to
1616 `spill_regs' and `spill_reg_order'.
1617
1618 ??? Note there is a problem here.
1619 When there is a need for a group in a high-numbered class,
1620 and also need for non-group regs that come from a lower class,
1621 the non-group regs are chosen first. If there aren't many regs,
1622 they might leave no room for a group.
1623
1624 This was happening on the 386. To fix it, we added the code
1625 that calls possible_group_p, so that the lower class won't
1626 break up the last possible group.
1627
1628 Really fixing the problem would require changes above
1629 in counting the regs already spilled, and in choose_reload_regs.
1630 It might be hard to avoid introducing bugs there. */
1631
546b63fb
RK
1632 CLEAR_HARD_REG_SET (counted_for_groups);
1633 CLEAR_HARD_REG_SET (counted_for_nongroups);
1634
32131a9c
RK
1635 for (class = 0; class < N_REG_CLASSES; class++)
1636 {
1637 /* First get the groups of registers.
1638 If we got single registers first, we might fragment
1639 possible groups. */
1640 while (max_groups[class] > 0)
1641 {
1642 /* If any single spilled regs happen to form groups,
1643 count them now. Maybe we don't really need
1644 to spill another group. */
1645 count_possible_groups (group_size, group_mode, max_groups);
1646
93193ab5
RK
1647 if (max_groups[class] <= 0)
1648 break;
1649
32131a9c
RK
1650 /* Groups of size 2 (the only groups used on most machines)
1651 are treated specially. */
1652 if (group_size[class] == 2)
1653 {
1654 /* First, look for a register that will complete a group. */
1655 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1656 {
32131a9c 1657 int other;
546b63fb
RK
1658
1659 j = potential_reload_regs[i];
32131a9c
RK
1660 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1661 &&
1662 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1663 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1664 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1665 && HARD_REGNO_MODE_OK (other, group_mode[class])
1666 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1667 other)
1668 /* We don't want one part of another group.
1669 We could get "two groups" that overlap! */
1670 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1671 ||
1672 (j < FIRST_PSEUDO_REGISTER - 1
1673 && (other = j + 1, spill_reg_order[other] >= 0)
1674 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1675 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1676 && HARD_REGNO_MODE_OK (j, group_mode[class])
1677 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1678 other)
1679 && ! TEST_HARD_REG_BIT (counted_for_groups,
1680 other))))
1681 {
1682 register enum reg_class *p;
1683
1684 /* We have found one that will complete a group,
1685 so count off one group as provided. */
1686 max_groups[class]--;
1687 p = reg_class_superclasses[class];
1688 while (*p != LIM_REG_CLASSES)
1689 max_groups[(int) *p++]--;
1690
1691 /* Indicate both these regs are part of a group. */
1692 SET_HARD_REG_BIT (counted_for_groups, j);
1693 SET_HARD_REG_BIT (counted_for_groups, other);
1694 break;
1695 }
1696 }
1697 /* We can't complete a group, so start one. */
92b0556d
RS
1698#ifdef SMALL_REGISTER_CLASSES
1699 /* Look for a pair neither of which is explicitly used. */
1700 if (i == FIRST_PSEUDO_REGISTER)
1701 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1702 {
1703 int k;
1704 j = potential_reload_regs[i];
1705 /* Verify that J+1 is a potential reload reg. */
1706 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1707 if (potential_reload_regs[k] == j + 1)
1708 break;
1709 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1710 && k < FIRST_PSEUDO_REGISTER
1711 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1712 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1713 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1714 && HARD_REGNO_MODE_OK (j, group_mode[class])
1715 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1716 j + 1)
1717 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1718 /* Reject J at this stage
1719 if J+1 was explicitly used. */
1720 && ! regs_explicitly_used[j + 1])
1721 break;
1722 }
1723#endif
1724 /* Now try any group at all
1725 whose registers are not in bad_spill_regs. */
32131a9c
RK
1726 if (i == FIRST_PSEUDO_REGISTER)
1727 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1728 {
57697575 1729 int k;
546b63fb 1730 j = potential_reload_regs[i];
57697575
RS
1731 /* Verify that J+1 is a potential reload reg. */
1732 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1733 if (potential_reload_regs[k] == j + 1)
1734 break;
32131a9c 1735 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
57697575 1736 && k < FIRST_PSEUDO_REGISTER
32131a9c
RK
1737 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1738 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1739 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1740 && HARD_REGNO_MODE_OK (j, group_mode[class])
1741 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
ee9f63c6
RS
1742 j + 1)
1743 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
32131a9c
RK
1744 break;
1745 }
1746
1747 /* I should be the index in potential_reload_regs
1748 of the new reload reg we have found. */
1749
5352b11a
RS
1750 if (i >= FIRST_PSEUDO_REGISTER)
1751 {
1752 /* There are no groups left to spill. */
1753 spill_failure (max_groups_insn[class]);
1754 failure = 1;
1755 goto failed;
1756 }
1757 else
1758 something_changed
fb3821f7 1759 |= new_spill_reg (i, class, max_needs, NULL_PTR,
5352b11a 1760 global, dumpfile);
32131a9c
RK
1761 }
1762 else
1763 {
1764 /* For groups of more than 2 registers,
1765 look for a sufficient sequence of unspilled registers,
1766 and spill them all at once. */
1767 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1768 {
32131a9c 1769 int k;
546b63fb
RK
1770
1771 j = potential_reload_regs[i];
9d1a4667
RS
1772 if (j >= 0
1773 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
32131a9c
RK
1774 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1775 {
1776 /* Check each reg in the sequence. */
1777 for (k = 0; k < group_size[class]; k++)
1778 if (! (spill_reg_order[j + k] < 0
1779 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1780 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1781 break;
1782 /* We got a full sequence, so spill them all. */
1783 if (k == group_size[class])
1784 {
1785 register enum reg_class *p;
1786 for (k = 0; k < group_size[class]; k++)
1787 {
1788 int idx;
1789 SET_HARD_REG_BIT (counted_for_groups, j + k);
1790 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1791 if (potential_reload_regs[idx] == j + k)
1792 break;
9d1a4667
RS
1793 something_changed
1794 |= new_spill_reg (idx, class,
1795 max_needs, NULL_PTR,
1796 global, dumpfile);
32131a9c
RK
1797 }
1798
1799 /* We have found one that will complete a group,
1800 so count off one group as provided. */
1801 max_groups[class]--;
1802 p = reg_class_superclasses[class];
1803 while (*p != LIM_REG_CLASSES)
1804 max_groups[(int) *p++]--;
1805
1806 break;
1807 }
1808 }
1809 }
fa52261e 1810 /* We couldn't find any registers for this reload.
9d1a4667
RS
1811 Avoid going into an infinite loop. */
1812 if (i >= FIRST_PSEUDO_REGISTER)
1813 {
1814 /* There are no groups left. */
1815 spill_failure (max_groups_insn[class]);
1816 failure = 1;
1817 goto failed;
1818 }
32131a9c
RK
1819 }
1820 }
1821
1822 /* Now similarly satisfy all need for single registers. */
1823
1824 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1825 {
9a6cde3a
RS
1826#ifdef SMALL_REGISTER_CLASSES
1827 /* This should be right for all machines, but only the 386
1828 is known to need it, so this conditional plays safe.
1829 ??? For 2.5, try making this unconditional. */
1830 /* If we spilled enough regs, but they weren't counted
1831 against the non-group need, see if we can count them now.
1832 If so, we can avoid some actual spilling. */
1833 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1834 for (i = 0; i < n_spills; i++)
1835 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1836 spill_regs[i])
1837 && !TEST_HARD_REG_BIT (counted_for_groups,
1838 spill_regs[i])
1839 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1840 spill_regs[i])
1841 && max_nongroups[class] > 0)
1842 {
1843 register enum reg_class *p;
1844
1845 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1846 max_nongroups[class]--;
1847 p = reg_class_superclasses[class];
1848 while (*p != LIM_REG_CLASSES)
1849 max_nongroups[(int) *p++]--;
1850 }
1851 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1852 break;
1853#endif
1854
32131a9c
RK
1855 /* Consider the potential reload regs that aren't
1856 yet in use as reload regs, in order of preference.
1857 Find the most preferred one that's in this class. */
1858
1859 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1860 if (potential_reload_regs[i] >= 0
1861 && TEST_HARD_REG_BIT (reg_class_contents[class],
1862 potential_reload_regs[i])
1863 /* If this reg will not be available for groups,
1864 pick one that does not foreclose possible groups.
1865 This is a kludge, and not very general,
1866 but it should be sufficient to make the 386 work,
1867 and the problem should not occur on machines with
1868 more registers. */
1869 && (max_nongroups[class] == 0
1870 || possible_group_p (potential_reload_regs[i], max_groups)))
1871 break;
1872
e404a39a
RK
1873 /* If we couldn't get a register, try to get one even if we
1874 might foreclose possible groups. This may cause problems
1875 later, but that's better than aborting now, since it is
1876 possible that we will, in fact, be able to form the needed
1877 group even with this allocation. */
1878
1879 if (i >= FIRST_PSEUDO_REGISTER
1880 && (asm_noperands (max_needs[class] > 0
1881 ? max_needs_insn[class]
1882 : max_nongroups_insn[class])
1883 < 0))
1884 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1885 if (potential_reload_regs[i] >= 0
1886 && TEST_HARD_REG_BIT (reg_class_contents[class],
1887 potential_reload_regs[i]))
1888 break;
1889
32131a9c
RK
1890 /* I should be the index in potential_reload_regs
1891 of the new reload reg we have found. */
1892
5352b11a
RS
1893 if (i >= FIRST_PSEUDO_REGISTER)
1894 {
1895 /* There are no possible registers left to spill. */
1896 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1897 : max_nongroups_insn[class]);
1898 failure = 1;
1899 goto failed;
1900 }
1901 else
1902 something_changed
1903 |= new_spill_reg (i, class, max_needs, max_nongroups,
1904 global, dumpfile);
32131a9c
RK
1905 }
1906 }
1907 }
1908
1909 /* If global-alloc was run, notify it of any register eliminations we have
1910 done. */
1911 if (global)
1912 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1913 if (ep->can_eliminate)
1914 mark_elimination (ep->from, ep->to);
1915
32131a9c 1916 /* Insert code to save and restore call-clobbered hard regs
a8efe40d
RK
1917      around calls.  Tell what mode to use so that we will process
1918 those insns in reload_as_needed if we have to. */
32131a9c
RK
1919
1920 if (caller_save_needed)
a8efe40d
RK
1921 save_call_clobbered_regs (num_eliminable ? QImode
1922 : caller_save_spill_class != NO_REGS ? HImode
1923 : VOIDmode);
32131a9c
RK
1924
1925 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1926 If that insn didn't set the register (i.e., it copied the register to
1927 memory), just delete that insn instead of the equivalencing insn plus
1928 anything now dead. If we call delete_dead_insn on that insn, we may
1929      delete the insn that actually sets the register if the register dies
1930 there and that is incorrect. */
1931
1932 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1933 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1934 && GET_CODE (reg_equiv_init[i]) != NOTE)
1935 {
1936 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1937 delete_dead_insn (reg_equiv_init[i]);
1938 else
1939 {
1940 PUT_CODE (reg_equiv_init[i], NOTE);
1941 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1942 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1943 }
1944 }
1945
1946 /* Use the reload registers where necessary
1947 by generating move instructions to move the must-be-register
1948 values into or out of the reload registers. */
1949
a8efe40d
RK
1950 if (something_needs_reloads || something_needs_elimination
1951 || (caller_save_needed && num_eliminable)
1952 || caller_save_spill_class != NO_REGS)
32131a9c
RK
1953 reload_as_needed (first, global);
1954
2a1f8b6b 1955 /* If we were able to eliminate the frame pointer, show that it is no
546b63fb 1956      longer live at the start of any basic block.  If it is live by
2a1f8b6b
RK
1957 virtue of being in a pseudo, that pseudo will be marked live
1958 and hence the frame pointer will be known to be live via that
1959 pseudo. */
1960
1961 if (! frame_pointer_needed)
1962 for (i = 0; i < n_basic_blocks; i++)
3ec2ea3e
DE
1963 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1964 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1965 % REGSET_ELT_BITS));
2a1f8b6b 1966
5352b11a
RS
1967 /* Come here (with failure set nonzero) if we can't get enough spill regs
1968 and we decide not to abort about it. */
1969 failed:
1970
a3ec87a8
RS
1971 reload_in_progress = 0;
1972
32131a9c
RK
1973 /* Now eliminate all pseudo regs by modifying them into
1974 their equivalent memory references.
1975 The REG-rtx's for the pseudos are modified in place,
1976 so all insns that used to refer to them now refer to memory.
1977
1978 For a reg that has a reg_equiv_address, all those insns
1979 were changed by reloading so that no insns refer to it any longer;
1980 but the DECL_RTL of a variable decl may refer to it,
1981 and if so this causes the debugging info to mention the variable. */
1982
1983 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1984 {
1985 rtx addr = 0;
ab1fd483 1986 int in_struct = 0;
32131a9c 1987 if (reg_equiv_mem[i])
ab1fd483
RS
1988 {
1989 addr = XEXP (reg_equiv_mem[i], 0);
1990 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1991 }
32131a9c
RK
1992 if (reg_equiv_address[i])
1993 addr = reg_equiv_address[i];
1994 if (addr)
1995 {
1996 if (reg_renumber[i] < 0)
1997 {
1998 rtx reg = regno_reg_rtx[i];
1999 XEXP (reg, 0) = addr;
2000 REG_USERVAR_P (reg) = 0;
ab1fd483 2001 MEM_IN_STRUCT_P (reg) = in_struct;
32131a9c
RK
2002 PUT_CODE (reg, MEM);
2003 }
2004 else if (reg_equiv_mem[i])
2005 XEXP (reg_equiv_mem[i], 0) = addr;
2006 }
2007 }
2008
2009#ifdef PRESERVE_DEATH_INFO_REGNO_P
2010 /* Make a pass over all the insns and remove death notes for things that
2011 are no longer registers or no longer die in the insn (e.g., an input
2012 and output pseudo being tied). */
2013
2014 for (insn = first; insn; insn = NEXT_INSN (insn))
2015 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2016 {
2017 rtx note, next;
2018
2019 for (note = REG_NOTES (insn); note; note = next)
2020 {
2021 next = XEXP (note, 1);
2022 if (REG_NOTE_KIND (note) == REG_DEAD
2023 && (GET_CODE (XEXP (note, 0)) != REG
2024 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2025 remove_note (insn, note);
2026 }
2027 }
2028#endif
2029
2030 /* Indicate that we no longer have known memory locations or constants. */
2031 reg_equiv_constant = 0;
2032 reg_equiv_memory_loc = 0;
5352b11a 2033
c8ab4464
RS
2034 if (scratch_list)
2035 free (scratch_list);
c307c237 2036 scratch_list = 0;
c8ab4464
RS
2037 if (scratch_block)
2038 free (scratch_block);
c307c237
RK
2039 scratch_block = 0;
2040
5352b11a 2041 return failure;
32131a9c
RK
2042}
2043\f
2044/* Nonzero if, after spilling reg REGNO for non-groups,
2045 it will still be possible to find a group if we still need one. */
2046
2047static int
2048possible_group_p (regno, max_groups)
2049 int regno;
2050 int *max_groups;
2051{
2052 int i;
2053 int class = (int) NO_REGS;
2054
2055 for (i = 0; i < (int) N_REG_CLASSES; i++)
2056 if (max_groups[i] > 0)
2057 {
2058 class = i;
2059 break;
2060 }
2061
2062 if (class == (int) NO_REGS)
2063 return 1;
2064
2065 /* Consider each pair of consecutive registers. */
2066 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2067 {
2068 /* Ignore pairs that include reg REGNO. */
2069 if (i == regno || i + 1 == regno)
2070 continue;
2071
2072 /* Ignore pairs that are outside the class that needs the group.
2073 ??? Here we fail to handle the case where two different classes
2074 independently need groups. But this never happens with our
2075 current machine descriptions. */
2076 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2077 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2078 continue;
2079
2080 /* A pair of consecutive regs we can still spill does the trick. */
2081 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2082 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2083 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2084 return 1;
2085
2086 /* A pair of one already spilled and one we can spill does it
2087 provided the one already spilled is not otherwise reserved. */
2088 if (spill_reg_order[i] < 0
2089 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2090 && spill_reg_order[i + 1] >= 0
2091 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2092 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2093 return 1;
2094 if (spill_reg_order[i + 1] < 0
2095 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2096 && spill_reg_order[i] >= 0
2097 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2098 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2099 return 1;
2100 }
2101
2102 return 0;
2103}
2104\f
2105/* Count any groups that can be formed from the registers recently spilled.
2106 This is done class by class, in order of ascending class number. */
2107
2108static void
2109count_possible_groups (group_size, group_mode, max_groups)
546b63fb 2110 int *group_size;
32131a9c 2111 enum machine_mode *group_mode;
546b63fb 2112 int *max_groups;
32131a9c
RK
2113{
2114 int i;
2115 /* Now find all consecutive groups of spilled registers
2116 and mark each group off against the need for such groups.
2117 But don't count them against ordinary need, yet. */
2118
2119 for (i = 0; i < N_REG_CLASSES; i++)
2120 if (group_size[i] > 1)
2121 {
93193ab5 2122 HARD_REG_SET new;
32131a9c
RK
2123 int j;
2124
93193ab5
RK
2125 CLEAR_HARD_REG_SET (new);
2126
32131a9c
RK
2127 /* Make a mask of all the regs that are spill regs in class I. */
2128 for (j = 0; j < n_spills; j++)
2129 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
2130 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
2131 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
2132 spill_regs[j]))
93193ab5
RK
2133 SET_HARD_REG_BIT (new, spill_regs[j]);
2134
32131a9c
RK
2135 /* Find each consecutive group of them. */
2136 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
93193ab5
RK
2137 if (TEST_HARD_REG_BIT (new, j)
2138 && j + group_size[i] <= FIRST_PSEUDO_REGISTER
32131a9c
RK
2139 /* Next line in case group-mode for this class
2140 demands an even-odd pair. */
2141 && HARD_REGNO_MODE_OK (j, group_mode[i]))
2142 {
2143 int k;
2144 for (k = 1; k < group_size[i]; k++)
93193ab5 2145 if (! TEST_HARD_REG_BIT (new, j + k))
32131a9c
RK
2146 break;
2147 if (k == group_size[i])
2148 {
2149 /* We found a group. Mark it off against this class's
2150 need for groups, and against each superclass too. */
2151 register enum reg_class *p;
2152 max_groups[i]--;
2153 p = reg_class_superclasses[i];
2154 while (*p != LIM_REG_CLASSES)
2155 max_groups[(int) *p++]--;
a8fdc208 2156 /* Don't count these registers again. */
32131a9c
RK
2157 for (k = 0; k < group_size[i]; k++)
2158 SET_HARD_REG_BIT (counted_for_groups, j + k);
2159 }
fa52261e
RS
2160 /* Skip to the last reg in this group. When j is incremented
2161 above, it will then point to the first reg of the next
2162 possible group. */
2163 j += k - 1;
32131a9c
RK
2164 }
2165 }
2166
2167}
2168\f
2169/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2170 another mode that needs to be reloaded for the same register class CLASS.
2171 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2172 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2173
2174 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2175 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2176 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2177 causes unnecessary failures on machines requiring alignment of register
2178 groups when the two modes are different sizes, because the larger mode has
2179 more strict alignment rules than the smaller mode. */
2180
2181static int
2182modes_equiv_for_class_p (allocate_mode, other_mode, class)
2183 enum machine_mode allocate_mode, other_mode;
2184 enum reg_class class;
2185{
2186 register int regno;
2187 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2188 {
2189 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2190 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2191 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2192 return 0;
2193 }
2194 return 1;
2195}
2196
5352b11a
RS
2197/* Handle the failure to find a register to spill.
2198 INSN should be one of the insns which needed this particular spill reg. */
2199
2200static void
2201spill_failure (insn)
2202 rtx insn;
2203{
2204 if (asm_noperands (PATTERN (insn)) >= 0)
2205 error_for_asm (insn, "`asm' needs too many reloads");
2206 else
2207 abort ();
2208}
2209
32131a9c
RK
2210/* Add a new register to the tables of available spill-registers
2211 (as well as spilling all pseudos allocated to the register).
2212 I is the index of this register in potential_reload_regs.
2213 CLASS is the regclass whose need is being satisfied.
2214 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2215 so that this register can count off against them.
2216 MAX_NONGROUPS is 0 if this register is part of a group.
2217 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2218
2219static int
2220new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2221 int i;
2222 int class;
2223 int *max_needs;
2224 int *max_nongroups;
2225 int global;
2226 FILE *dumpfile;
2227{
2228 register enum reg_class *p;
2229 int val;
2230 int regno = potential_reload_regs[i];
2231
2232 if (i >= FIRST_PSEUDO_REGISTER)
2233 abort (); /* Caller failed to find any register. */
2234
2235 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2236 fatal ("fixed or forbidden register was spilled.\n\
2237This may be due to a compiler bug or to impossible asm statements.");
2238
2239 /* Make reg REGNO an additional reload reg. */
2240
2241 potential_reload_regs[i] = -1;
2242 spill_regs[n_spills] = regno;
2243 spill_reg_order[regno] = n_spills;
2244 if (dumpfile)
2245 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2246
2247 /* Clear off the needs we just satisfied. */
2248
2249 max_needs[class]--;
2250 p = reg_class_superclasses[class];
2251 while (*p != LIM_REG_CLASSES)
2252 max_needs[(int) *p++]--;
2253
2254 if (max_nongroups && max_nongroups[class] > 0)
2255 {
2256 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2257 max_nongroups[class]--;
2258 p = reg_class_superclasses[class];
2259 while (*p != LIM_REG_CLASSES)
2260 max_nongroups[(int) *p++]--;
2261 }
2262
2263 /* Spill every pseudo reg that was allocated to this reg
2264 or to something that overlaps this reg. */
2265
2266 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2267
2268 /* If there are some registers still to eliminate and this register
2269 wasn't ever used before, additional stack space may have to be
2270 allocated to store this register. Thus, we may have changed the offset
2271 between the stack and frame pointers, so mark that something has changed.
2272 (If new pseudos were spilled, thus requiring more space, VAL would have
2273 been set non-zero by the call to spill_hard_reg above since additional
2274    reloads may be needed in that case.)
2275
2276 One might think that we need only set VAL to 1 if this is a call-used
2277 register. However, the set of registers that must be saved by the
2278 prologue is not identical to the call-used set. For example, the
2279 register used by the call insn for the return PC is a call-used register,
2280 but must be saved by the prologue. */
2281 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2282 val = 1;
2283
2284 regs_ever_live[spill_regs[n_spills]] = 1;
2285 n_spills++;
2286
2287 return val;
2288}
2289\f
2290 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2291 data that is dead in INSN. */
2292
2293static void
2294delete_dead_insn (insn)
2295 rtx insn;
2296{
2297 rtx prev = prev_real_insn (insn);
2298 rtx prev_dest;
2299
2300 /* If the previous insn sets a register that dies in our insn, delete it
2301 too. */
2302 if (prev && GET_CODE (PATTERN (prev)) == SET
2303 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2304 && reg_mentioned_p (prev_dest, PATTERN (insn))
2305 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2306 delete_dead_insn (prev);
2307
2308 PUT_CODE (insn, NOTE);
2309 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2310 NOTE_SOURCE_FILE (insn) = 0;
2311}
2312
2313/* Modify the home of pseudo-reg I.
2314 The new home is present in reg_renumber[I].
2315
2316 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2317 or it may be -1, meaning there is none or it is not relevant.
2318 This is used so that all pseudos spilled from a given hard reg
2319 can share one stack slot. */
2320
2321static void
2322alter_reg (i, from_reg)
2323 register int i;
2324 int from_reg;
2325{
2326 /* When outputting an inline function, this can happen
2327 for a reg that isn't actually used. */
2328 if (regno_reg_rtx[i] == 0)
2329 return;
2330
2331 /* If the reg got changed to a MEM at rtl-generation time,
2332 ignore it. */
2333 if (GET_CODE (regno_reg_rtx[i]) != REG)
2334 return;
2335
2336 /* Modify the reg-rtx to contain the new hard reg
2337 number or else to contain its pseudo reg number. */
2338 REGNO (regno_reg_rtx[i])
2339 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2340
2341 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2342 allocate a stack slot for it. */
2343
2344 if (reg_renumber[i] < 0
2345 && reg_n_refs[i] > 0
2346 && reg_equiv_constant[i] == 0
2347 && reg_equiv_memory_loc[i] == 0)
2348 {
2349 register rtx x;
2350 int inherent_size = PSEUDO_REGNO_BYTES (i);
2351 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2352 int adjust = 0;
2353
2354 /* Each pseudo reg has an inherent size which comes from its own mode,
2355 and a total size which provides room for paradoxical subregs
2356 which refer to the pseudo reg in wider modes.
2357
2358 We can use a slot already allocated if it provides both
2359 enough inherent space and enough total space.
2360 Otherwise, we allocate a new slot, making sure that it has no less
2361 	 inherent space, and no less total space, than the previous slot.  */
2362 if (from_reg == -1)
2363 {
2364 /* No known place to spill from => no slot to reuse. */
2365 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2366#if BYTES_BIG_ENDIAN
2367 /* Cancel the big-endian correction done in assign_stack_local.
2368 Get the address of the beginning of the slot.
2369 This is so we can do a big-endian correction unconditionally
2370 below. */
2371 adjust = inherent_size - total_size;
2372#endif
2373 }
2374 /* Reuse a stack slot if possible. */
2375 else if (spill_stack_slot[from_reg] != 0
2376 && spill_stack_slot_width[from_reg] >= total_size
2377 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2378 >= inherent_size))
2379 x = spill_stack_slot[from_reg];
2380 /* Allocate a bigger slot. */
2381 else
2382 {
2383 /* Compute maximum size needed, both for inherent size
2384 and for total size. */
2385 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2386 if (spill_stack_slot[from_reg])
2387 {
2388 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2389 > inherent_size)
2390 mode = GET_MODE (spill_stack_slot[from_reg]);
2391 if (spill_stack_slot_width[from_reg] > total_size)
2392 total_size = spill_stack_slot_width[from_reg];
2393 }
2394 /* Make a slot with that size. */
2395 x = assign_stack_local (mode, total_size, -1);
2396#if BYTES_BIG_ENDIAN
2397 /* Cancel the big-endian correction done in assign_stack_local.
2398 Get the address of the beginning of the slot.
2399 This is so we can do a big-endian correction unconditionally
2400 below. */
2401 adjust = GET_MODE_SIZE (mode) - total_size;
2402#endif
2403 spill_stack_slot[from_reg] = x;
2404 spill_stack_slot_width[from_reg] = total_size;
2405 }
2406
2407#if BYTES_BIG_ENDIAN
2408 /* On a big endian machine, the "address" of the slot
2409 is the address of the low part that fits its inherent mode. */
2410 if (inherent_size < total_size)
2411 adjust += (total_size - inherent_size);
2412#endif /* BYTES_BIG_ENDIAN */
2413
2414 /* If we have any adjustment to make, or if the stack slot is the
2415 wrong mode, make a new stack slot. */
2416 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2417 {
2418 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2419 plus_constant (XEXP (x, 0), adjust));
2420 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2421 }
2422
2423 /* Save the stack slot for later. */
2424 reg_equiv_memory_loc[i] = x;
2425 }
2426}
2427
2428/* Mark the slots in regs_ever_live for the hard regs
2429 used by pseudo-reg number REGNO. */
2430
2431void
2432mark_home_live (regno)
2433 int regno;
2434{
2435 register int i, lim;
2436 i = reg_renumber[regno];
2437 if (i < 0)
2438 return;
2439 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2440 while (i < lim)
2441 regs_ever_live[i++] = 1;
2442}
c307c237
RK
2443
2444/* Mark the registers used in SCRATCH as being live. */
2445
2446static void
2447mark_scratch_live (scratch)
2448 rtx scratch;
2449{
2450 register int i;
2451 int regno = REGNO (scratch);
2452 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2453
2454 for (i = regno; i < lim; i++)
2455 regs_ever_live[i] = 1;
2456}
32131a9c
RK
2457\f
2458/* This function handles the tracking of elimination offsets around branches.
2459
2460 X is a piece of RTL being scanned.
2461
2462 INSN is the insn that it came from, if any.
2463
2464 INITIAL_P is non-zero if we are to set the offset to be the initial
2465 offset and zero if we are setting the offset of the label to be the
2466 current offset. */
2467
2468static void
2469set_label_offsets (x, insn, initial_p)
2470 rtx x;
2471 rtx insn;
2472 int initial_p;
2473{
2474 enum rtx_code code = GET_CODE (x);
2475 rtx tem;
2476 int i;
2477 struct elim_table *p;
2478
2479 switch (code)
2480 {
2481 case LABEL_REF:
8be386d9
RS
2482 if (LABEL_REF_NONLOCAL_P (x))
2483 return;
2484
32131a9c
RK
2485 x = XEXP (x, 0);
2486
2487 /* ... fall through ... */
2488
2489 case CODE_LABEL:
2490 /* If we know nothing about this label, set the desired offsets. Note
2491 that this sets the offset at a label to be the offset before a label
2492 if we don't know anything about the label. This is not correct for
2493 the label after a BARRIER, but is the best guess we can make. If
2494 we guessed wrong, we will suppress an elimination that might have
2495 been possible had we been able to guess correctly. */
2496
2497 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2498 {
2499 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2500 offsets_at[CODE_LABEL_NUMBER (x)][i]
2501 = (initial_p ? reg_eliminate[i].initial_offset
2502 : reg_eliminate[i].offset);
2503 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2504 }
2505
2506 /* Otherwise, if this is the definition of a label and it is
d45cf215 2507 preceded by a BARRIER, set our offsets to the known offset of
32131a9c
RK
2508 that label. */
2509
2510 else if (x == insn
2511 && (tem = prev_nonnote_insn (insn)) != 0
2512 && GET_CODE (tem) == BARRIER)
2a4b5f3b
RK
2513 {
2514 num_not_at_initial_offset = 0;
2515 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2516 {
2517 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2518 = offsets_at[CODE_LABEL_NUMBER (x)][i];
1d0d98f3
RK
2519 if (reg_eliminate[i].can_eliminate
2520 && (reg_eliminate[i].offset
2521 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
2522 num_not_at_initial_offset++;
2523 }
2524 }
32131a9c
RK
2525
2526 else
2527 /* If neither of the above cases is true, compare each offset
2528 with those previously recorded and suppress any eliminations
2529 where the offsets disagree. */
a8fdc208 2530
32131a9c
RK
2531 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2532 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2533 != (initial_p ? reg_eliminate[i].initial_offset
2534 : reg_eliminate[i].offset))
2535 reg_eliminate[i].can_eliminate = 0;
2536
2537 return;
2538
2539 case JUMP_INSN:
2540 set_label_offsets (PATTERN (insn), insn, initial_p);
2541
2542 /* ... fall through ... */
2543
2544 case INSN:
2545 case CALL_INSN:
2546 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2547 and hence must have all eliminations at their initial offsets. */
2548 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2549 if (REG_NOTE_KIND (tem) == REG_LABEL)
2550 set_label_offsets (XEXP (tem, 0), insn, 1);
2551 return;
2552
2553 case ADDR_VEC:
2554 case ADDR_DIFF_VEC:
2555 /* Each of the labels in the address vector must be at their initial
2556 	 offsets.  We want the first field for ADDR_VEC and the second
2557 field for ADDR_DIFF_VEC. */
2558
2559 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2560 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2561 insn, initial_p);
2562 return;
2563
2564 case SET:
2565 /* We only care about setting PC. If the source is not RETURN,
2566 IF_THEN_ELSE, or a label, disable any eliminations not at
2567 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2568 isn't one of those possibilities. For branches to a label,
2569 call ourselves recursively.
2570
2571 Note that this can disable elimination unnecessarily when we have
2572 a non-local goto since it will look like a non-constant jump to
2573 someplace in the current function. This isn't a significant
2574 problem since such jumps will normally be when all elimination
2575 pairs are back to their initial offsets. */
2576
2577 if (SET_DEST (x) != pc_rtx)
2578 return;
2579
2580 switch (GET_CODE (SET_SRC (x)))
2581 {
2582 case PC:
2583 case RETURN:
2584 return;
2585
2586 case LABEL_REF:
2587 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2588 return;
2589
2590 case IF_THEN_ELSE:
2591 tem = XEXP (SET_SRC (x), 1);
2592 if (GET_CODE (tem) == LABEL_REF)
2593 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2594 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2595 break;
2596
2597 tem = XEXP (SET_SRC (x), 2);
2598 if (GET_CODE (tem) == LABEL_REF)
2599 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2600 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2601 break;
2602 return;
2603 }
2604
2605 /* If we reach here, all eliminations must be at their initial
2606 offset because we are doing a jump to a variable address. */
2607 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2608 if (p->offset != p->initial_offset)
2609 p->can_eliminate = 0;
2610 }
2611}
2612\f
2613 /* Used for communication between the next two functions to properly share
2614 the vector for an ASM_OPERANDS. */
2615
2616static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2617
a8fdc208 2618/* Scan X and replace any eliminable registers (such as fp) with a
32131a9c
RK
2619 replacement (such as sp), plus an offset.
2620
2621 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2622 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2623 MEM, we are allowed to replace a sum of a register and the constant zero
2624 with the register, which we cannot do outside a MEM. In addition, we need
2625 to record the fact that a register is referenced outside a MEM.
2626
ff32812a 2627 If INSN is an insn, it is the insn containing X. If we replace a REG
32131a9c
RK
2628 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2629 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2630    the REG is being modified.
2631
ff32812a
RS
2632 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2633 That's used when we eliminate in expressions stored in notes.
2634 This means, do not set ref_outside_mem even if the reference
2635 is outside of MEMs.
2636
32131a9c
RK
2637 If we see a modification to a register we know about, take the
2638 appropriate action (see case SET, below).
2639
2640    REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2641 replacements done assuming all offsets are at their initial values. If
2642 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2643 encounter, return the actual location so that find_reloads will do
2644 the proper thing. */
2645
2646rtx
2647eliminate_regs (x, mem_mode, insn)
2648 rtx x;
2649 enum machine_mode mem_mode;
2650 rtx insn;
2651{
2652 enum rtx_code code = GET_CODE (x);
2653 struct elim_table *ep;
2654 int regno;
2655 rtx new;
2656 int i, j;
2657 char *fmt;
2658 int copied = 0;
2659
2660 switch (code)
2661 {
2662 case CONST_INT:
2663 case CONST_DOUBLE:
2664 case CONST:
2665 case SYMBOL_REF:
2666 case CODE_LABEL:
2667 case PC:
2668 case CC0:
2669 case ASM_INPUT:
2670 case ADDR_VEC:
2671 case ADDR_DIFF_VEC:
2672 case RETURN:
2673 return x;
2674
2675 case REG:
2676 regno = REGNO (x);
2677
2678 /* First handle the case where we encounter a bare register that
2679 is eliminable. Replace it with a PLUS. */
2680 if (regno < FIRST_PSEUDO_REGISTER)
2681 {
2682 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2683 ep++)
2684 if (ep->from_rtx == x && ep->can_eliminate)
2685 {
ff32812a
RS
2686 if (! mem_mode
2687 /* Refs inside notes don't count for this purpose. */
fe089a90 2688 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
ff32812a 2689 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2690 ep->ref_outside_mem = 1;
2691 return plus_constant (ep->to_rtx, ep->previous_offset);
2692 }
2693
2694 }
2695 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2696 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2697 {
2698 /* In this case, find_reloads would attempt to either use an
2699 incorrect address (if something is not at its initial offset)
2700 	     incorrect address (if something is not at its initial offset)
2700 	     or substitute a replaced address into an insn (which loses
2701 if the offset is changed by some later action). So we simply
2702 return the replaced stack slot (assuming it is changed by
2703 elimination) and ignore the fact that this is actually a
2704 reference to the pseudo. Ensure we make a copy of the
2705 address in case it is shared. */
fb3821f7 2706 new = eliminate_regs (reg_equiv_memory_loc[regno],
e5687447 2707 mem_mode, insn);
32131a9c 2708 if (new != reg_equiv_memory_loc[regno])
208dffa5
RS
2709 {
2710 cannot_omit_stores[regno] = 1;
2711 return copy_rtx (new);
2712 }
32131a9c
RK
2713 }
2714 return x;
2715
2716 case PLUS:
2717 /* If this is the sum of an eliminable register and a constant, rework
2718 the sum. */
2719 if (GET_CODE (XEXP (x, 0)) == REG
2720 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2721 && CONSTANT_P (XEXP (x, 1)))
2722 {
2723 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2724 ep++)
2725 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2726 {
e5687447
JW
2727 if (! mem_mode
2728 /* Refs inside notes don't count for this purpose. */
2729 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2730 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2731 ep->ref_outside_mem = 1;
2732
2733 /* The only time we want to replace a PLUS with a REG (this
2734 occurs when the constant operand of the PLUS is the negative
2735 of the offset) is when we are inside a MEM. We won't want
2736 to do so at other times because that would change the
2737 structure of the insn in a way that reload can't handle.
2738 We special-case the commonest situation in
2739 eliminate_regs_in_insn, so just replace a PLUS with a
2740 PLUS here, unless inside a MEM. */
a23b64d5 2741 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
32131a9c
RK
2742 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2743 return ep->to_rtx;
2744 else
2745 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2746 plus_constant (XEXP (x, 1),
2747 ep->previous_offset));
2748 }
2749
2750 /* If the register is not eliminable, we are done since the other
2751 operand is a constant. */
2752 return x;
2753 }
2754
2755 /* If this is part of an address, we want to bring any constant to the
2756 outermost PLUS. We will do this by doing register replacement in
2757 our operands and seeing if a constant shows up in one of them.
2758
2759 We assume here this is part of an address (or a "load address" insn)
2760 since an eliminable register is not likely to appear in any other
2761 context.
2762
2763 If we have (plus (eliminable) (reg)), we want to produce
2764 	 (plus (plus (replacement) (reg)) (const)).  If this was part of a
2765 normal add insn, (plus (replacement) (reg)) will be pushed as a
2766 reload. This is the desired action. */
2767
2768 {
e5687447
JW
2769 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2770 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
32131a9c
RK
2771
2772 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2773 {
2774 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2775 didn't get a hard register but has a reg_equiv_constant,
32131a9c
RK
2776 we must replace the constant here since it may no longer
2777 be in the position of any operand. */
2778 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2779 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2780 && reg_renumber[REGNO (new1)] < 0
2781 && reg_equiv_constant != 0
2782 && reg_equiv_constant[REGNO (new1)] != 0)
2783 new1 = reg_equiv_constant[REGNO (new1)];
2784 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2785 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2786 && reg_renumber[REGNO (new0)] < 0
2787 && reg_equiv_constant[REGNO (new0)] != 0)
2788 new0 = reg_equiv_constant[REGNO (new0)];
2789
2790 new = form_sum (new0, new1);
2791
2792 /* As above, if we are not inside a MEM we do not want to
2793 turn a PLUS into something else. We might try to do so here
2794 for an addition of 0 if we aren't optimizing. */
2795 if (! mem_mode && GET_CODE (new) != PLUS)
2796 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2797 else
2798 return new;
2799 }
2800 }
2801 return x;
2802
2803 case EXPR_LIST:
2804 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2805 if (XEXP (x, 0))
2806 {
e5687447 2807 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
32131a9c
RK
2808 if (new != XEXP (x, 0))
2809 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2810 }
2811
2812 /* ... fall through ... */
2813
2814 case INSN_LIST:
2815 /* Now do eliminations in the rest of the chain. If this was
2816 an EXPR_LIST, this might result in allocating more memory than is
2817 strictly needed, but it simplifies the code. */
2818 if (XEXP (x, 1))
2819 {
e5687447 2820 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
32131a9c 2821 if (new != XEXP (x, 1))
8c15858f 2822 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
32131a9c
RK
2823 }
2824 return x;
2825
2826 case CALL:
2827 case COMPARE:
2828 case MINUS:
2829 case MULT:
2830 case DIV: case UDIV:
2831 case MOD: case UMOD:
2832 case AND: case IOR: case XOR:
2833 case LSHIFT: case ASHIFT: case ROTATE:
2834 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2835 case NE: case EQ:
2836 case GE: case GT: case GEU: case GTU:
2837 case LE: case LT: case LEU: case LTU:
2838 {
e5687447 2839 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
fb3821f7 2840 rtx new1
e5687447 2841 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
32131a9c
RK
2842
2843 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2844 return gen_rtx (code, GET_MODE (x), new0, new1);
2845 }
2846 return x;
2847
2848 case PRE_INC:
2849 case POST_INC:
2850 case PRE_DEC:
2851 case POST_DEC:
2852 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2853 if (ep->to_rtx == XEXP (x, 0))
2854 {
4c05b187
RK
2855 int size = GET_MODE_SIZE (mem_mode);
2856
2857 /* If more bytes than MEM_MODE are pushed, account for them. */
2858#ifdef PUSH_ROUNDING
2859 if (ep->to_rtx == stack_pointer_rtx)
2860 size = PUSH_ROUNDING (size);
2861#endif
32131a9c 2862 if (code == PRE_DEC || code == POST_DEC)
4c05b187 2863 ep->offset += size;
32131a9c 2864 else
4c05b187 2865 ep->offset -= size;
32131a9c
RK
2866 }
2867
2868 /* Fall through to generic unary operation case. */
2869 case USE:
2870 case STRICT_LOW_PART:
2871 case NEG: case NOT:
2872 case SIGN_EXTEND: case ZERO_EXTEND:
2873 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2874 case FLOAT: case FIX:
2875 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2876 case ABS:
2877 case SQRT:
2878 case FFS:
e5687447 2879 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
32131a9c
RK
2880 if (new != XEXP (x, 0))
2881 return gen_rtx (code, GET_MODE (x), new);
2882 return x;
2883
2884 case SUBREG:
2885 /* Similar to above processing, but preserve SUBREG_WORD.
2886 Convert (subreg (mem)) to (mem) if not paradoxical.
2887 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2888 pseudo didn't get a hard reg, we must replace this with the
2889 eliminated version of the memory location because push_reloads
2890 may do the replacement in certain circumstances. */
2891 if (GET_CODE (SUBREG_REG (x)) == REG
2892 && (GET_MODE_SIZE (GET_MODE (x))
2893 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2894 && reg_equiv_memory_loc != 0
2895 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2896 {
2897 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
e5687447 2898 mem_mode, insn);
32131a9c
RK
2899
2900 /* If we didn't change anything, we must retain the pseudo. */
2901 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2902 new = XEXP (x, 0);
2903 else
2904 /* Otherwise, ensure NEW isn't shared in case we have to reload
2905 it. */
2906 new = copy_rtx (new);
2907 }
2908 else
e5687447 2909 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
32131a9c
RK
2910
2911 if (new != XEXP (x, 0))
2912 {
2913 if (GET_CODE (new) == MEM
2914 && (GET_MODE_SIZE (GET_MODE (x))
a3b75c07 2915 <= GET_MODE_SIZE (GET_MODE (new)))
e90d3cbb 2916#ifdef LOAD_EXTEND_OP
a3b75c07
RS
2917 /* On these machines we will be reloading what is
2918 inside the SUBREG if it originally was a pseudo and
2919 the inner and outer modes are both a word or
2920 smaller. So leave the SUBREG then. */
2921 && ! (GET_CODE (SUBREG_REG (x)) == REG
2922 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2923 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
2924#endif
2925 )
32131a9c
RK
2926 {
2927 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2928 enum machine_mode mode = GET_MODE (x);
2929
2930#if BYTES_BIG_ENDIAN
2931 offset += (MIN (UNITS_PER_WORD,
2932 GET_MODE_SIZE (GET_MODE (new)))
2933 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2934#endif
2935
2936 PUT_MODE (new, mode);
2937 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2938 return new;
2939 }
2940 else
2941 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2942 }
2943
2944 return x;
2945
2946 case CLOBBER:
2947 /* If clobbering a register that is the replacement register for an
d45cf215 2948 elimination we still think can be performed, note that it cannot
32131a9c
RK
2949 be performed. Otherwise, we need not be concerned about it. */
2950 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2951 if (ep->to_rtx == XEXP (x, 0))
2952 ep->can_eliminate = 0;
2953
e5687447 2954 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2045084c
JVA
2955 if (new != XEXP (x, 0))
2956 return gen_rtx (code, GET_MODE (x), new);
32131a9c
RK
2957 return x;
2958
2959 case ASM_OPERANDS:
2960 {
2961 rtx *temp_vec;
2962 /* Properly handle sharing input and constraint vectors. */
2963 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2964 {
2965 /* When we come to a new vector not seen before,
2966 scan all its elements; keep the old vector if none
2967 of them changes; otherwise, make a copy. */
2968 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2969 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2970 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2971 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
e5687447 2972 mem_mode, insn);
32131a9c
RK
2973
2974 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2975 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2976 break;
2977
2978 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2979 new_asm_operands_vec = old_asm_operands_vec;
2980 else
2981 new_asm_operands_vec
2982 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2983 }
2984
2985 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2986 if (new_asm_operands_vec == old_asm_operands_vec)
2987 return x;
2988
2989 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2990 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2991 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2992 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2993 ASM_OPERANDS_SOURCE_FILE (x),
2994 ASM_OPERANDS_SOURCE_LINE (x));
2995 new->volatil = x->volatil;
2996 return new;
2997 }
2998
2999 case SET:
3000 /* Check for setting a register that we know about. */
3001 if (GET_CODE (SET_DEST (x)) == REG)
3002 {
3003 /* See if this is setting the replacement register for an
a8fdc208 3004 elimination.
32131a9c 3005
3ec2ea3e
DE
3006 If DEST is the hard frame pointer, we do nothing because we
3007 assume that all assignments to the frame pointer are for
3008 non-local gotos and are being done at a time when they are valid
3009 and do not disturb anything else. Some machines want to
3010 eliminate a fake argument pointer (or even a fake frame pointer)
3011 with either the real frame or the stack pointer. Assignments to
3012 the hard frame pointer must not prevent this elimination. */
32131a9c
RK
3013
3014 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3015 ep++)
3016 if (ep->to_rtx == SET_DEST (x)
3ec2ea3e 3017 && SET_DEST (x) != hard_frame_pointer_rtx)
32131a9c 3018 {
6dc42e49 3019 /* If it is being incremented, adjust the offset. Otherwise,
32131a9c
RK
3020 this elimination can't be done. */
3021 rtx src = SET_SRC (x);
3022
3023 if (GET_CODE (src) == PLUS
3024 && XEXP (src, 0) == SET_DEST (x)
3025 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3026 ep->offset -= INTVAL (XEXP (src, 1));
3027 else
3028 ep->can_eliminate = 0;
3029 }
3030
3031 	  /* Now check to see if we are assigning to a register that can be
3032 eliminated. If so, it must be as part of a PARALLEL, since we
3033 will not have been called if this is a single SET. So indicate
3034 that we can no longer eliminate this reg. */
3035 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3036 ep++)
3037 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3038 ep->can_eliminate = 0;
3039 }
3040
3041 /* Now avoid the loop below in this common case. */
3042 {
e5687447
JW
3043 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3044 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
32131a9c 3045
ff32812a 3046 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
32131a9c
RK
3047 write a CLOBBER insn. */
3048 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
572ca60a
RS
3049 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3050 && GET_CODE (insn) != INSN_LIST)
32131a9c
RK
3051 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3052
3053 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3054 return gen_rtx (SET, VOIDmode, new0, new1);
3055 }
3056
3057 return x;
3058
3059 case MEM:
3060 /* Our only special processing is to pass the mode of the MEM to our
3061 recursive call and copy the flags. While we are here, handle this
3062 case more efficiently. */
e5687447 3063 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
32131a9c
RK
3064 if (new != XEXP (x, 0))
3065 {
3066 new = gen_rtx (MEM, GET_MODE (x), new);
3067 new->volatil = x->volatil;
3068 new->unchanging = x->unchanging;
3069 new->in_struct = x->in_struct;
3070 return new;
3071 }
3072 else
3073 return x;
3074 }
3075
3076 /* Process each of our operands recursively. If any have changed, make a
3077 copy of the rtx. */
3078 fmt = GET_RTX_FORMAT (code);
3079 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3080 {
3081 if (*fmt == 'e')
3082 {
e5687447 3083 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
32131a9c
RK
3084 if (new != XEXP (x, i) && ! copied)
3085 {
3086 rtx new_x = rtx_alloc (code);
3087 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3088 + (sizeof (new_x->fld[0])
3089 * GET_RTX_LENGTH (code))));
3090 x = new_x;
3091 copied = 1;
3092 }
3093 XEXP (x, i) = new;
3094 }
3095 else if (*fmt == 'E')
3096 {
3097 int copied_vec = 0;
3098 for (j = 0; j < XVECLEN (x, i); j++)
3099 {
3100 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3101 if (new != XVECEXP (x, i, j) && ! copied_vec)
3102 {
3103 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3104 &XVECEXP (x, i, 0));
3105 if (! copied)
3106 {
3107 rtx new_x = rtx_alloc (code);
3108 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3109 + (sizeof (new_x->fld[0])
3110 * GET_RTX_LENGTH (code))));
3111 x = new_x;
3112 copied = 1;
3113 }
3114 XVEC (x, i) = new_v;
3115 copied_vec = 1;
3116 }
3117 XVECEXP (x, i, j) = new;
3118 }
3119 }
3120 }
3121
3122 return x;
3123}
3124\f
3125/* Scan INSN and eliminate all eliminable registers in it.
3126
3127 If REPLACE is nonzero, do the replacement destructively. Also
 3128	   delete the insn as dead if it is setting an eliminable register.
3129
3130 If REPLACE is zero, do all our allocations in reload_obstack.
3131
3132 If no eliminations were done and this insn doesn't require any elimination
3133 processing (these are not identical conditions: it might be updating sp,
3134 but not referencing fp; this needs to be seen during reload_as_needed so
3135 that the offset between fp and sp can be taken into consideration), zero
3136 is returned. Otherwise, 1 is returned. */
3137
3138static int
3139eliminate_regs_in_insn (insn, replace)
3140 rtx insn;
3141 int replace;
3142{
3143 rtx old_body = PATTERN (insn);
3144 rtx new_body;
3145 int val = 0;
3146 struct elim_table *ep;
3147
3148 if (! replace)
3149 push_obstacks (&reload_obstack, &reload_obstack);
3150
3151 if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
3152 && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
3153 {
3154 /* Check for setting an eliminable register. */
3155 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3156 if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
3157 {
3158 /* In this case this insn isn't serving a useful purpose. We
3159 will delete it in reload_as_needed once we know that this
3160 elimination is, in fact, being done.
3161
 3162	       If REPLACE isn't set, we can't delete this insn, but needn't
3163 process it since it won't be used unless something changes. */
3164 if (replace)
3165 delete_dead_insn (insn);
3166 val = 1;
3167 goto done;
3168 }
3169
3170 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3171 in the insn is the negative of the offset in FROM. Substitute
3172 (set (reg) (reg to)) for the insn and change its code.
3173
 3174	 We have to do this here, rather than in eliminate_regs, so that we can
3175 change the insn code. */
3176
3177 if (GET_CODE (SET_SRC (old_body)) == PLUS
3178 && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
3179 && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
3180 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3181 ep++)
3182 if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
922d9d40 3183 && ep->can_eliminate)
32131a9c 3184 {
922d9d40
RK
3185 /* We must stop at the first elimination that will be used.
3186 If this one would replace the PLUS with a REG, do it
3187 now. Otherwise, quit the loop and let eliminate_regs
3188 do its normal replacement. */
3189 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
3190 {
3191 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3192 SET_DEST (old_body), ep->to_rtx);
3193 INSN_CODE (insn) = -1;
3194 val = 1;
3195 goto done;
3196 }
3197
3198 break;
32131a9c
RK
3199 }
3200 }
3201
3202 old_asm_operands_vec = 0;
3203
3204 /* Replace the body of this insn with a substituted form. If we changed
05b4c365 3205 something, return non-zero.
32131a9c
RK
3206
3207 If we are replacing a body that was a (set X (plus Y Z)), try to
3208 re-recognize the insn. We do this in case we had a simple addition
3209 but now can do this as a load-address. This saves an insn in this
3210 common case. */
3211
fb3821f7 3212 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
32131a9c
RK
3213 if (new_body != old_body)
3214 {
7c791b13
RK
3215 /* If we aren't replacing things permanently and we changed something,
3216 make another copy to ensure that all the RTL is new. Otherwise
 3217	 things can go wrong if find_reloads swaps commutative operands
3218 and one is inside RTL that has been copied while the other is not. */
3219
4d411872
RS
3220 /* Don't copy an asm_operands because (1) there's no need and (2)
3221 copy_rtx can't do it properly when there are multiple outputs. */
b84f9d9c 3222 if (! replace && asm_noperands (old_body) < 0)
7c791b13
RK
3223 new_body = copy_rtx (new_body);
3224
4a5d0fb5 3225 /* If we had a move insn but now we don't, rerecognize it. */
0ba846c7
RS
3226 if ((GET_CODE (old_body) == SET && GET_CODE (SET_SRC (old_body)) == REG
3227 && (GET_CODE (new_body) != SET
3228 || GET_CODE (SET_SRC (new_body)) != REG))
51b8cba1
JL
3229 /* If this was a load from or store to memory, compare
3230 the MEM in recog_operand to the one in the insn. If they
3231 are not equal, then rerecognize the insn. */
3232 || (GET_CODE (old_body) == SET
3233 && ((GET_CODE (SET_SRC (old_body)) == MEM
3234 && SET_SRC (old_body) != recog_operand[1])
3235 || (GET_CODE (SET_DEST (old_body)) == MEM
3236 && SET_DEST (old_body) != recog_operand[0])))
0ba846c7
RS
3237 /* If this was an add insn before, rerecognize. */
3238 ||
3239 (GET_CODE (old_body) == SET
3240 && GET_CODE (SET_SRC (old_body)) == PLUS))
4a5d0fb5
RS
3241 {
3242 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
0ba846c7
RS
3243 /* If recognition fails, store the new body anyway.
3244 It's normal to have recognition failures here
3245 due to bizarre memory addresses; reloading will fix them. */
3246 PATTERN (insn) = new_body;
4a5d0fb5 3247 }
0ba846c7 3248 else
32131a9c
RK
3249 PATTERN (insn) = new_body;
3250
32131a9c
RK
3251 val = 1;
3252 }
a8fdc208 3253
32131a9c
RK
3254 /* Loop through all elimination pairs. See if any have changed and
3255 recalculate the number not at initial offset.
3256
a8efe40d
RK
3257 Compute the maximum offset (minimum offset if the stack does not
3258 grow downward) for each elimination pair.
3259
32131a9c
RK
 3260     We also detect cases where register elimination cannot be done,
3261 namely, if a register would be both changed and referenced outside a MEM
3262 in the resulting insn since such an insn is often undefined and, even if
3263 not, we cannot know what meaning will be given to it. Note that it is
3264 valid to have a register used in an address in an insn that changes it
3265 (presumably with a pre- or post-increment or decrement).
3266
3267 If anything changes, return nonzero. */
3268
3269 num_not_at_initial_offset = 0;
3270 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3271 {
3272 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3273 ep->can_eliminate = 0;
3274
3275 ep->ref_outside_mem = 0;
3276
3277 if (ep->previous_offset != ep->offset)
3278 val = 1;
3279
3280 ep->previous_offset = ep->offset;
3281 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3282 num_not_at_initial_offset++;
a8efe40d
RK
3283
3284#ifdef STACK_GROWS_DOWNWARD
3285 ep->max_offset = MAX (ep->max_offset, ep->offset);
3286#else
3287 ep->max_offset = MIN (ep->max_offset, ep->offset);
3288#endif
32131a9c
RK
3289 }
3290
3291 done:
05b4c365
RK
 3292  /* If we changed something, perform elimination in REG_NOTES.  This is
3293 needed even when REPLACE is zero because a REG_DEAD note might refer
3294 to a register that we eliminate and could cause a different number
3295 of spill registers to be needed in the final reload pass than in
3296 the pre-passes. */
20748cab 3297 if (val && REG_NOTES (insn) != 0)
ff32812a 3298 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
05b4c365 3299
32131a9c
RK
3300 if (! replace)
3301 pop_obstacks ();
3302
3303 return val;
3304}
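
The special case handled above, where (set (reg) (plus (reg from) (const_int k))) collapses into a plain register copy when the current elimination offset is exactly -k, can be shown in isolation.  The sketch below is not GCC code; the struct and the function name are invented for this illustration.

#include <stdio.h>

/* A hypothetical, simplified view of one elimination pair: every use of
   FROM is rewritten as TO plus OFFSET.  */
struct elim_pair { int from; int to; long offset; };

/* If an insn computes FROM + K while the current elimination offset is
   exactly -K, the addition cancels and the source collapses to TO, so
   the insn can become a plain register copy.  Returns 1 and stores the
   new source register when that happens.  */
static int
collapse_plus_to_copy (const struct elim_pair *ep, int src_reg, long k,
                       int *new_src)
{
  if (src_reg == ep->from && ep->offset == -k)
    {
      *new_src = ep->to;
      return 1;
    }
  return 0;
}

int
main (void)
{
  /* Pretend reg 14 is being eliminated in favor of reg 15 - 8.  */
  struct elim_pair ep = { 14, 15, -8 };
  int new_src;

  if (collapse_plus_to_copy (&ep, 14, 8, &new_src))
    printf ("rewrite as a copy from reg %d\n", new_src);
  return 0;
}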
3305
3306/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3307 replacement we currently believe is valid, mark it as not eliminable if X
3308 modifies DEST in any way other than by adding a constant integer to it.
3309
3310 If DEST is the frame pointer, we do nothing because we assume that
3ec2ea3e
DE
3311 all assignments to the hard frame pointer are nonlocal gotos and are being
3312 done at a time when they are valid and do not disturb anything else.
32131a9c 3313 Some machines want to eliminate a fake argument pointer with either the
3ec2ea3e
DE
3314 frame or stack pointer. Assignments to the hard frame pointer must not
3315 prevent this elimination.
32131a9c
RK
3316
3317 Called via note_stores from reload before starting its passes to scan
3318 the insns of the function. */
3319
3320static void
3321mark_not_eliminable (dest, x)
3322 rtx dest;
3323 rtx x;
3324{
3325 register int i;
3326
3327 /* A SUBREG of a hard register here is just changing its mode. We should
3328 not see a SUBREG of an eliminable hard register, but check just in
3329 case. */
3330 if (GET_CODE (dest) == SUBREG)
3331 dest = SUBREG_REG (dest);
3332
3ec2ea3e 3333 if (dest == hard_frame_pointer_rtx)
32131a9c
RK
3334 return;
3335
3336 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3337 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3338 && (GET_CODE (x) != SET
3339 || GET_CODE (SET_SRC (x)) != PLUS
3340 || XEXP (SET_SRC (x), 0) != dest
3341 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3342 {
3343 reg_eliminate[i].can_eliminate_previous
3344 = reg_eliminate[i].can_eliminate = 0;
3345 num_eliminable--;
3346 }
3347}
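
As a standalone illustration of the test above: a store to the replacement register TO disqualifies the elimination unless the store has the shape (set to (plus to const)).  The sketch uses an invented, flattened description of a store rather than real rtl.

#include <stdio.h>

/* Invented, flattened description of a store, for this sketch only.  */
struct store_desc
{
  int dest_reg;       /* register being stored into */
  int src_is_plus;    /* nonzero if the source is (plus base const) */
  int plus_base_reg;  /* base register of that plus, if any */
};

/* Return nonzero if this store should make TO_REG non-eliminable,
   i.e. it modifies TO_REG other than by adding a constant to it.  */
static int
store_disqualifies (const struct store_desc *s, int to_reg)
{
  if (s->dest_reg != to_reg)
    return 0;                   /* does not touch the replacement reg */
  return ! (s->src_is_plus && s->plus_base_reg == to_reg);
}

int
main (void)
{
  struct store_desc clobber = { 15, 0, 0 };   /* reg 15 = something else */
  struct store_desc bump    = { 15, 1, 15 };  /* reg 15 = reg 15 + const */

  printf ("%d %d\n", store_disqualifies (&clobber, 15),
          store_disqualifies (&bump, 15));    /* prints "1 0" */
  return 0;
}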
3348\f
3349/* Kick all pseudos out of hard register REGNO.
3350 If GLOBAL is nonzero, try to find someplace else to put them.
3351 If DUMPFILE is nonzero, log actions taken on that file.
3352
3353 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
 3354    because we found we can't eliminate some register.  In that case, no pseudos
3355 are allowed to be in the register, even if they are only in a block that
3356 doesn't require spill registers, unlike the case when we are spilling this
3357 hard reg to produce another spill register.
3358
3359 Return nonzero if any pseudos needed to be kicked out. */
3360
3361static int
3362spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3363 register int regno;
3364 int global;
3365 FILE *dumpfile;
3366 int cant_eliminate;
3367{
c307c237 3368 enum reg_class class = REGNO_REG_CLASS (regno);
32131a9c
RK
3369 int something_changed = 0;
3370 register int i;
3371
3372 SET_HARD_REG_BIT (forbidden_regs, regno);
3373
3374 /* Spill every pseudo reg that was allocated to this reg
3375 or to something that overlaps this reg. */
3376
3377 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3378 if (reg_renumber[i] >= 0
3379 && reg_renumber[i] <= regno
a8fdc208 3380 && (reg_renumber[i]
32131a9c
RK
3381 + HARD_REGNO_NREGS (reg_renumber[i],
3382 PSEUDO_REGNO_MODE (i))
3383 > regno))
3384 {
32131a9c
RK
3385 /* If this register belongs solely to a basic block which needed no
3386 spilling of any class that this register is contained in,
3387 leave it be, unless we are spilling this register because
3388 it was a hard register that can't be eliminated. */
3389
3390 if (! cant_eliminate
3391 && basic_block_needs[0]
3392 && reg_basic_block[i] >= 0
3393 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3394 {
3395 enum reg_class *p;
3396
3397 for (p = reg_class_superclasses[(int) class];
3398 *p != LIM_REG_CLASSES; p++)
3399 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3400 break;
a8fdc208 3401
32131a9c
RK
3402 if (*p == LIM_REG_CLASSES)
3403 continue;
3404 }
3405
3406 /* Mark it as no longer having a hard register home. */
3407 reg_renumber[i] = -1;
3408 /* We will need to scan everything again. */
3409 something_changed = 1;
3410 if (global)
3411 retry_global_alloc (i, forbidden_regs);
3412
3413 alter_reg (i, regno);
3414 if (dumpfile)
3415 {
3416 if (reg_renumber[i] == -1)
3417 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3418 else
3419 fprintf (dumpfile, " Register %d now in %d.\n\n",
3420 i, reg_renumber[i]);
3421 }
3422 }
c307c237
RK
3423 for (i = 0; i < scratch_list_length; i++)
3424 {
3425 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3426 {
3427 if (! cant_eliminate && basic_block_needs[0]
3428 && ! basic_block_needs[(int) class][scratch_block[i]])
3429 {
3430 enum reg_class *p;
3431
3432 for (p = reg_class_superclasses[(int) class];
3433 *p != LIM_REG_CLASSES; p++)
3434 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3435 break;
3436
3437 if (*p == LIM_REG_CLASSES)
3438 continue;
3439 }
3440 PUT_CODE (scratch_list[i], SCRATCH);
3441 scratch_list[i] = 0;
3442 something_changed = 1;
3443 continue;
3444 }
3445 }
32131a9c
RK
3446
3447 return something_changed;
3448}
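
The overlap test in the loop above treats a pseudo allocated to hard registers [first, first + nregs) as a half-open interval and asks whether that interval covers REGNO.  A minimal sketch of just that check, with invented data:

#include <stdio.h>

/* Does a pseudo allocated to hard regs [first, first + nregs) overlap
   the single hard register REGNO?  A first of -1 means "not in a hard
   register at all".  */
static int
pseudo_overlaps_hard_reg (int first, int nregs, int regno)
{
  return first >= 0 && first <= regno && first + nregs > regno;
}

int
main (void)
{
  /* A pseudo occupying a 2-register pair starting at hard reg 3 overlaps
     reg 3 and reg 4, but not reg 5.  */
  printf ("%d %d %d\n",
          pseudo_overlaps_hard_reg (3, 2, 3),
          pseudo_overlaps_hard_reg (3, 2, 4),
          pseudo_overlaps_hard_reg (3, 2, 5));   /* prints "1 1 0" */
  return 0;
}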
3449\f
3450/* Find all paradoxical subregs within X and update reg_max_ref_width. */
3451
3452static void
3453scan_paradoxical_subregs (x)
3454 register rtx x;
3455{
3456 register int i;
3457 register char *fmt;
3458 register enum rtx_code code = GET_CODE (x);
3459
3460 switch (code)
3461 {
3462 case CONST_INT:
3463 case CONST:
3464 case SYMBOL_REF:
3465 case LABEL_REF:
3466 case CONST_DOUBLE:
3467 case CC0:
3468 case PC:
3469 case REG:
3470 case USE:
3471 case CLOBBER:
3472 return;
3473
3474 case SUBREG:
3475 if (GET_CODE (SUBREG_REG (x)) == REG
3476 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3477 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3478 = GET_MODE_SIZE (GET_MODE (x));
3479 return;
3480 }
3481
3482 fmt = GET_RTX_FORMAT (code);
3483 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3484 {
3485 if (fmt[i] == 'e')
3486 scan_paradoxical_subregs (XEXP (x, i));
3487 else if (fmt[i] == 'E')
3488 {
3489 register int j;
3490 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3491 scan_paradoxical_subregs (XVECEXP (x, i, j));
3492 }
3493 }
3494}
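
The bookkeeping the SUBREG case performs, remembering for each register the widest mode in which it is referenced so the register can later get a large enough stack slot, is in spirit a running maximum; the real pass only overwrites the entry when the subreg is wider than the register's own mode.  A sketch of the intent, with invented names and sizes:

#include <stdio.h>

#define NREGS 8

static unsigned reg_max_width[NREGS];   /* widest reference seen, in bytes */

/* Record that register REGNO was referenced with a mode of SIZE bytes;
   only a wider reference than anything seen before changes the table.  */
static void
note_reference_width (int regno, unsigned size)
{
  if (size > reg_max_width[regno])
    reg_max_width[regno] = size;
}

int
main (void)
{
  note_reference_width (2, 4);    /* an ordinary 4-byte use */
  note_reference_width (2, 8);    /* a paradoxical 8-byte use */
  note_reference_width (2, 4);
  printf ("reg 2 needs at least %u bytes\n", reg_max_width[2]);  /* 8 */
  return 0;
}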
3495\f
32131a9c
RK
3496static int
3497hard_reg_use_compare (p1, p2)
3498 struct hard_reg_n_uses *p1, *p2;
3499{
3500 int tem = p1->uses - p2->uses;
3501 if (tem != 0) return tem;
3502 /* If regs are equally good, sort by regno,
3503 so that the results of qsort leave nothing to chance. */
3504 return p1->regno - p2->regno;
3505}
3506
3507/* Choose the order to consider regs for use as reload registers
3508 based on how much trouble would be caused by spilling one.
3509 Store them in order of decreasing preference in potential_reload_regs. */
3510
3511static void
3512order_regs_for_reload ()
3513{
3514 register int i;
3515 register int o = 0;
3516 int large = 0;
3517
3518 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3519
3520 CLEAR_HARD_REG_SET (bad_spill_regs);
3521
3522 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3523 potential_reload_regs[i] = -1;
3524
3525 /* Count number of uses of each hard reg by pseudo regs allocated to it
3526 and then order them by decreasing use. */
3527
3528 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3529 {
3530 hard_reg_n_uses[i].uses = 0;
3531 hard_reg_n_uses[i].regno = i;
3532 }
3533
3534 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3535 {
3536 int regno = reg_renumber[i];
3537 if (regno >= 0)
3538 {
3539 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3540 while (regno < lim)
3541 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3542 }
3543 large += reg_n_refs[i];
3544 }
3545
3546 /* Now fixed registers (which cannot safely be used for reloading)
3547 get a very high use count so they will be considered least desirable.
3548 Registers used explicitly in the rtl code are almost as bad. */
3549
3550 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3551 {
3552 if (fixed_regs[i])
3553 {
3554 hard_reg_n_uses[i].uses += 2 * large + 2;
3555 SET_HARD_REG_BIT (bad_spill_regs, i);
3556 }
3557 else if (regs_explicitly_used[i])
3558 {
3559 hard_reg_n_uses[i].uses += large + 1;
546b63fb 3560#ifndef SMALL_REGISTER_CLASSES
32131a9c
RK
3561 /* ??? We are doing this here because of the potential that
3562 bad code may be generated if a register explicitly used in
3563 an insn was used as a spill register for that insn. But
 3564	     not using these as spill registers may lose on some machines.
3565 We'll have to see how this works out. */
3566 SET_HARD_REG_BIT (bad_spill_regs, i);
546b63fb 3567#endif
32131a9c
RK
3568 }
3569 }
3ec2ea3e
DE
3570 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3571 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
32131a9c
RK
3572
3573#ifdef ELIMINABLE_REGS
3574 /* If registers other than the frame pointer are eliminable, mark them as
3575 poor choices. */
3576 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3577 {
3578 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3579 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3580 }
3581#endif
3582
3583 /* Prefer registers not so far used, for use in temporary loading.
3584 Among them, if REG_ALLOC_ORDER is defined, use that order.
3585 Otherwise, prefer registers not preserved by calls. */
3586
3587#ifdef REG_ALLOC_ORDER
3588 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3589 {
3590 int regno = reg_alloc_order[i];
3591
3592 if (hard_reg_n_uses[regno].uses == 0)
3593 potential_reload_regs[o++] = regno;
3594 }
3595#else
3596 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3597 {
3598 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3599 potential_reload_regs[o++] = i;
3600 }
3601 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3602 {
3603 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3604 potential_reload_regs[o++] = i;
3605 }
3606#endif
3607
3608 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3609 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3610
3611 /* Now add the regs that are already used,
3612 preferring those used less often. The fixed and otherwise forbidden
3613 registers will be at the end of this list. */
3614
3615 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3616 if (hard_reg_n_uses[i].uses != 0)
3617 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3618}
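
The weighting trick above adds 2 * large + 2 to registers that must never be chosen and large + 1 to merely undesirable ones, where large is the total reference count, so they sort after every ordinary register; the tie-break on regno keeps qsort's output deterministic.  A compressed sketch of the same idea, with invented use counts:

#include <stdio.h>
#include <stdlib.h>

#define NREGS 6

struct reg_uses { int uses; int regno; };

static int
use_compare (const void *a, const void *b)
{
  const struct reg_uses *p1 = a, *p2 = b;
  if (p1->uses != p2->uses)
    return p1->uses - p2->uses;
  return p1->regno - p2->regno;   /* deterministic tie-break */
}

int
main (void)
{
  /* Invented use counts; regs 4 and 5 stand in for fixed and explicitly
     used registers.  */
  struct reg_uses v[NREGS] = { {3, 0}, {0, 1}, {7, 2}, {3, 3}, {0, 4}, {0, 5} };
  int large = 3 + 0 + 7 + 3, i;

  v[4].uses += 2 * large + 2;   /* "fixed": worst possible choice */
  v[5].uses += large + 1;       /* "explicitly used": almost as bad */

  qsort (v, NREGS, sizeof v[0], use_compare);
  for (i = 0; i < NREGS; i++)
    printf ("reg %d (uses %d)\n", v[i].regno, v[i].uses);
  return 0;
}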
3619\f
3620/* Reload pseudo-registers into hard regs around each insn as needed.
3621 Additional register load insns are output before the insn that needs it
3622 and perhaps store insns after insns that modify the reloaded pseudo reg.
3623
3624 reg_last_reload_reg and reg_reloaded_contents keep track of
d08ea79f 3625 which registers are already available in reload registers.
32131a9c
RK
3626 We update these for the reloads that we perform,
3627 as the insns are scanned. */
3628
3629static void
3630reload_as_needed (first, live_known)
3631 rtx first;
3632 int live_known;
3633{
3634 register rtx insn;
3635 register int i;
3636 int this_block = 0;
3637 rtx x;
3638 rtx after_call = 0;
3639
3640 bzero (spill_reg_rtx, sizeof spill_reg_rtx);
3641 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3642 bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
3643 reg_has_output_reload = (char *) alloca (max_regno);
3644 for (i = 0; i < n_spills; i++)
3645 {
3646 reg_reloaded_contents[i] = -1;
3647 reg_reloaded_insn[i] = 0;
3648 }
3649
3650 /* Reset all offsets on eliminable registers to their initial values. */
3651#ifdef ELIMINABLE_REGS
3652 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3653 {
3654 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
510dd77e 3655 reg_eliminate[i].initial_offset);
32131a9c
RK
3656 reg_eliminate[i].previous_offset
3657 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3658 }
3659#else
3660 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3661 reg_eliminate[0].previous_offset
3662 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3663#endif
3664
3665 num_not_at_initial_offset = 0;
3666
3667 for (insn = first; insn;)
3668 {
3669 register rtx next = NEXT_INSN (insn);
3670
3671 /* Notice when we move to a new basic block. */
aa2c50d6 3672 if (live_known && this_block + 1 < n_basic_blocks
32131a9c
RK
3673 && insn == basic_block_head[this_block+1])
3674 ++this_block;
3675
3676 /* If we pass a label, copy the offsets from the label information
3677 into the current offsets of each elimination. */
3678 if (GET_CODE (insn) == CODE_LABEL)
2a4b5f3b
RK
3679 {
3680 num_not_at_initial_offset = 0;
3681 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3682 {
3683 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3684 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
1d0d98f3
RK
3685 if (reg_eliminate[i].can_eliminate
3686 && (reg_eliminate[i].offset
3687 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
3688 num_not_at_initial_offset++;
3689 }
3690 }
32131a9c
RK
3691
3692 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3693 {
3694 rtx avoid_return_reg = 0;
3695
3696#ifdef SMALL_REGISTER_CLASSES
3697 /* Set avoid_return_reg if this is an insn
3698 that might use the value of a function call. */
3699 if (GET_CODE (insn) == CALL_INSN)
3700 {
3701 if (GET_CODE (PATTERN (insn)) == SET)
3702 after_call = SET_DEST (PATTERN (insn));
3703 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3704 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3705 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3706 else
3707 after_call = 0;
3708 }
3709 else if (after_call != 0
3710 && !(GET_CODE (PATTERN (insn)) == SET
3711 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3712 {
3713 if (reg_mentioned_p (after_call, PATTERN (insn)))
3714 avoid_return_reg = after_call;
3715 after_call = 0;
3716 }
3717#endif /* SMALL_REGISTER_CLASSES */
3718
2758481d
RS
3719 /* If this is a USE and CLOBBER of a MEM, ensure that any
3720 references to eliminable registers have been removed. */
3721
3722 if ((GET_CODE (PATTERN (insn)) == USE
3723 || GET_CODE (PATTERN (insn)) == CLOBBER)
3724 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3725 XEXP (XEXP (PATTERN (insn), 0), 0)
3726 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
fb3821f7 3727 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
2758481d 3728
32131a9c
RK
3729 /* If we need to do register elimination processing, do so.
3730 This might delete the insn, in which case we are done. */
3731 if (num_eliminable && GET_MODE (insn) == QImode)
3732 {
3733 eliminate_regs_in_insn (insn, 1);
3734 if (GET_CODE (insn) == NOTE)
3735 {
3736 insn = next;
3737 continue;
3738 }
3739 }
3740
3741 if (GET_MODE (insn) == VOIDmode)
3742 n_reloads = 0;
3743 /* First find the pseudo regs that must be reloaded for this insn.
3744 This info is returned in the tables reload_... (see reload.h).
3745 Also modify the body of INSN by substituting RELOAD
3746 rtx's for those pseudo regs. */
3747 else
3748 {
3749 bzero (reg_has_output_reload, max_regno);
3750 CLEAR_HARD_REG_SET (reg_is_output_reload);
3751
3752 find_reloads (insn, 1, spill_indirect_levels, live_known,
3753 spill_reg_order);
3754 }
3755
3756 if (n_reloads > 0)
3757 {
3c3eeea6
RK
3758 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3759 rtx p;
32131a9c
RK
3760 int class;
3761
3762 /* If this block has not had spilling done for a
546b63fb
RK
 3763	     particular class and we have any non-optionals that need a
3764 spill reg in that class, abort. */
32131a9c
RK
3765
3766 for (class = 0; class < N_REG_CLASSES; class++)
3767 if (basic_block_needs[class] != 0
3768 && basic_block_needs[class][this_block] == 0)
3769 for (i = 0; i < n_reloads; i++)
546b63fb
RK
3770 if (class == (int) reload_reg_class[i]
3771 && reload_reg_rtx[i] == 0
3772 && ! reload_optional[i]
3773 && (reload_in[i] != 0 || reload_out[i] != 0
3774 || reload_secondary_p[i] != 0))
3775 abort ();
32131a9c
RK
3776
3777 /* Now compute which reload regs to reload them into. Perhaps
3778 reusing reload regs from previous insns, or else output
3779 load insns to reload them. Maybe output store insns too.
3780 Record the choices of reload reg in reload_reg_rtx. */
3781 choose_reload_regs (insn, avoid_return_reg);
3782
546b63fb
RK
3783#ifdef SMALL_REGISTER_CLASSES
3784 /* Merge any reloads that we didn't combine for fear of
3785 increasing the number of spill registers needed but now
3786 discover can be safely merged. */
3787 merge_assigned_reloads (insn);
3788#endif
3789
32131a9c
RK
3790 /* Generate the insns to reload operands into or out of
3791 their reload regs. */
3792 emit_reload_insns (insn);
3793
3794 /* Substitute the chosen reload regs from reload_reg_rtx
3795 into the insn's body (or perhaps into the bodies of other
 3796	     load and store insns that we just made for reloading
3797 and that we moved the structure into). */
3798 subst_reloads ();
3c3eeea6
RK
3799
3800 /* If this was an ASM, make sure that all the reload insns
3801 we have generated are valid. If not, give an error
3802 and delete them. */
3803
3804 if (asm_noperands (PATTERN (insn)) >= 0)
3805 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3806 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3807 && (recog_memoized (p) < 0
3808 || (insn_extract (p),
3809 ! constrain_operands (INSN_CODE (p), 1))))
3810 {
3811 error_for_asm (insn,
3812 "`asm' operand requires impossible reload");
3813 PUT_CODE (p, NOTE);
3814 NOTE_SOURCE_FILE (p) = 0;
3815 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3816 }
32131a9c
RK
3817 }
3818 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3819 is no longer validly lying around to save a future reload.
3820 Note that this does not detect pseudos that were reloaded
3821 for this insn in order to be stored in
3822 (obeying register constraints). That is correct; such reload
3823 registers ARE still valid. */
3824 note_stores (PATTERN (insn), forget_old_reloads_1);
3825
3826 /* There may have been CLOBBER insns placed after INSN. So scan
3827 between INSN and NEXT and use them to forget old reloads. */
3828 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3829 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3830 note_stores (PATTERN (x), forget_old_reloads_1);
3831
3832#ifdef AUTO_INC_DEC
3833 /* Likewise for regs altered by auto-increment in this insn.
3834 But note that the reg-notes are not changed by reloading:
3835 they still contain the pseudo-regs, not the spill regs. */
3836 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3837 if (REG_NOTE_KIND (x) == REG_INC)
3838 {
3839 /* See if this pseudo reg was reloaded in this insn.
3840 If so, its last-reload info is still valid
3841 because it is based on this insn's reload. */
3842 for (i = 0; i < n_reloads; i++)
3843 if (reload_out[i] == XEXP (x, 0))
3844 break;
3845
08fb99fa 3846 if (i == n_reloads)
9a881562 3847 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
32131a9c
RK
3848 }
3849#endif
3850 }
3851 /* A reload reg's contents are unknown after a label. */
3852 if (GET_CODE (insn) == CODE_LABEL)
3853 for (i = 0; i < n_spills; i++)
3854 {
3855 reg_reloaded_contents[i] = -1;
3856 reg_reloaded_insn[i] = 0;
3857 }
3858
3859 /* Don't assume a reload reg is still good after a call insn
3860 if it is a call-used reg. */
546b63fb 3861 else if (GET_CODE (insn) == CALL_INSN)
32131a9c
RK
3862 for (i = 0; i < n_spills; i++)
3863 if (call_used_regs[spill_regs[i]])
3864 {
3865 reg_reloaded_contents[i] = -1;
3866 reg_reloaded_insn[i] = 0;
3867 }
3868
3869 /* In case registers overlap, allow certain insns to invalidate
3870 particular hard registers. */
3871
3872#ifdef INSN_CLOBBERS_REGNO_P
3873 for (i = 0 ; i < n_spills ; i++)
3874 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3875 {
3876 reg_reloaded_contents[i] = -1;
3877 reg_reloaded_insn[i] = 0;
3878 }
3879#endif
3880
3881 insn = next;
3882
3883#ifdef USE_C_ALLOCA
3884 alloca (0);
3885#endif
3886 }
3887}
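
The two invalidation rules near the end of the loop, forget everything at a CODE_LABEL but only call-used spill registers at a CALL_INSN, can be shown with a toy cache keyed by spill-register index.  The arrays and the call-used convention below are invented:

#include <string.h>

#define MAX_SPILLS 4

static int cached_pseudo[MAX_SPILLS];   /* pseudo a reload reg holds, or -1 */
static const int call_used[MAX_SPILLS] = { 1, 1, 0, 0 };  /* invented */

/* A label can be reached from places we did not scan, so nothing can be
   assumed about any reload register afterwards.  */
static void
invalidate_at_label (void)
{
  int i;
  for (i = 0; i < MAX_SPILLS; i++)
    cached_pseudo[i] = -1;
}

/* A call clobbers only the call-used registers, so reload registers
   that survive calls keep their cached contents.  */
static void
invalidate_after_call (void)
{
  int i;
  for (i = 0; i < MAX_SPILLS; i++)
    if (call_used[i])
      cached_pseudo[i] = -1;
}

int
main (void)
{
  memset (cached_pseudo, -1, sizeof cached_pseudo);
  cached_pseudo[0] = 42;        /* pretend pseudo 42 sits in spill reg 0 */
  cached_pseudo[2] = 43;
  invalidate_after_call ();     /* spill reg 0 is lost, spill reg 2 survives */
  invalidate_at_label ();       /* now everything is lost */
  return 0;
}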
3888
3889/* Discard all record of any value reloaded from X,
3890 or reloaded in X from someplace else;
3891 unless X is an output reload reg of the current insn.
3892
3893 X may be a hard reg (the reload reg)
3894 or it may be a pseudo reg that was reloaded from. */
3895
3896static void
9a881562 3897forget_old_reloads_1 (x, ignored)
32131a9c 3898 rtx x;
9a881562 3899 rtx ignored;
32131a9c
RK
3900{
3901 register int regno;
3902 int nr;
0a2e51a9
RS
3903 int offset = 0;
3904
3905 /* note_stores does give us subregs of hard regs. */
3906 while (GET_CODE (x) == SUBREG)
3907 {
3908 offset += SUBREG_WORD (x);
3909 x = SUBREG_REG (x);
3910 }
32131a9c
RK
3911
3912 if (GET_CODE (x) != REG)
3913 return;
3914
0a2e51a9 3915 regno = REGNO (x) + offset;
32131a9c
RK
3916
3917 if (regno >= FIRST_PSEUDO_REGISTER)
3918 nr = 1;
3919 else
3920 {
3921 int i;
3922 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3923 /* Storing into a spilled-reg invalidates its contents.
3924 This can happen if a block-local pseudo is allocated to that reg
3925 and it wasn't spilled because this block's total need is 0.
3926 Then some insn might have an optional reload and use this reg. */
3927 for (i = 0; i < nr; i++)
3928 if (spill_reg_order[regno + i] >= 0
3929 /* But don't do this if the reg actually serves as an output
3930 reload reg in the current instruction. */
3931 && (n_reloads == 0
3932 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3933 {
3934 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3935 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3936 }
3937 }
3938
3939 /* Since value of X has changed,
3940 forget any value previously copied from it. */
3941
3942 while (nr-- > 0)
3943 /* But don't forget a copy if this is the output reload
3944 that establishes the copy's validity. */
3945 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3946 reg_last_reload_reg[regno + nr] = 0;
3947}
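
The exception in the function above is easy to miss: a store normally invalidates any remembered copy of the stored register, but not when the store is itself the output reload that creates the remembered copy.  A toy version with invented arrays:

#include <stdio.h>

#define MAX_REGS 16

/* For each register, the reload reg last known to hold its value, or -1.  */
static int last_reload_copy[MAX_REGS];

/* Nonzero if the current insn has an output reload for this register;
   such a store is what creates the remembered copy, so it must not
   erase it.  */
static char has_output_reload[MAX_REGS];

static void
forget_stored_reg (int regno)
{
  if (! has_output_reload[regno])
    last_reload_copy[regno] = -1;
}

int
main (void)
{
  int r;
  for (r = 0; r < MAX_REGS; r++)
    last_reload_copy[r] = -1;

  last_reload_copy[7] = 3;      /* pseudo 7 was reloaded into spill reg 3 */
  has_output_reload[7] = 1;     /* ... by an output reload of this insn */
  forget_stored_reg (7);        /* the copy survives */
  has_output_reload[7] = 0;
  forget_stored_reg (7);        /* a later ordinary store: now forgotten */
  printf ("%d\n", last_reload_copy[7]);   /* prints -1 */
  return 0;
}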
3948\f
3949/* For each reload, the mode of the reload register. */
3950static enum machine_mode reload_mode[MAX_RELOADS];
3951
3952/* For each reload, the largest number of registers it will require. */
3953static int reload_nregs[MAX_RELOADS];
3954
3955/* Comparison function for qsort to decide which of two reloads
3956 should be handled first. *P1 and *P2 are the reload numbers. */
3957
3958static int
3959reload_reg_class_lower (p1, p2)
3960 short *p1, *p2;
3961{
3962 register int r1 = *p1, r2 = *p2;
3963 register int t;
a8fdc208 3964
32131a9c
RK
3965 /* Consider required reloads before optional ones. */
3966 t = reload_optional[r1] - reload_optional[r2];
3967 if (t != 0)
3968 return t;
3969
3970 /* Count all solitary classes before non-solitary ones. */
3971 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3972 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3973 if (t != 0)
3974 return t;
3975
3976 /* Aside from solitaires, consider all multi-reg groups first. */
3977 t = reload_nregs[r2] - reload_nregs[r1];
3978 if (t != 0)
3979 return t;
3980
3981 /* Consider reloads in order of increasing reg-class number. */
3982 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3983 if (t != 0)
3984 return t;
3985
3986 /* If reloads are equally urgent, sort by reload number,
3987 so that the results of qsort leave nothing to chance. */
3988 return r1 - r2;
3989}
3990\f
3991/* The following HARD_REG_SETs indicate when each hard register is
3992 used for a reload of various parts of the current insn. */
3993
3994/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
3995static HARD_REG_SET reload_reg_used;
546b63fb
RK
3996/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
3997static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
3998/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
3999static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4000/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4001static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4002/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4003static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c
RK
4004/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4005static HARD_REG_SET reload_reg_used_in_op_addr;
546b63fb
RK
4006/* If reg is in use for a RELOAD_FOR_INSN reload. */
4007static HARD_REG_SET reload_reg_used_in_insn;
4008/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4009static HARD_REG_SET reload_reg_used_in_other_addr;
32131a9c
RK
4010
4011/* If reg is in use as a reload reg for any sort of reload. */
4012static HARD_REG_SET reload_reg_used_at_all;
4013
be7ae2a4
RK
 4014/* If reg is in use as an inherited reload.  We just mark the first register
4015 in the group. */
4016static HARD_REG_SET reload_reg_used_for_inherit;
4017
546b63fb
RK
4018/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4019 TYPE. MODE is used to indicate how many consecutive regs are
4020 actually used. */
32131a9c
RK
4021
4022static void
546b63fb 4023mark_reload_reg_in_use (regno, opnum, type, mode)
32131a9c 4024 int regno;
546b63fb
RK
4025 int opnum;
4026 enum reload_type type;
32131a9c
RK
4027 enum machine_mode mode;
4028{
4029 int nregs = HARD_REGNO_NREGS (regno, mode);
4030 int i;
4031
4032 for (i = regno; i < nregs + regno; i++)
4033 {
546b63fb 4034 switch (type)
32131a9c
RK
4035 {
4036 case RELOAD_OTHER:
4037 SET_HARD_REG_BIT (reload_reg_used, i);
4038 break;
4039
546b63fb
RK
4040 case RELOAD_FOR_INPUT_ADDRESS:
4041 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
32131a9c
RK
4042 break;
4043
546b63fb
RK
4044 case RELOAD_FOR_OUTPUT_ADDRESS:
4045 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
32131a9c
RK
4046 break;
4047
4048 case RELOAD_FOR_OPERAND_ADDRESS:
4049 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4050 break;
4051
546b63fb
RK
4052 case RELOAD_FOR_OTHER_ADDRESS:
4053 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4054 break;
4055
32131a9c 4056 case RELOAD_FOR_INPUT:
546b63fb 4057 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
32131a9c
RK
4058 break;
4059
4060 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4061 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4062 break;
4063
4064 case RELOAD_FOR_INSN:
4065 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
32131a9c
RK
4066 break;
4067 }
4068
4069 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4070 }
4071}
4072
be7ae2a4
RK
4073/* Similarly, but show REGNO is no longer in use for a reload. */
4074
4075static void
4076clear_reload_reg_in_use (regno, opnum, type, mode)
4077 int regno;
4078 int opnum;
4079 enum reload_type type;
4080 enum machine_mode mode;
4081{
4082 int nregs = HARD_REGNO_NREGS (regno, mode);
4083 int i;
4084
4085 for (i = regno; i < nregs + regno; i++)
4086 {
4087 switch (type)
4088 {
4089 case RELOAD_OTHER:
4090 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4091 break;
4092
4093 case RELOAD_FOR_INPUT_ADDRESS:
4094 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4095 break;
4096
4097 case RELOAD_FOR_OUTPUT_ADDRESS:
4098 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4099 break;
4100
4101 case RELOAD_FOR_OPERAND_ADDRESS:
4102 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4103 break;
4104
4105 case RELOAD_FOR_OTHER_ADDRESS:
4106 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4107 break;
4108
4109 case RELOAD_FOR_INPUT:
4110 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4111 break;
4112
4113 case RELOAD_FOR_OUTPUT:
4114 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4115 break;
4116
4117 case RELOAD_FOR_INSN:
4118 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4119 break;
4120 }
4121 }
4122}
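
Both helpers above walk HARD_REGNO_NREGS consecutive hard registers and set or clear the same bit in whichever set matches the reload's operand number and type.  A stripped-down sketch of that pattern, using plain unsigned masks instead of HARD_REG_SET and showing only the input-reload case:

#include <stdio.h>

#define MAX_OPERANDS 4

/* One bit per hard register; enough for this sketch.  */
static unsigned reg_used_in_input[MAX_OPERANDS];
static unsigned reg_used_at_all;

/* Mark hard regs REGNO .. REGNO + NREGS - 1 as used by the input reload
   of operand OPNUM, and as used at all.  */
static void
mark_input_reload_regs (int regno, int nregs, int opnum)
{
  int i;
  for (i = regno; i < regno + nregs; i++)
    {
      reg_used_in_input[opnum] |= 1u << i;
      reg_used_at_all |= 1u << i;
    }
}

/* Undo the per-operand marking (the "used at all" bit stays set).  */
static void
clear_input_reload_regs (int regno, int nregs, int opnum)
{
  int i;
  for (i = regno; i < regno + nregs; i++)
    reg_used_in_input[opnum] &= ~(1u << i);
}

int
main (void)
{
  mark_input_reload_regs (2, 2, 0);   /* a two-register value in regs 2-3 */
  printf ("%#x %#x\n", reg_used_in_input[0], reg_used_at_all);  /* 0xc 0xc */
  clear_input_reload_regs (2, 2, 0);
  printf ("%#x\n", reg_used_in_input[0]);   /* 0 */
  return 0;
}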
4123
32131a9c 4124/* 1 if reg REGNO is free as a reload reg for a reload of the sort
546b63fb 4125 specified by OPNUM and TYPE. */
32131a9c
RK
4126
4127static int
546b63fb 4128reload_reg_free_p (regno, opnum, type)
32131a9c 4129 int regno;
546b63fb
RK
4130 int opnum;
4131 enum reload_type type;
32131a9c 4132{
546b63fb
RK
4133 int i;
4134
4135 /* In use for a RELOAD_OTHER means it's not available for anything except
4136 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4137 to be used only for inputs. */
4138
4139 if (type != RELOAD_FOR_OTHER_ADDRESS
4140 && TEST_HARD_REG_BIT (reload_reg_used, regno))
32131a9c 4141 return 0;
546b63fb
RK
4142
4143 switch (type)
32131a9c
RK
4144 {
4145 case RELOAD_OTHER:
4146 /* In use for anything means not available for a RELOAD_OTHER. */
4147 return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);
4148
4149 /* The other kinds of use can sometimes share a register. */
4150 case RELOAD_FOR_INPUT:
546b63fb
RK
4151 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4152 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4153 return 0;
4154
4155 /* If it is used for some other input, can't use it. */
4156 for (i = 0; i < reload_n_operands; i++)
4157 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4158 return 0;
4159
4160 /* If it is used in a later operand's address, can't use it. */
4161 for (i = opnum + 1; i < reload_n_operands; i++)
4162 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4163 return 0;
4164
4165 return 1;
4166
4167 case RELOAD_FOR_INPUT_ADDRESS:
4168 /* Can't use a register if it is used for an input address for this
4169 operand or used as an input in an earlier one. */
4170 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4171 return 0;
4172
4173 for (i = 0; i < opnum; i++)
4174 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4175 return 0;
4176
4177 return 1;
4178
4179 case RELOAD_FOR_OUTPUT_ADDRESS:
4180 /* Can't use a register if it is used for an output address for this
4181 operand or used as an output in this or a later operand. */
4182 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4183 return 0;
4184
4185 for (i = opnum; i < reload_n_operands; i++)
4186 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4187 return 0;
4188
4189 return 1;
4190
32131a9c 4191 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4192 for (i = 0; i < reload_n_operands; i++)
4193 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4194 return 0;
4195
4196 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4197 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4198
32131a9c 4199 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4200 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4201 outputs, or an operand address for this or an earlier output. */
4202 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4203 return 0;
4204
4205 for (i = 0; i < reload_n_operands; i++)
4206 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4207 return 0;
4208
4209 for (i = 0; i <= opnum; i++)
4210 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4211 return 0;
4212
4213 return 1;
4214
4215 case RELOAD_FOR_INSN:
4216 for (i = 0; i < reload_n_operands; i++)
4217 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4218 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4219 return 0;
4220
4221 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4222 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4223
4224 case RELOAD_FOR_OTHER_ADDRESS:
4225 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4226 }
4227 abort ();
4228}
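
To make the RELOAD_FOR_INPUT case above concrete: with one usage mask per operand (in the spirit of the sketch after clear_reload_reg_in_use), the availability test is a handful of bit checks.  Everything below is hypothetical and simplified:

#include <stdio.h>

#define MAX_OPERANDS 4

/* Per-insn usage masks, one bit per hard register (invented layout).  */
struct reload_usage
{
  unsigned used_in_insn;
  unsigned used_in_op_addr;
  unsigned used_in_input[MAX_OPERANDS];
  unsigned used_in_input_addr[MAX_OPERANDS];
  int n_operands;
};

/* Is hard reg REGNO free for a RELOAD_FOR_INPUT reload of operand OPNUM?
   It must not be claimed by the insn as a whole, by an operand address,
   by any input already marked, or by the address of a later input.  */
static int
input_reload_reg_free_p (const struct reload_usage *u, int regno, int opnum)
{
  unsigned bit = 1u << regno;
  int i;

  if ((u->used_in_insn | u->used_in_op_addr) & bit)
    return 0;
  for (i = 0; i < u->n_operands; i++)
    if (u->used_in_input[i] & bit)
      return 0;
  for (i = opnum + 1; i < u->n_operands; i++)
    if (u->used_in_input_addr[i] & bit)
      return 0;
  return 1;
}

int
main (void)
{
  struct reload_usage u = { 0 };
  u.n_operands = 2;
  u.used_in_input[1] = 1u << 5;   /* operand 1's input already uses reg 5 */
  printf ("%d %d\n",
          input_reload_reg_free_p (&u, 5, 0),   /* 0: conflicts with an input */
          input_reload_reg_free_p (&u, 6, 0));  /* 1: reg 6 is free */
  return 0;
}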
4229
4230/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4231 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4232 is not in use for a reload in any prior part of the insn.
4233
4234 We can assume that the reload reg was already tested for availability
4235 at the time it is needed, and we should not check this again,
4236 in case the reg has already been marked in use. */
4237
4238static int
546b63fb 4239reload_reg_free_before_p (regno, opnum, type)
32131a9c 4240 int regno;
546b63fb
RK
4241 int opnum;
4242 enum reload_type type;
32131a9c 4243{
546b63fb
RK
4244 int i;
4245
4246 switch (type)
32131a9c 4247 {
546b63fb
RK
4248 case RELOAD_FOR_OTHER_ADDRESS:
4249 /* These always come first. */
32131a9c
RK
4250 return 1;
4251
546b63fb
RK
4252 case RELOAD_OTHER:
4253 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4254
32131a9c 4255 /* If this use is for part of the insn,
546b63fb
RK
 4256	     check that the reg is not in use for any prior part.  It is tempting
 4257	     to try to do this by falling through from objects that occur
4258 later in the insn to ones that occur earlier, but that will not
4259 correctly take into account the fact that here we MUST ignore
4260 things that would prevent the register from being allocated in
4261 the first place, since we know that it was allocated. */
4262
4263 case RELOAD_FOR_OUTPUT_ADDRESS:
4264 /* Earlier reloads are for earlier outputs or their addresses,
4265 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4266 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
 4267	 RELOAD_OTHER).  */
4268 for (i = 0; i < opnum; i++)
4269 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4270 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4271 return 0;
4272
4273 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
32131a9c 4274 return 0;
546b63fb
RK
4275
4276 for (i = 0; i < reload_n_operands; i++)
4277 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4278 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4279 return 0;
4280
4281 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4282 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4283 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4284
32131a9c 4285 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4286 /* This can't be used in the output address for this operand and
4287 anything that can't be used for it, except that we've already
4288 tested for RELOAD_FOR_INSN objects. */
4289
4290 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
32131a9c 4291 return 0;
546b63fb
RK
4292
4293 for (i = 0; i < opnum; i++)
4294 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4295 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4296 return 0;
4297
4298 for (i = 0; i < reload_n_operands; i++)
4299 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4300 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4301 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4302 return 0;
4303
4304 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4305
32131a9c 4306 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4307 case RELOAD_FOR_INSN:
4308 /* These can't conflict with inputs, or each other, so all we have to
4309 test is input addresses and the addresses of OTHER items. */
4310
4311 for (i = 0; i < reload_n_operands; i++)
4312 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4313 return 0;
4314
4315 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4316
32131a9c 4317 case RELOAD_FOR_INPUT:
546b63fb
RK
4318 /* The only things earlier are the address for this and
4319 earlier inputs, other inputs (which we know we don't conflict
4320 with), and addresses of RELOAD_OTHER objects. */
4321
4322 for (i = 0; i <= opnum; i++)
4323 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4324 return 0;
4325
4326 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4327
4328 case RELOAD_FOR_INPUT_ADDRESS:
4329 /* Similarly, all we have to check is for use in earlier inputs'
4330 addresses. */
4331 for (i = 0; i < opnum; i++)
4332 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4333 return 0;
4334
4335 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4336 }
4337 abort ();
4338}
4339
4340/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4341 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4342 is still available in REGNO at the end of the insn.
4343
4344 We can assume that the reload reg was already tested for availability
4345 at the time it is needed, and we should not check this again,
4346 in case the reg has already been marked in use. */
4347
4348static int
546b63fb 4349reload_reg_reaches_end_p (regno, opnum, type)
32131a9c 4350 int regno;
546b63fb
RK
4351 int opnum;
4352 enum reload_type type;
32131a9c 4353{
546b63fb
RK
4354 int i;
4355
4356 switch (type)
32131a9c
RK
4357 {
4358 case RELOAD_OTHER:
4359 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4360 its value must reach the end. */
4361 return 1;
4362
4363 /* If this use is for part of the insn,
546b63fb
RK
 4364	 its value reaches the end if no subsequent part uses the same register.
4365 Just like the above function, don't try to do this with lots
4366 of fallthroughs. */
4367
4368 case RELOAD_FOR_OTHER_ADDRESS:
4369 /* Here we check for everything else, since these don't conflict
4370 with anything else and everything comes later. */
4371
4372 for (i = 0; i < reload_n_operands; i++)
4373 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4374 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4375 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4376 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4377 return 0;
4378
4379 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4380 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4381 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4382
4383 case RELOAD_FOR_INPUT_ADDRESS:
4384 /* Similar, except that we check only for this and subsequent inputs
4385 and the address of only subsequent inputs and we do not need
4386 to check for RELOAD_OTHER objects since they are known not to
4387 conflict. */
4388
4389 for (i = opnum; i < reload_n_operands; i++)
4390 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4391 return 0;
4392
4393 for (i = opnum + 1; i < reload_n_operands; i++)
4394 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4395 return 0;
4396
4397 for (i = 0; i < reload_n_operands; i++)
4398 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4399 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4400 return 0;
4401
4402 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4403 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4404
32131a9c 4405 case RELOAD_FOR_INPUT:
546b63fb
RK
4406 /* Similar to input address, except we start at the next operand for
4407 both input and input address and we do not check for
4408 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4409 would conflict. */
4410
4411 for (i = opnum + 1; i < reload_n_operands; i++)
4412 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4413 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4414 return 0;
4415
4416 /* ... fall through ... */
4417
32131a9c 4418 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4419 /* Check outputs and their addresses. */
4420
4421 for (i = 0; i < reload_n_operands; i++)
4422 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4423 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4424 return 0;
4425
4426 return 1;
4427
4428 case RELOAD_FOR_INSN:
 4429      /* These conflict with other outputs and with RELOAD_OTHER.  So
4430 we need only check for output addresses. */
4431
4432 opnum = -1;
4433
4434 /* ... fall through ... */
4435
32131a9c 4436 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4437 case RELOAD_FOR_OUTPUT_ADDRESS:
4438 /* We already know these can't conflict with a later output. So the
4439 only thing to check are later output addresses. */
4440 for (i = opnum + 1; i < reload_n_operands; i++)
4441 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4442 return 0;
4443
32131a9c
RK
4444 return 1;
4445 }
546b63fb 4446
32131a9c
RK
4447 abort ();
4448}
4449\f
4450/* Vector of reload-numbers showing the order in which the reloads should
4451 be processed. */
4452short reload_order[MAX_RELOADS];
4453
4454/* Indexed by reload number, 1 if incoming value
4455 inherited from previous insns. */
4456char reload_inherited[MAX_RELOADS];
4457
4458/* For an inherited reload, this is the insn the reload was inherited from,
4459 if we know it. Otherwise, this is 0. */
4460rtx reload_inheritance_insn[MAX_RELOADS];
4461
4462/* If non-zero, this is a place to get the value of the reload,
4463 rather than using reload_in. */
4464rtx reload_override_in[MAX_RELOADS];
4465
4466/* For each reload, the index in spill_regs of the spill register used,
4467 or -1 if we did not need one of the spill registers for this reload. */
4468int reload_spill_index[MAX_RELOADS];
4469
4470/* Index of last register assigned as a spill register. We allocate in
 4471   a round-robin fashion.  */
4472
1d2310f3 4473static int last_spill_reg = 0;
32131a9c
RK
4474
4475/* Find a spill register to use as a reload register for reload R.
4476 LAST_RELOAD is non-zero if this is the last reload for the insn being
4477 processed.
4478
4479 Set reload_reg_rtx[R] to the register allocated.
4480
4481 If NOERROR is nonzero, we return 1 if successful,
4482 or 0 if we couldn't find a spill reg and we didn't change anything. */
4483
4484static int
4485allocate_reload_reg (r, insn, last_reload, noerror)
4486 int r;
4487 rtx insn;
4488 int last_reload;
4489 int noerror;
4490{
4491 int i;
4492 int pass;
4493 int count;
4494 rtx new;
4495 int regno;
4496
4497 /* If we put this reload ahead, thinking it is a group,
4498 then insist on finding a group. Otherwise we can grab a
a8fdc208 4499 reg that some other reload needs.
32131a9c
RK
4500 (That can happen when we have a 68000 DATA_OR_FP_REG
4501 which is a group of data regs or one fp reg.)
4502 We need not be so restrictive if there are no more reloads
4503 for this insn.
4504
4505 ??? Really it would be nicer to have smarter handling
4506 for that kind of reg class, where a problem like this is normal.
4507 Perhaps those classes should be avoided for reloading
4508 by use of more alternatives. */
4509
4510 int force_group = reload_nregs[r] > 1 && ! last_reload;
4511
4512 /* If we want a single register and haven't yet found one,
4513 take any reg in the right class and not in use.
4514 If we want a consecutive group, here is where we look for it.
4515
4516 We use two passes so we can first look for reload regs to
4517 reuse, which are already in use for other reloads in this insn,
4518 and only then use additional registers.
4519 I think that maximizing reuse is needed to make sure we don't
4520 run out of reload regs. Suppose we have three reloads, and
4521 reloads A and B can share regs. These need two regs.
4522 Suppose A and B are given different regs.
4523 That leaves none for C. */
4524 for (pass = 0; pass < 2; pass++)
4525 {
4526 /* I is the index in spill_regs.
4527 We advance it round-robin between insns to use all spill regs
4528 equally, so that inherited reloads have a chance
4529 of leapfrogging each other. */
4530
4531 for (count = 0, i = last_spill_reg; count < n_spills; count++)
4532 {
4533 int class = (int) reload_reg_class[r];
4534
4535 i = (i + 1) % n_spills;
4536
546b63fb
RK
4537 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4538 reload_when_needed[r])
32131a9c
RK
4539 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4540 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
be7ae2a4
RK
4541 /* Look first for regs to share, then for unshared. But
4542 don't share regs used for inherited reloads; they are
4543 the ones we want to preserve. */
4544 && (pass
4545 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4546 spill_regs[i])
4547 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4548 spill_regs[i]))))
32131a9c
RK
4549 {
4550 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4551 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4552 (on 68000) got us two FP regs. If NR is 1,
4553 we would reject both of them. */
4554 if (force_group)
4555 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4556 /* If we need only one reg, we have already won. */
4557 if (nr == 1)
4558 {
4559 /* But reject a single reg if we demand a group. */
4560 if (force_group)
4561 continue;
4562 break;
4563 }
4564 /* Otherwise check that as many consecutive regs as we need
4565 are available here.
4566 Also, don't use for a group registers that are
4567 needed for nongroups. */
4568 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4569 while (nr > 1)
4570 {
4571 regno = spill_regs[i] + nr - 1;
4572 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4573 && spill_reg_order[regno] >= 0
546b63fb
RK
4574 && reload_reg_free_p (regno, reload_opnum[r],
4575 reload_when_needed[r])
32131a9c
RK
4576 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4577 regno)))
4578 break;
4579 nr--;
4580 }
4581 if (nr == 1)
4582 break;
4583 }
4584 }
4585
4586 /* If we found something on pass 1, omit pass 2. */
4587 if (count < n_spills)
4588 break;
4589 }
4590
4591 /* We should have found a spill register by now. */
4592 if (count == n_spills)
4593 {
4594 if (noerror)
4595 return 0;
139fc12e 4596 goto failure;
32131a9c
RK
4597 }
4598
be7ae2a4
RK
4599 /* I is the index in SPILL_REG_RTX of the reload register we are to
4600 allocate. Get an rtx for it and find its register number. */
32131a9c
RK
4601
4602 new = spill_reg_rtx[i];
4603
4604 if (new == 0 || GET_MODE (new) != reload_mode[r])
be7ae2a4
RK
4605 spill_reg_rtx[i] = new
4606 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4607
32131a9c
RK
4608 regno = true_regnum (new);
4609
4610 /* Detect when the reload reg can't hold the reload mode.
 4611     This used to be one `if', but the Sequent compiler can't handle that.  */
4612 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4613 {
4614 enum machine_mode test_mode = VOIDmode;
4615 if (reload_in[r])
4616 test_mode = GET_MODE (reload_in[r]);
4617 /* If reload_in[r] has VOIDmode, it means we will load it
4618 in whatever mode the reload reg has: to wit, reload_mode[r].
4619 We have already tested that for validity. */
4620 /* Aside from that, we need to test that the expressions
4621 to reload from or into have modes which are valid for this
4622 reload register. Otherwise the reload insns would be invalid. */
4623 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4624 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4625 if (! (reload_out[r] != 0
4626 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
be7ae2a4
RK
4627 {
4628 /* The reg is OK. */
4629 last_spill_reg = i;
4630
4631 /* Mark as in use for this insn the reload regs we use
4632 for this. */
4633 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4634 reload_when_needed[r], reload_mode[r]);
4635
4636 reload_reg_rtx[r] = new;
4637 reload_spill_index[r] = i;
4638 return 1;
4639 }
32131a9c
RK
4640 }
4641
4642 /* The reg is not OK. */
4643 if (noerror)
4644 return 0;
4645
139fc12e 4646 failure:
32131a9c
RK
4647 if (asm_noperands (PATTERN (insn)) < 0)
4648 /* It's the compiler's fault. */
4649 abort ();
4650
4651 /* It's the user's fault; the operand's mode and constraint
4652 don't match. Disable this reload so we don't crash in final. */
4653 error_for_asm (insn,
4654 "`asm' operand constraint incompatible with operand size");
4655 reload_in[r] = 0;
4656 reload_out[r] = 0;
4657 reload_reg_rtx[r] = 0;
4658 reload_optional[r] = 1;
4659 reload_secondary_p[r] = 1;
4660
4661 return 1;
4662}
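/* A minimal standalone sketch (not part of the reload pass) of the
   consecutive-group test performed in the loop above: starting at hard
   register START, check that NR registers in a row all satisfy a
   caller-supplied availability predicate.  The function and predicate
   names here are hypothetical; the real test above combines the class,
   spill-order, reload_reg_free_p and nongroup checks.  Kept inside
   `#if 0' so it is never compiled.  */
#if 0
static int
group_of_regs_available_p (start, nr, reg_ok_p)
     int start, nr;
     int (*reg_ok_p) ();
{
  int k;

  /* Every register in the run START .. START+NR-1 must pass the test;
     the loop above walks the run from the top down, which is equivalent.  */
  for (k = 0; k < nr; k++)
    if (! (*reg_ok_p) (start + k))
      return 0;
  return 1;
}
#endif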
4663\f
4664/* Assign hard reg targets for the pseudo-registers we must reload
4665 into hard regs for this insn.
4666 Also output the instructions to copy them in and out of the hard regs.
4667
4668 For machines with register classes, we are responsible for
4669 finding a reload reg in the proper class. */
4670
4671static void
4672choose_reload_regs (insn, avoid_return_reg)
4673 rtx insn;
32131a9c
RK
4674 rtx avoid_return_reg;
4675{
4676 register int i, j;
4677 int max_group_size = 1;
4678 enum reg_class group_class = NO_REGS;
4679 int inheritance;
4680
4681 rtx save_reload_reg_rtx[MAX_RELOADS];
4682 char save_reload_inherited[MAX_RELOADS];
4683 rtx save_reload_inheritance_insn[MAX_RELOADS];
4684 rtx save_reload_override_in[MAX_RELOADS];
4685 int save_reload_spill_index[MAX_RELOADS];
4686 HARD_REG_SET save_reload_reg_used;
546b63fb
RK
4687 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4688 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4689 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4690 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c 4691 HARD_REG_SET save_reload_reg_used_in_op_addr;
546b63fb
RK
4692 HARD_REG_SET save_reload_reg_used_in_insn;
4693 HARD_REG_SET save_reload_reg_used_in_other_addr;
32131a9c
RK
4694 HARD_REG_SET save_reload_reg_used_at_all;
4695
4696 bzero (reload_inherited, MAX_RELOADS);
4697 bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4698 bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));
4699
4700 CLEAR_HARD_REG_SET (reload_reg_used);
4701 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
32131a9c 4702 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
546b63fb
RK
4703 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4704 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
32131a9c 4705
546b63fb
RK
4706 for (i = 0; i < reload_n_operands; i++)
4707 {
4708 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4709 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4710 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4711 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4712 }
32131a9c
RK
4713
4714#ifdef SMALL_REGISTER_CLASSES
4715 /* Don't bother with avoiding the return reg
4716 if we have no mandatory reload that could use it. */
4717 if (avoid_return_reg)
4718 {
4719 int do_avoid = 0;
4720 int regno = REGNO (avoid_return_reg);
4721 int nregs
4722 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4723 int r;
4724
4725 for (r = regno; r < regno + nregs; r++)
4726 if (spill_reg_order[r] >= 0)
4727 for (j = 0; j < n_reloads; j++)
4728 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4729 && (reload_in[j] != 0 || reload_out[j] != 0
4730 || reload_secondary_p[j])
4731 &&
4732 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4733 do_avoid = 1;
4734 if (!do_avoid)
4735 avoid_return_reg = 0;
4736 }
4737#endif /* SMALL_REGISTER_CLASSES */
4738
4739#if 0 /* Not needed, now that we can always retry without inheritance. */
4740 /* See if we have more mandatory reloads than spill regs.
4741 If so, then we cannot risk optimizations that could prevent
a8fdc208 4742 reloads from sharing one spill register.
32131a9c
RK
4743
4744 Since we will try finding a better register than reload_reg_rtx
4745 unless it is equal to reload_in or reload_out, count such reloads. */
4746
4747 {
4748 int tem = 0;
4749#ifdef SMALL_REGISTER_CLASSES
4750 int tem = (avoid_return_reg != 0);
a8fdc208 4751#endif
32131a9c
RK
4752 for (j = 0; j < n_reloads; j++)
4753 if (! reload_optional[j]
4754 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4755 && (reload_reg_rtx[j] == 0
4756 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4757 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4758 tem++;
4759 if (tem > n_spills)
4760 must_reuse = 1;
4761 }
4762#endif
4763
4764#ifdef SMALL_REGISTER_CLASSES
4765 /* Don't use the subroutine call return reg for a reload
4766 if we are supposed to avoid it. */
4767 if (avoid_return_reg)
4768 {
4769 int regno = REGNO (avoid_return_reg);
4770 int nregs
4771 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4772 int r;
4773
4774 for (r = regno; r < regno + nregs; r++)
4775 if (spill_reg_order[r] >= 0)
4776 SET_HARD_REG_BIT (reload_reg_used, r);
4777 }
4778#endif /* SMALL_REGISTER_CLASSES */
4779
4780 /* In order to be certain of getting the registers we need,
4781 we must sort the reloads into order of increasing register class.
4782 Then our grabbing of reload registers will parallel the process
a8fdc208 4783 that provided the reload registers.
32131a9c
RK
4784
4785 Also note whether any of the reloads wants a consecutive group of regs.
4786 If so, record the maximum size of the group desired and what
4787 register class contains all the groups needed by this insn. */
4788
4789 for (j = 0; j < n_reloads; j++)
4790 {
4791 reload_order[j] = j;
4792 reload_spill_index[j] = -1;
4793
4794 reload_mode[j]
546b63fb
RK
4795 = (reload_inmode[j] == VOIDmode
4796 || (GET_MODE_SIZE (reload_outmode[j])
4797 > GET_MODE_SIZE (reload_inmode[j])))
4798 ? reload_outmode[j] : reload_inmode[j];
32131a9c
RK
4799
4800 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4801
4802 if (reload_nregs[j] > 1)
4803 {
4804 max_group_size = MAX (reload_nregs[j], max_group_size);
4805 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4806 }
4807
4808 /* If we have already decided to use a certain register,
4809 don't use it in another way. */
4810 if (reload_reg_rtx[j])
546b63fb 4811 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
32131a9c
RK
4812 reload_when_needed[j], reload_mode[j]);
4813 }
4814
4815 if (n_reloads > 1)
4816 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
4817
4818 bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
4819 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4820 bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
4821 sizeof reload_inheritance_insn);
4822 bcopy (reload_override_in, save_reload_override_in,
4823 sizeof reload_override_in);
4824 bcopy (reload_spill_index, save_reload_spill_index,
4825 sizeof reload_spill_index);
4826 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4827 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
32131a9c
RK
4828 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4829 reload_reg_used_in_op_addr);
546b63fb
RK
4830 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
4831 reload_reg_used_in_insn);
4832 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
4833 reload_reg_used_in_other_addr);
4834
4835 for (i = 0; i < reload_n_operands; i++)
4836 {
4837 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
4838 reload_reg_used_in_output[i]);
4839 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
4840 reload_reg_used_in_input[i]);
4841 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
4842 reload_reg_used_in_input_addr[i]);
4843 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
4844 reload_reg_used_in_output_addr[i]);
4845 }
32131a9c 4846
58b1581b
RS
4847  /* If -O, try first with inheritance, then with inheritance turned off.
4848 If not -O, don't do inheritance.
4849 Using inheritance when not optimizing leads to paradoxes
4850 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
4851 because one side of the comparison might be inherited. */
32131a9c 4852
58b1581b 4853 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
32131a9c
RK
4854 {
4855 /* Process the reloads in order of preference just found.
4856 Beyond this point, subregs can be found in reload_reg_rtx.
4857
4858 This used to look for an existing reloaded home for all
4859 of the reloads, and only then perform any new reloads.
4860 But that could lose if the reloads were done out of reg-class order
4861 because a later reload with a looser constraint might have an old
4862 home in a register needed by an earlier reload with a tighter constraint.
4863
4864 To solve this, we make two passes over the reloads, in the order
4865 described above. In the first pass we try to inherit a reload
4866 from a previous insn. If there is a later reload that needs a
4867 class that is a proper subset of the class being processed, we must
4868 also allocate a spill register during the first pass.
4869
4870 Then make a second pass over the reloads to allocate any reloads
4871 that haven't been given registers yet. */
4872
be7ae2a4
RK
4873 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
4874
32131a9c
RK
4875 for (j = 0; j < n_reloads; j++)
4876 {
4877 register int r = reload_order[j];
4878
4879 /* Ignore reloads that got marked inoperative. */
4880 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4881 continue;
4882
4883	  /* If find_reloads chose to use reload_in or reload_out as a reload
4884	     register, we don't need to choose one.  Otherwise, try even if it found
4885 one since we might save an insn if we find the value lying around. */
4886 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
4887 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
4888 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
4889 continue;
4890
4891#if 0 /* No longer needed for correct operation.
4892 It might give better code, or might not; worth an experiment? */
4893 /* If this is an optional reload, we can't inherit from earlier insns
4894 until we are sure that any non-optional reloads have been allocated.
4895 The following code takes advantage of the fact that optional reloads
4896 are at the end of reload_order. */
4897 if (reload_optional[r] != 0)
4898 for (i = 0; i < j; i++)
4899 if ((reload_out[reload_order[i]] != 0
4900 || reload_in[reload_order[i]] != 0
4901 || reload_secondary_p[reload_order[i]])
4902 && ! reload_optional[reload_order[i]]
4903 && reload_reg_rtx[reload_order[i]] == 0)
4904 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
4905#endif
4906
4907 /* First see if this pseudo is already available as reloaded
4908 for a previous insn. We cannot try to inherit for reloads
4909 that are smaller than the maximum number of registers needed
4910 for groups unless the register we would allocate cannot be used
4911 for the groups.
4912
4913 We could check here to see if this is a secondary reload for
4914 an object that is already in a register of the desired class.
4915 This would avoid the need for the secondary reload register.
4916 But this is complex because we can't easily determine what
4917 objects might want to be loaded via this reload. So let a register
4918 be allocated here. In `emit_reload_insns' we suppress one of the
4919 loads in the case described above. */
4920
4921 if (inheritance)
4922 {
4923 register int regno = -1;
db660765 4924 enum machine_mode mode;
32131a9c
RK
4925
4926 if (reload_in[r] == 0)
4927 ;
4928 else if (GET_CODE (reload_in[r]) == REG)
db660765
TW
4929 {
4930 regno = REGNO (reload_in[r]);
4931 mode = GET_MODE (reload_in[r]);
4932 }
32131a9c 4933 else if (GET_CODE (reload_in_reg[r]) == REG)
db660765
TW
4934 {
4935 regno = REGNO (reload_in_reg[r]);
4936 mode = GET_MODE (reload_in_reg[r]);
4937 }
32131a9c
RK
4938#if 0
4939 /* This won't work, since REGNO can be a pseudo reg number.
4940 Also, it takes much more hair to keep track of all the things
4941 that can invalidate an inherited reload of part of a pseudoreg. */
4942 else if (GET_CODE (reload_in[r]) == SUBREG
4943 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
4944 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
4945#endif
4946
4947 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
4948 {
4949 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
4950
4951 if (reg_reloaded_contents[i] == regno
db660765
TW
4952 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
4953 >= GET_MODE_SIZE (mode))
32131a9c
RK
4954 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4955 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4956 spill_regs[i])
4957 && (reload_nregs[r] == max_group_size
4958 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
4959 spill_regs[i]))
546b63fb
RK
4960 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
4961 reload_when_needed[r])
32131a9c 4962 && reload_reg_free_before_p (spill_regs[i],
546b63fb 4963 reload_opnum[r],
32131a9c
RK
4964 reload_when_needed[r]))
4965 {
4966 /* If a group is needed, verify that all the subsequent
4967 registers still have their values intact. */
4968 int nr
4969 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4970 int k;
4971
4972 for (k = 1; k < nr; k++)
4973 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
4974 != regno)
4975 break;
4976
4977 if (k == nr)
4978 {
c74fa651
RS
4979 int i1;
4980
4981 /* We found a register that contains the
4982 value we need. If this register is the
4983 same as an `earlyclobber' operand of the
4984 current insn, just mark it as a place to
4985 reload from since we can't use it as the
4986 reload register itself. */
4987
4988 for (i1 = 0; i1 < n_earlyclobbers; i1++)
4989 if (reg_overlap_mentioned_for_reload_p
4990 (reg_last_reload_reg[regno],
4991 reload_earlyclobbers[i1]))
4992 break;
4993
8908158d
RS
4994 if (i1 != n_earlyclobbers
4995 /* Don't really use the inherited spill reg
4996 if we need it wider than we've got it. */
4997 || (GET_MODE_SIZE (reload_mode[r])
4998 > GET_MODE_SIZE (mode)))
c74fa651
RS
4999 reload_override_in[r] = reg_last_reload_reg[regno];
5000 else
5001 {
54c40e68 5002 int k;
c74fa651
RS
5003 /* We can use this as a reload reg. */
5004 /* Mark the register as in use for this part of
5005 the insn. */
5006 mark_reload_reg_in_use (spill_regs[i],
5007 reload_opnum[r],
5008 reload_when_needed[r],
5009 reload_mode[r]);
5010 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5011 reload_inherited[r] = 1;
5012 reload_inheritance_insn[r]
5013 = reg_reloaded_insn[i];
5014 reload_spill_index[r] = i;
54c40e68
RS
5015 for (k = 0; k < nr; k++)
5016 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5017 spill_regs[i + k]);
c74fa651 5018 }
32131a9c
RK
5019 }
5020 }
5021 }
5022 }
5023
5024 /* Here's another way to see if the value is already lying around. */
5025 if (inheritance
5026 && reload_in[r] != 0
5027 && ! reload_inherited[r]
5028 && reload_out[r] == 0
5029 && (CONSTANT_P (reload_in[r])
5030 || GET_CODE (reload_in[r]) == PLUS
5031 || GET_CODE (reload_in[r]) == REG
5032 || GET_CODE (reload_in[r]) == MEM)
5033 && (reload_nregs[r] == max_group_size
5034 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5035 {
5036 register rtx equiv
5037 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
fb3821f7 5038 -1, NULL_PTR, 0, reload_mode[r]);
32131a9c
RK
5039 int regno;
5040
5041 if (equiv != 0)
5042 {
5043 if (GET_CODE (equiv) == REG)
5044 regno = REGNO (equiv);
5045 else if (GET_CODE (equiv) == SUBREG)
5046 {
5047 regno = REGNO (SUBREG_REG (equiv));
5048 if (regno < FIRST_PSEUDO_REGISTER)
5049 regno += SUBREG_WORD (equiv);
5050 }
5051 else
5052 abort ();
5053 }
5054
5055 /* If we found a spill reg, reject it unless it is free
5056 and of the desired class. */
5057 if (equiv != 0
5058 && ((spill_reg_order[regno] >= 0
546b63fb 5059 && ! reload_reg_free_before_p (regno, reload_opnum[r],
32131a9c
RK
5060 reload_when_needed[r]))
5061 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5062 regno)))
5063 equiv = 0;
5064
5065 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5066 equiv = 0;
5067
5068 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5069 equiv = 0;
5070
5071 /* We found a register that contains the value we need.
5072 If this register is the same as an `earlyclobber' operand
5073 of the current insn, just mark it as a place to reload from
5074 since we can't use it as the reload register itself. */
5075
5076 if (equiv != 0)
5077 for (i = 0; i < n_earlyclobbers; i++)
bfa30b22
RK
5078 if (reg_overlap_mentioned_for_reload_p (equiv,
5079 reload_earlyclobbers[i]))
32131a9c
RK
5080 {
5081 reload_override_in[r] = equiv;
5082 equiv = 0;
5083 break;
5084 }
5085
5086 /* JRV: If the equiv register we have found is explicitly
5087 clobbered in the current insn, mark but don't use, as above. */
5088
5089 if (equiv != 0 && regno_clobbered_p (regno, insn))
5090 {
5091 reload_override_in[r] = equiv;
5092 equiv = 0;
5093 }
5094
5095 /* If we found an equivalent reg, say no code need be generated
5096 to load it, and use it as our reload reg. */
3ec2ea3e 5097 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
32131a9c
RK
5098 {
5099 reload_reg_rtx[r] = equiv;
5100 reload_inherited[r] = 1;
5101 /* If it is a spill reg,
5102 mark the spill reg as in use for this insn. */
5103 i = spill_reg_order[regno];
5104 if (i >= 0)
be7ae2a4 5105 {
54c40e68
RS
5106 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5107 int k;
be7ae2a4
RK
5108 mark_reload_reg_in_use (regno, reload_opnum[r],
5109 reload_when_needed[r],
5110 reload_mode[r]);
54c40e68
RS
5111 for (k = 0; k < nr; k++)
5112 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
be7ae2a4 5113 }
32131a9c
RK
5114 }
5115 }
5116
5117 /* If we found a register to use already, or if this is an optional
5118 reload, we are done. */
5119 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5120 continue;
5121
5122#if 0 /* No longer needed for correct operation. Might or might not
5123 give better code on the average. Want to experiment? */
5124
5125 /* See if there is a later reload that has a class different from our
5126	     class that intersects our class or that requires fewer registers
5127 than our reload. If so, we must allocate a register to this
5128 reload now, since that reload might inherit a previous reload
5129 and take the only available register in our class. Don't do this
5130 for optional reloads since they will force all previous reloads
5131 to be allocated. Also don't do this for reloads that have been
5132 turned off. */
5133
5134 for (i = j + 1; i < n_reloads; i++)
5135 {
5136 int s = reload_order[i];
5137
d45cf215
RS
5138 if ((reload_in[s] == 0 && reload_out[s] == 0
5139 && ! reload_secondary_p[s])
32131a9c
RK
5140 || reload_optional[s])
5141 continue;
5142
5143 if ((reload_reg_class[s] != reload_reg_class[r]
5144 && reg_classes_intersect_p (reload_reg_class[r],
5145 reload_reg_class[s]))
5146 || reload_nregs[s] < reload_nregs[r])
5147 break;
5148 }
5149
5150 if (i == n_reloads)
5151 continue;
5152
5153 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5154#endif
5155 }
5156
5157 /* Now allocate reload registers for anything non-optional that
5158 didn't get one yet. */
5159 for (j = 0; j < n_reloads; j++)
5160 {
5161 register int r = reload_order[j];
5162
5163 /* Ignore reloads that got marked inoperative. */
5164 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5165 continue;
5166
5167 /* Skip reloads that already have a register allocated or are
5168 optional. */
5169 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5170 continue;
5171
5172 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5173 break;
5174 }
5175
5176 /* If that loop got all the way, we have won. */
5177 if (j == n_reloads)
5178 break;
5179
5180 fail:
5181 /* Loop around and try without any inheritance. */
5182 /* First undo everything done by the failed attempt
5183 to allocate with inheritance. */
5184 bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
5185 bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
5186 bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
5187 sizeof reload_inheritance_insn);
5188 bcopy (save_reload_override_in, reload_override_in,
5189 sizeof reload_override_in);
5190 bcopy (save_reload_spill_index, reload_spill_index,
5191 sizeof reload_spill_index);
5192 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5193 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
32131a9c
RK
5194 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5195 save_reload_reg_used_in_op_addr);
546b63fb
RK
5196 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5197 save_reload_reg_used_in_insn);
5198 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5199 save_reload_reg_used_in_other_addr);
5200
5201 for (i = 0; i < reload_n_operands; i++)
5202 {
5203 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5204 save_reload_reg_used_in_input[i]);
5205 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5206 save_reload_reg_used_in_output[i]);
5207 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5208 save_reload_reg_used_in_input_addr[i]);
5209 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5210 save_reload_reg_used_in_output_addr[i]);
5211 }
32131a9c
RK
5212 }
5213
5214 /* If we thought we could inherit a reload, because it seemed that
5215 nothing else wanted the same reload register earlier in the insn,
5216 verify that assumption, now that all reloads have been assigned. */
5217
5218 for (j = 0; j < n_reloads; j++)
5219 {
5220 register int r = reload_order[j];
5221
5222 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5223 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
546b63fb 5224 reload_opnum[r],
32131a9c
RK
5225 reload_when_needed[r]))
5226 reload_inherited[r] = 0;
5227
5228 /* If we found a better place to reload from,
5229 validate it in the same fashion, if it is a reload reg. */
5230 if (reload_override_in[r]
5231 && (GET_CODE (reload_override_in[r]) == REG
5232 || GET_CODE (reload_override_in[r]) == SUBREG))
5233 {
5234 int regno = true_regnum (reload_override_in[r]);
5235 if (spill_reg_order[regno] >= 0
546b63fb
RK
5236 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5237 reload_when_needed[r]))
32131a9c
RK
5238 reload_override_in[r] = 0;
5239 }
5240 }
5241
5242 /* Now that reload_override_in is known valid,
5243 actually override reload_in. */
5244 for (j = 0; j < n_reloads; j++)
5245 if (reload_override_in[j])
5246 reload_in[j] = reload_override_in[j];
5247
5248 /* If this reload won't be done because it has been cancelled or is
5249 optional and not inherited, clear reload_reg_rtx so other
5250 routines (such as subst_reloads) don't get confused. */
5251 for (j = 0; j < n_reloads; j++)
be7ae2a4
RK
5252 if (reload_reg_rtx[j] != 0
5253 && ((reload_optional[j] && ! reload_inherited[j])
5254 || (reload_in[j] == 0 && reload_out[j] == 0
5255 && ! reload_secondary_p[j])))
5256 {
5257 int regno = true_regnum (reload_reg_rtx[j]);
5258
5259 if (spill_reg_order[regno] >= 0)
5260 clear_reload_reg_in_use (regno, reload_opnum[j],
5261 reload_when_needed[j], reload_mode[j]);
5262 reload_reg_rtx[j] = 0;
5263 }
32131a9c
RK
5264
5265 /* Record which pseudos and which spill regs have output reloads. */
5266 for (j = 0; j < n_reloads; j++)
5267 {
5268 register int r = reload_order[j];
5269
5270 i = reload_spill_index[r];
5271
5272 /* I is nonneg if this reload used one of the spill regs.
5273 If reload_reg_rtx[r] is 0, this is an optional reload
5274 that we opted to ignore. */
5275 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5276 && reload_reg_rtx[r] != 0)
5277 {
5278 register int nregno = REGNO (reload_out[r]);
372e033b
RS
5279 int nr = 1;
5280
5281 if (nregno < FIRST_PSEUDO_REGISTER)
5282 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
32131a9c
RK
5283
5284 while (--nr >= 0)
372e033b
RS
5285 reg_has_output_reload[nregno + nr] = 1;
5286
5287 if (i >= 0)
32131a9c 5288 {
372e033b
RS
5289 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5290 while (--nr >= 0)
32131a9c
RK
5291 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5292 }
5293
5294 if (reload_when_needed[r] != RELOAD_OTHER
546b63fb
RK
5295 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5296 && reload_when_needed[r] != RELOAD_FOR_INSN)
32131a9c
RK
5297 abort ();
5298 }
5299 }
5300}
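/* A hypothetical sketch (not compiler code) of the save/try/restore
   pattern choose_reload_regs uses above: snapshot the assignment state,
   try to allocate every reload with inheritance enabled, and on failure
   restore the snapshot and retry with inheritance disabled.  The
   structure and function names below are illustrative only; the real
   state is the reload_* arrays and HARD_REG_SETs saved with bcopy and
   COPY_HARD_REG_SET.  Kept inside `#if 0' so it is never compiled.  */
#if 0
struct alloc_state
{
  int assignment[MAX_RELOADS];
};

/* Stand-in for the per-reload allocation loop; returns nonzero when
   every reload got a register.  */
static int try_all_reloads ();

static int
allocate_with_fallback (state)
     struct alloc_state *state;
{
  struct alloc_state saved;
  int inheritance;

  saved = *state;			/* like the bcopy saves above */
  for (inheritance = 1; inheritance >= 0; inheritance--)
    {
      if (try_all_reloads (state, inheritance))
	return 1;			/* every reload got a register */
      *state = saved;			/* undo the failed attempt */
    }
  return 0;
}
#endif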
5301\f
546b63fb
RK
5302/* If SMALL_REGISTER_CLASSES is defined, we may not have merged two
5303 reloads of the same item for fear that we might not have enough reload
5304 registers. However, normally they will get the same reload register
5305 and hence actually need not be loaded twice.
5306
5307 Here we check for the most common case of this phenomenon: when we have
5308   a number of reloads for the same object, each of which was allocated
5309 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5310 reload, and is not modified in the insn itself. If we find such,
5311 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5312 This will not increase the number of spill registers needed and will
5313 prevent redundant code. */
5314
5315#ifdef SMALL_REGISTER_CLASSES
5316
5317static void
5318merge_assigned_reloads (insn)
5319 rtx insn;
5320{
5321 int i, j;
5322
5323 /* Scan all the reloads looking for ones that only load values and
5324 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5325 assigned and not modified by INSN. */
5326
5327 for (i = 0; i < n_reloads; i++)
5328 {
5329 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5330 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5331 || reg_set_p (reload_reg_rtx[i], insn))
5332 continue;
5333
5334 /* Look at all other reloads. Ensure that the only use of this
5335 reload_reg_rtx is in a reload that just loads the same value
5336 as we do. Note that any secondary reloads must be of the identical
5337 class since the values, modes, and result registers are the
5338 same, so we need not do anything with any secondary reloads. */
5339
5340 for (j = 0; j < n_reloads; j++)
5341 {
5342 if (i == j || reload_reg_rtx[j] == 0
5343 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5344 reload_reg_rtx[i]))
5345 continue;
5346
5347 /* If the reload regs aren't exactly the same (e.g, different modes)
5348 or if the values are different, we can't merge anything with this
5349 reload register. */
5350
5351 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5352 || reload_out[j] != 0 || reload_in[j] == 0
5353 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5354 break;
5355 }
5356
5357 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5358 we, in fact, found any matching reloads. */
5359
5360 if (j == n_reloads)
5361 {
5362 for (j = 0; j < n_reloads; j++)
5363 if (i != j && reload_reg_rtx[j] != 0
5364 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5365 {
5366 reload_when_needed[i] = RELOAD_OTHER;
5367 reload_in[j] = 0;
5368 transfer_replacements (i, j);
5369 }
5370
5371 /* If this is now RELOAD_OTHER, look for any reloads that load
5372 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5373 if they were for inputs, RELOAD_OTHER for outputs. Note that
5374 this test is equivalent to looking for reloads for this operand
5375 number. */
5376
5377 if (reload_when_needed[i] == RELOAD_OTHER)
5378 for (j = 0; j < n_reloads; j++)
5379 if (reload_in[j] != 0
5380		&& reload_when_needed[j] != RELOAD_OTHER
5381 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5382 reload_in[i]))
5383 reload_when_needed[j]
5384		= reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5385 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5386 }
5387 }
5388}
5389#endif /* SMALL_REGISTER_CLASSES */
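/* A standalone sketch (hypothetical, not the compiler's own data
   structures) of the merge test used above: two input reloads can be
   merged when they were given the same reload register, load the same
   value, and neither of them is also an output reload.  Kept inside
   `#if 0' so it is never compiled.  */
#if 0
struct mini_reload
{
  rtx in, out, reg;
};

static int
mergeable_reloads_p (a, b)
     struct mini_reload *a, *b;
{
  return (rtx_equal_p (a->reg, b->reg)	/* same reload register */
	  && rtx_equal_p (a->in, b->in)	/* same value loaded */
	  && a->out == 0		/* neither is an output reload */
	  && b->out == 0);
}
#endif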
5390\f
32131a9c
RK
5391/* Output insns to reload values in and out of the chosen reload regs. */
5392
5393static void
5394emit_reload_insns (insn)
5395 rtx insn;
5396{
5397 register int j;
546b63fb
RK
5398 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5399 rtx other_input_address_reload_insns = 0;
5400 rtx other_input_reload_insns = 0;
5401 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5402 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5403 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5404 rtx operand_reload_insns = 0;
32131a9c 5405 rtx following_insn = NEXT_INSN (insn);
a8efe40d 5406 rtx before_insn = insn;
32131a9c
RK
5407 int special;
5408 /* Values to be put in spill_reg_store are put here first. */
5409 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5410
546b63fb
RK
5411 for (j = 0; j < reload_n_operands; j++)
5412 input_reload_insns[j] = input_address_reload_insns[j]
5413 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5414
d45cf215 5415 /* If this is a CALL_INSN preceded by USE insns, any reload insns
a8efe40d
RK
5416 must go in front of the first USE insn, not in front of INSN. */
5417
5418 if (GET_CODE (insn) == CALL_INSN && GET_CODE (PREV_INSN (insn)) == INSN
5419 && GET_CODE (PATTERN (PREV_INSN (insn))) == USE)
5420 while (GET_CODE (PREV_INSN (before_insn)) == INSN
5421 && GET_CODE (PATTERN (PREV_INSN (before_insn))) == USE)
546b63fb
RK
5422 before_insn = PREV_INSN (before_insn);
5423
a34a369b 5424 /* If INSN is followed by any CLOBBER insns made by find_reloads,
546b63fb
RK
5425 put our reloads after them since they may otherwise be
5426 misinterpreted. */
5427
a34a369b
DE
5428 while (GET_CODE (following_insn) == INSN
5429 && GET_MODE (following_insn) == DImode
5430 && GET_CODE (PATTERN (following_insn)) == CLOBBER
5431 && NEXT_INSN (following_insn) != 0)
546b63fb 5432 following_insn = NEXT_INSN (following_insn);
a8efe40d 5433
32131a9c
RK
5434 /* Now output the instructions to copy the data into and out of the
5435 reload registers. Do these in the order that the reloads were reported,
5436 since reloads of base and index registers precede reloads of operands
5437 and the operands may need the base and index registers reloaded. */
5438
5439 for (j = 0; j < n_reloads; j++)
5440 {
5441 register rtx old;
5442 rtx oldequiv_reg = 0;
32131a9c
RK
5443 rtx store_insn = 0;
5444
5445 old = reload_in[j];
5446 if (old != 0 && ! reload_inherited[j]
5447 && ! rtx_equal_p (reload_reg_rtx[j], old)
5448 && reload_reg_rtx[j] != 0)
5449 {
5450 register rtx reloadreg = reload_reg_rtx[j];
5451 rtx oldequiv = 0;
5452 enum machine_mode mode;
546b63fb 5453 rtx *where;
32131a9c
RK
5454
5455 /* Determine the mode to reload in.
5456 This is very tricky because we have three to choose from.
5457 There is the mode the insn operand wants (reload_inmode[J]).
5458 There is the mode of the reload register RELOADREG.
5459 There is the intrinsic mode of the operand, which we could find
5460 by stripping some SUBREGs.
5461 It turns out that RELOADREG's mode is irrelevant:
5462 we can change that arbitrarily.
5463
5464 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5465 then the reload reg may not support QImode moves, so use SImode.
5466 If foo is in memory due to spilling a pseudo reg, this is safe,
5467 because the QImode value is in the least significant part of a
5468 slot big enough for a SImode. If foo is some other sort of
5469 memory reference, then it is impossible to reload this case,
5470 so previous passes had better make sure this never happens.
5471
5472 Then consider a one-word union which has SImode and one of its
5473 members is a float, being fetched as (SUBREG:SF union:SI).
5474 We must fetch that as SFmode because we could be loading into
5475 a float-only register. In this case OLD's mode is correct.
5476
5477 Consider an immediate integer: it has VOIDmode. Here we need
5478 to get a mode from something else.
5479
5480 In some cases, there is a fourth mode, the operand's
5481 containing mode. If the insn specifies a containing mode for
5482 this operand, it overrides all others.
5483
5484 I am not sure whether the algorithm here is always right,
5485 but it does the right things in those cases. */
5486
5487 mode = GET_MODE (old);
5488 if (mode == VOIDmode)
5489 mode = reload_inmode[j];
32131a9c
RK
5490
5491#ifdef SECONDARY_INPUT_RELOAD_CLASS
5492 /* If we need a secondary register for this operation, see if
5493 the value is already in a register in that class. Don't
5494 do this if the secondary register will be used as a scratch
5495 register. */
5496
5497 if (reload_secondary_reload[j] >= 0
58b1581b
RS
5498 && reload_secondary_icode[j] == CODE_FOR_nothing
5499 && optimize)
32131a9c
RK
5500 oldequiv
5501 = find_equiv_reg (old, insn,
5502 reload_reg_class[reload_secondary_reload[j]],
fb3821f7 5503 -1, NULL_PTR, 0, mode);
32131a9c
RK
5504#endif
5505
5506 /* If reloading from memory, see if there is a register
5507 that already holds the same value. If so, reload from there.
5508 We can pass 0 as the reload_reg_p argument because
5509 any other reload has either already been emitted,
5510 in which case find_equiv_reg will see the reload-insn,
5511 or has yet to be emitted, in which case it doesn't matter
5512 because we will use this equiv reg right away. */
5513
58b1581b 5514 if (oldequiv == 0 && optimize
32131a9c
RK
5515 && (GET_CODE (old) == MEM
5516 || (GET_CODE (old) == REG
5517 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5518 && reg_renumber[REGNO (old)] < 0)))
546b63fb 5519 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
fb3821f7 5520 -1, NULL_PTR, 0, mode);
32131a9c
RK
5521
5522 if (oldequiv)
5523 {
5524 int regno = true_regnum (oldequiv);
5525
5526 /* If OLDEQUIV is a spill register, don't use it for this
5527 if any other reload needs it at an earlier stage of this insn
a8fdc208 5528 or at this stage. */
32131a9c 5529 if (spill_reg_order[regno] >= 0
546b63fb
RK
5530 && (! reload_reg_free_p (regno, reload_opnum[j],
5531 reload_when_needed[j])
5532 || ! reload_reg_free_before_p (regno, reload_opnum[j],
32131a9c
RK
5533 reload_when_needed[j])))
5534 oldequiv = 0;
5535
5536 /* If OLDEQUIV is not a spill register,
5537 don't use it if any other reload wants it. */
5538 if (spill_reg_order[regno] < 0)
5539 {
5540 int k;
5541 for (k = 0; k < n_reloads; k++)
5542 if (reload_reg_rtx[k] != 0 && k != j
bfa30b22
RK
5543 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5544 oldequiv))
32131a9c
RK
5545 {
5546 oldequiv = 0;
5547 break;
5548 }
5549 }
546b63fb
RK
5550
5551 /* If it is no cheaper to copy from OLDEQUIV into the
5552 reload register than it would be to move from memory,
5553 don't use it. Likewise, if we need a secondary register
5554 or memory. */
5555
5556 if (oldequiv != 0
5557 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5558 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5559 reload_reg_class[j])
5560 >= MEMORY_MOVE_COST (mode)))
5561#ifdef SECONDARY_INPUT_RELOAD_CLASS
5562 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5563 mode, oldequiv)
5564 != NO_REGS)
5565#endif
5566#ifdef SECONDARY_MEMORY_NEEDED
5567 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5568 REGNO_REG_CLASS (regno),
5569 mode)
5570#endif
5571 ))
5572 oldequiv = 0;
32131a9c
RK
5573 }
5574
5575 if (oldequiv == 0)
5576 oldequiv = old;
5577 else if (GET_CODE (oldequiv) == REG)
5578 oldequiv_reg = oldequiv;
5579 else if (GET_CODE (oldequiv) == SUBREG)
5580 oldequiv_reg = SUBREG_REG (oldequiv);
5581
5582 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
3abe6f90
RK
5583 then load RELOADREG from OLDEQUIV. Note that we cannot use
5584 gen_lowpart_common since it can do the wrong thing when
5585 RELOADREG has a multi-word mode. Note that RELOADREG
5586 must always be a REG here. */
32131a9c
RK
5587
5588 if (GET_MODE (reloadreg) != mode)
3abe6f90 5589 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
32131a9c
RK
5590 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5591 oldequiv = SUBREG_REG (oldequiv);
5592 if (GET_MODE (oldequiv) != VOIDmode
5593 && mode != GET_MODE (oldequiv))
5594 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5595
546b63fb 5596 /* Switch to the right place to emit the reload insns. */
32131a9c
RK
5597 switch (reload_when_needed[j])
5598 {
32131a9c 5599 case RELOAD_OTHER:
546b63fb
RK
5600 where = &other_input_reload_insns;
5601 break;
5602 case RELOAD_FOR_INPUT:
5603 where = &input_reload_insns[reload_opnum[j]];
32131a9c 5604 break;
546b63fb
RK
5605 case RELOAD_FOR_INPUT_ADDRESS:
5606 where = &input_address_reload_insns[reload_opnum[j]];
32131a9c 5607 break;
546b63fb
RK
5608 case RELOAD_FOR_OUTPUT_ADDRESS:
5609 where = &output_address_reload_insns[reload_opnum[j]];
32131a9c
RK
5610 break;
5611 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
5612 where = &operand_reload_insns;
5613 break;
5614 case RELOAD_FOR_OTHER_ADDRESS:
5615 where = &other_input_address_reload_insns;
5616 break;
5617 default:
5618 abort ();
32131a9c
RK
5619 }
5620
546b63fb 5621 push_to_sequence (*where);
32131a9c
RK
5622 special = 0;
5623
5624 /* Auto-increment addresses must be reloaded in a special way. */
5625 if (GET_CODE (oldequiv) == POST_INC
5626 || GET_CODE (oldequiv) == POST_DEC
5627 || GET_CODE (oldequiv) == PRE_INC
5628 || GET_CODE (oldequiv) == PRE_DEC)
5629 {
5630	      /* We are not going to bother supporting the case where an
5631 incremented register can't be copied directly from
5632 OLDEQUIV since this seems highly unlikely. */
5633 if (reload_secondary_reload[j] >= 0)
5634 abort ();
5635 /* Prevent normal processing of this reload. */
5636 special = 1;
5637 /* Output a special code sequence for this case. */
546b63fb 5638 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
32131a9c
RK
5639 }
5640
5641 /* If we are reloading a pseudo-register that was set by the previous
5642 insn, see if we can get rid of that pseudo-register entirely
5643 by redirecting the previous insn into our reload register. */
5644
5645 else if (optimize && GET_CODE (old) == REG
5646 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5647 && dead_or_set_p (insn, old)
5648 /* This is unsafe if some other reload
5649 uses the same reg first. */
546b63fb
RK
5650 && reload_reg_free_before_p (REGNO (reloadreg),
5651 reload_opnum[j],
5652 reload_when_needed[j]))
32131a9c
RK
5653 {
5654 rtx temp = PREV_INSN (insn);
5655 while (temp && GET_CODE (temp) == NOTE)
5656 temp = PREV_INSN (temp);
5657 if (temp
5658 && GET_CODE (temp) == INSN
5659 && GET_CODE (PATTERN (temp)) == SET
5660 && SET_DEST (PATTERN (temp)) == old
5661 /* Make sure we can access insn_operand_constraint. */
5662 && asm_noperands (PATTERN (temp)) < 0
5663 /* This is unsafe if prev insn rejects our reload reg. */
5664 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5665 reloadreg)
5666 /* This is unsafe if operand occurs more than once in current
5667 insn. Perhaps some occurrences aren't reloaded. */
5668 && count_occurrences (PATTERN (insn), old) == 1
5669 /* Don't risk splitting a matching pair of operands. */
5670 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5671 {
5672 /* Store into the reload register instead of the pseudo. */
5673 SET_DEST (PATTERN (temp)) = reloadreg;
5674 /* If these are the only uses of the pseudo reg,
5675 pretend for GDB it lives in the reload reg we used. */
5676 if (reg_n_deaths[REGNO (old)] == 1
5677 && reg_n_sets[REGNO (old)] == 1)
5678 {
5679 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5680 alter_reg (REGNO (old), -1);
5681 }
5682 special = 1;
5683 }
5684 }
5685
546b63fb
RK
5686 /* We can't do that, so output an insn to load RELOADREG. */
5687
32131a9c
RK
5688 if (! special)
5689 {
5690#ifdef SECONDARY_INPUT_RELOAD_CLASS
5691 rtx second_reload_reg = 0;
5692 enum insn_code icode;
5693
5694 /* If we have a secondary reload, pick up the secondary register
d445b551
RK
5695 and icode, if any. If OLDEQUIV and OLD are different or
5696 if this is an in-out reload, recompute whether or not we
5697 still need a secondary register and what the icode should
5698 be. If we still need a secondary register and the class or
5699 icode is different, go back to reloading from OLD if using
5700 OLDEQUIV means that we got the wrong type of register. We
5701 cannot have different class or icode due to an in-out reload
5702 because we don't make such reloads when both the input and
5703 output need secondary reload registers. */
32131a9c
RK
5704
5705 if (reload_secondary_reload[j] >= 0)
5706 {
5707 int secondary_reload = reload_secondary_reload[j];
1554c2c6
RK
5708 rtx real_oldequiv = oldequiv;
5709 rtx real_old = old;
5710
5711 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5712 and similarly for OLD.
5713 See comments in find_secondary_reload in reload.c. */
5714 if (GET_CODE (oldequiv) == REG
5715 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5716 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5717 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5718
5719 if (GET_CODE (old) == REG
5720 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5721 && reg_equiv_mem[REGNO (old)] != 0)
5722 real_old = reg_equiv_mem[REGNO (old)];
5723
32131a9c
RK
5724 second_reload_reg = reload_reg_rtx[secondary_reload];
5725 icode = reload_secondary_icode[j];
5726
d445b551
RK
5727 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5728 || (reload_in[j] != 0 && reload_out[j] != 0))
32131a9c
RK
5729 {
5730 enum reg_class new_class
5731 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
1554c2c6 5732 mode, real_oldequiv);
32131a9c
RK
5733
5734 if (new_class == NO_REGS)
5735 second_reload_reg = 0;
5736 else
5737 {
5738 enum insn_code new_icode;
5739 enum machine_mode new_mode;
5740
5741 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5742 REGNO (second_reload_reg)))
1554c2c6 5743 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
5744 else
5745 {
5746 new_icode = reload_in_optab[(int) mode];
5747 if (new_icode != CODE_FOR_nothing
5748 && ((insn_operand_predicate[(int) new_icode][0]
a8fdc208 5749 && ! ((*insn_operand_predicate[(int) new_icode][0])
32131a9c 5750 (reloadreg, mode)))
a8fdc208
RS
5751 || (insn_operand_predicate[(int) new_icode][1]
5752 && ! ((*insn_operand_predicate[(int) new_icode][1])
1554c2c6 5753 (real_oldequiv, mode)))))
32131a9c
RK
5754 new_icode = CODE_FOR_nothing;
5755
5756 if (new_icode == CODE_FOR_nothing)
5757 new_mode = mode;
5758 else
196ddf8a 5759 new_mode = insn_operand_mode[(int) new_icode][2];
32131a9c
RK
5760
5761 if (GET_MODE (second_reload_reg) != new_mode)
5762 {
5763 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5764 new_mode))
1554c2c6 5765 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
5766 else
5767 second_reload_reg
3aaa90c7
MM
5768 = gen_rtx (REG, new_mode,
5769 REGNO (second_reload_reg));
32131a9c
RK
5770 }
5771 }
5772 }
5773 }
5774
5775 /* If we still need a secondary reload register, check
5776 to see if it is being used as a scratch or intermediate
1554c2c6
RK
5777 register and generate code appropriately. If we need
5778 a scratch register, use REAL_OLDEQUIV since the form of
5779 the insn may depend on the actual address if it is
5780 a MEM. */
32131a9c
RK
5781
5782 if (second_reload_reg)
5783 {
5784 if (icode != CODE_FOR_nothing)
5785 {
546b63fb
RK
5786 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
5787 second_reload_reg));
32131a9c
RK
5788 special = 1;
5789 }
5790 else
5791 {
5792 /* See if we need a scratch register to load the
5793 intermediate register (a tertiary reload). */
5794 enum insn_code tertiary_icode
5795 = reload_secondary_icode[secondary_reload];
5796
5797 if (tertiary_icode != CODE_FOR_nothing)
5798 {
5799 rtx third_reload_reg
5800 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5801
546b63fb
RK
5802 emit_insn ((GEN_FCN (tertiary_icode)
5803 (second_reload_reg, real_oldequiv,
5804 third_reload_reg)));
32131a9c
RK
5805 }
5806 else
546b63fb
RK
5807 gen_input_reload (second_reload_reg, oldequiv,
5808 reload_opnum[j],
5809 reload_when_needed[j]);
5810
5811 oldequiv = second_reload_reg;
32131a9c
RK
5812 }
5813 }
5814 }
5815#endif
5816
5817 if (! special)
546b63fb
RK
5818 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
5819 reload_when_needed[j]);
32131a9c
RK
5820
5821#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5822 /* We may have to make a REG_DEAD note for the secondary reload
5823 register in the insns we just made. Find the last insn that
5824 mentioned the register. */
5825 if (! special && second_reload_reg
5826 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5827 {
5828 rtx prev;
5829
546b63fb 5830 for (prev = get_last_insn (); prev;
32131a9c
RK
5831 prev = PREV_INSN (prev))
5832	      if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
bfa30b22
RK
5833 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5834 PATTERN (prev)))
32131a9c
RK
5835 {
5836 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5837 second_reload_reg,
5838 REG_NOTES (prev));
5839 break;
5840 }
5841 }
5842#endif
5843 }
5844
546b63fb
RK
5845 /* End this sequence. */
5846 *where = get_insns ();
5847 end_sequence ();
32131a9c
RK
5848 }
5849
5850 /* Add a note saying the input reload reg
5851 dies in this insn, if anyone cares. */
5852#ifdef PRESERVE_DEATH_INFO_REGNO_P
5853 if (old != 0
5854 && reload_reg_rtx[j] != old
5855 && reload_reg_rtx[j] != 0
5856 && reload_out[j] == 0
5857 && ! reload_inherited[j]
5858 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
5859 {
5860 register rtx reloadreg = reload_reg_rtx[j];
5861
a8fdc208 5862#if 0
32131a9c
RK
5863 /* We can't abort here because we need to support this for sched.c.
5864 It's not terrible to miss a REG_DEAD note, but we should try
5865 to figure out how to do this correctly. */
5866 /* The code below is incorrect for address-only reloads. */
5867 if (reload_when_needed[j] != RELOAD_OTHER
5868 && reload_when_needed[j] != RELOAD_FOR_INPUT)
5869 abort ();
5870#endif
5871
5872 /* Add a death note to this insn, for an input reload. */
5873
5874 if ((reload_when_needed[j] == RELOAD_OTHER
5875 || reload_when_needed[j] == RELOAD_FOR_INPUT)
5876 && ! dead_or_set_p (insn, reloadreg))
5877 REG_NOTES (insn)
5878 = gen_rtx (EXPR_LIST, REG_DEAD,
5879 reloadreg, REG_NOTES (insn));
5880 }
5881
5882 /* When we inherit a reload, the last marked death of the reload reg
5883 may no longer really be a death. */
5884 if (reload_reg_rtx[j] != 0
5885 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
5886 && reload_inherited[j])
5887 {
5888 /* Handle inheriting an output reload.
5889 Remove the death note from the output reload insn. */
5890 if (reload_spill_index[j] >= 0
5891 && GET_CODE (reload_in[j]) == REG
5892 && spill_reg_store[reload_spill_index[j]] != 0
5893 && find_regno_note (spill_reg_store[reload_spill_index[j]],
5894 REG_DEAD, REGNO (reload_reg_rtx[j])))
5895 remove_death (REGNO (reload_reg_rtx[j]),
5896 spill_reg_store[reload_spill_index[j]]);
5897 /* Likewise for input reloads that were inherited. */
5898 else if (reload_spill_index[j] >= 0
5899 && GET_CODE (reload_in[j]) == REG
5900 && spill_reg_store[reload_spill_index[j]] == 0
5901 && reload_inheritance_insn[j] != 0
a8fdc208 5902 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
32131a9c
RK
5903 REGNO (reload_reg_rtx[j])))
5904 remove_death (REGNO (reload_reg_rtx[j]),
5905 reload_inheritance_insn[j]);
5906 else
5907 {
5908 rtx prev;
5909
5910 /* We got this register from find_equiv_reg.
5911 Search back for its last death note and get rid of it.
5912 But don't search back too far.
5913 Don't go past a place where this reg is set,
5914 since a death note before that remains valid. */
5915 for (prev = PREV_INSN (insn);
5916 prev && GET_CODE (prev) != CODE_LABEL;
5917 prev = PREV_INSN (prev))
5918 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5919 && dead_or_set_p (prev, reload_reg_rtx[j]))
5920 {
5921 if (find_regno_note (prev, REG_DEAD,
5922 REGNO (reload_reg_rtx[j])))
5923 remove_death (REGNO (reload_reg_rtx[j]), prev);
5924 break;
5925 }
5926 }
5927 }
5928
5929 /* We might have used find_equiv_reg above to choose an alternate
5930 place from which to reload. If so, and it died, we need to remove
5931 that death and move it to one of the insns we just made. */
5932
5933 if (oldequiv_reg != 0
5934 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
5935 {
5936 rtx prev, prev1;
5937
5938 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
5939 prev = PREV_INSN (prev))
5940 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5941 && dead_or_set_p (prev, oldequiv_reg))
5942 {
5943 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
5944 {
5945 for (prev1 = this_reload_insn;
5946 prev1; prev1 = PREV_INSN (prev1))
5947		    if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
bfa30b22
RK
5948 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
5949 PATTERN (prev1)))
32131a9c
RK
5950 {
5951 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
5952 oldequiv_reg,
5953 REG_NOTES (prev1));
5954 break;
5955 }
5956 remove_death (REGNO (oldequiv_reg), prev);
5957 }
5958 break;
5959 }
5960 }
5961#endif
5962
5963 /* If we are reloading a register that was recently stored in with an
5964 output-reload, see if we can prove there was
5965 actually no need to store the old value in it. */
5966
5967 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
546b63fb 5968 && reload_in[j] != 0
32131a9c
RK
5969 && GET_CODE (reload_in[j]) == REG
5970#if 0
5971 /* There doesn't seem to be any reason to restrict this to pseudos
5972 and doing so loses in the case where we are copying from a
5973 register of the wrong class. */
5974 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
5975#endif
5976 && spill_reg_store[reload_spill_index[j]] != 0
546b63fb
RK
5977 /* This is unsafe if some other reload uses the same reg first. */
5978 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
5979 reload_opnum[j], reload_when_needed[j])
32131a9c
RK
5980 && dead_or_set_p (insn, reload_in[j])
5981 /* This is unsafe if operand occurs more than once in current
5982 insn. Perhaps some occurrences weren't reloaded. */
5983 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5984 delete_output_reload (insn, j,
5985 spill_reg_store[reload_spill_index[j]]);
5986
5987 /* Input-reloading is done. Now do output-reloading,
5988 storing the value from the reload-register after the main insn
5989 if reload_out[j] is nonzero.
5990
5991 ??? At some point we need to support handling output reloads of
5992 JUMP_INSNs or insns that set cc0. */
5993 old = reload_out[j];
5994 if (old != 0
5995 && reload_reg_rtx[j] != old
5996 && reload_reg_rtx[j] != 0)
5997 {
5998 register rtx reloadreg = reload_reg_rtx[j];
5999 register rtx second_reloadreg = 0;
32131a9c
RK
6000 rtx note, p;
6001 enum machine_mode mode;
6002 int special = 0;
6003
6004 /* An output operand that dies right away does need a reload,
6005 but need not be copied from it. Show the new location in the
6006 REG_UNUSED note. */
6007 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6008 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6009 {
6010 XEXP (note, 0) = reload_reg_rtx[j];
6011 continue;
6012 }
6013 else if (GET_CODE (old) == SCRATCH)
6014 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6015 but we don't want to make an output reload. */
6016 continue;
6017
6018#if 0
6019 /* Strip off of OLD any size-increasing SUBREGs such as
6020 (SUBREG:SI foo:QI 0). */
6021
6022 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6023 && (GET_MODE_SIZE (GET_MODE (old))
6024 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6025 old = SUBREG_REG (old);
6026#endif
6027
6028	  /* If this is a JUMP_INSN, we can't support output reloads yet.  */
6029 if (GET_CODE (insn) == JUMP_INSN)
6030 abort ();
6031
546b63fb
RK
6032 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6033
32131a9c
RK
6034 /* Determine the mode to reload in.
6035 See comments above (for input reloading). */
6036
6037 mode = GET_MODE (old);
6038 if (mode == VOIDmode)
79a365a7
RS
6039 {
6040 /* VOIDmode should never happen for an output. */
6041 if (asm_noperands (PATTERN (insn)) < 0)
6042 /* It's the compiler's fault. */
6043 abort ();
6044 error_for_asm (insn, "output operand is constant in `asm'");
6045 /* Prevent crash--use something we know is valid. */
6046 mode = word_mode;
6047 old = gen_rtx (REG, mode, REGNO (reloadreg));
6048 }
32131a9c 6049
32131a9c 6050 if (GET_MODE (reloadreg) != mode)
3abe6f90 6051 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
32131a9c
RK
6052
6053#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6054
6055 /* If we need two reload regs, set RELOADREG to the intermediate
6056 one, since it will be stored into OUT. We might need a secondary
6057 register only for an input reload, so check again here. */
6058
1554c2c6 6059 if (reload_secondary_reload[j] >= 0)
32131a9c 6060 {
1554c2c6 6061 rtx real_old = old;
32131a9c 6062
1554c2c6
RK
6063 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6064 && reg_equiv_mem[REGNO (old)] != 0)
6065 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 6066
1554c2c6
RK
6067	      if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6068 mode, real_old)
6069 != NO_REGS))
6070 {
6071 second_reloadreg = reloadreg;
6072 reloadreg = reload_reg_rtx[reload_secondary_reload[j]];
32131a9c 6073
1554c2c6
RK
6074 /* See if RELOADREG is to be used as a scratch register
6075 or as an intermediate register. */
6076 if (reload_secondary_icode[j] != CODE_FOR_nothing)
32131a9c 6077 {
546b63fb
RK
6078 emit_insn ((GEN_FCN (reload_secondary_icode[j])
6079 (real_old, second_reloadreg, reloadreg)));
1554c2c6 6080 special = 1;
32131a9c
RK
6081 }
6082 else
1554c2c6
RK
6083 {
6084 /* See if we need both a scratch and intermediate reload
6085 register. */
6086 int secondary_reload = reload_secondary_reload[j];
6087 enum insn_code tertiary_icode
6088 = reload_secondary_icode[secondary_reload];
6089 rtx pat;
32131a9c 6090
1554c2c6
RK
6091 if (GET_MODE (reloadreg) != mode)
6092 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6093
6094 if (tertiary_icode != CODE_FOR_nothing)
6095 {
6096 rtx third_reloadreg
6097 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
6098 pat = (GEN_FCN (tertiary_icode)
6099 (reloadreg, second_reloadreg, third_reloadreg));
6100 }
9ad5f9f6
JW
6101#ifdef SECONDARY_MEMORY_NEEDED
6102 /* If we need a memory location to do the move, do it that way. */
6103 else if (GET_CODE (reloadreg) == REG
6104 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
6105 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
6106 REGNO_REG_CLASS (REGNO (second_reloadreg)),
6107 GET_MODE (second_reloadreg)))
6108 {
6109 /* Get the memory to use and rewrite both registers
6110 to its mode. */
546b63fb
RK
6111 rtx loc
6112 = get_secondary_mem (reloadreg,
6113 GET_MODE (second_reloadreg),
6114 reload_opnum[j],
6115 reload_when_needed[j]);
9ad5f9f6
JW
6116 rtx tmp_reloadreg;
6117
6118 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6119 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6120 REGNO (second_reloadreg));
6121
6122 if (GET_MODE (loc) != GET_MODE (reloadreg))
6123 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6124 REGNO (reloadreg));
6125 else
6126 tmp_reloadreg = reloadreg;
6127
546b63fb 6128 emit_move_insn (loc, second_reloadreg);
9ad5f9f6
JW
6129 pat = gen_move_insn (tmp_reloadreg, loc);
6130 }
6131#endif
1554c2c6
RK
6132 else
6133 pat = gen_move_insn (reloadreg, second_reloadreg);
6134
546b63fb 6135 emit_insn (pat);
1554c2c6 6136 }
32131a9c
RK
6137 }
6138 }
6139#endif
6140
6141 /* Output the last reload insn. */
6142 if (! special)
0dadecf6
RK
6143 {
6144#ifdef SECONDARY_MEMORY_NEEDED
6145 /* If we need a memory location to do the move, do it that way. */
6146 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6147 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6148 REGNO_REG_CLASS (REGNO (reloadreg)),
6149 GET_MODE (reloadreg)))
6150 {
6151 /* Get the memory to use and rewrite both registers to
6152 its mode. */
546b63fb
RK
6153 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6154 reload_opnum[j],
6155 reload_when_needed[j]);
0dadecf6
RK
6156
6157 if (GET_MODE (loc) != GET_MODE (reloadreg))
6158 reloadreg = gen_rtx (REG, GET_MODE (loc),
6159 REGNO (reloadreg));
6160
6161 if (GET_MODE (loc) != GET_MODE (old))
6162 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6163
546b63fb
RK
6164 emit_insn (gen_move_insn (loc, reloadreg));
6165 emit_insn (gen_move_insn (old, loc));
0dadecf6
RK
6166 }
6167 else
6168#endif
546b63fb 6169 emit_insn (gen_move_insn (old, reloadreg));
0dadecf6 6170 }
32131a9c
RK
6171
6172#ifdef PRESERVE_DEATH_INFO_REGNO_P
6173 /* If final will look at death notes for this reg,
6174 put one on the last output-reload insn to use it. Similarly
6175 for any secondary register. */
6176 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
546b63fb 6177 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6178 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
6179 && reg_overlap_mentioned_for_reload_p (reloadreg,
6180 PATTERN (p)))
32131a9c
RK
6181 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6182 reloadreg, REG_NOTES (p));
6183
6184#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6185 if (! special
6186 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
546b63fb 6187 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6188 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
6189 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6190 PATTERN (p)))
32131a9c
RK
6191 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6192 second_reloadreg, REG_NOTES (p));
6193#endif
6194#endif
6195 /* Look at all insns we emitted, just to be safe. */
546b63fb 6196 for (p = get_insns (); p; p = NEXT_INSN (p))
32131a9c
RK
6197 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6198 {
6199 /* If this output reload doesn't come from a spill reg,
6200 clear any memory of reloaded copies of the pseudo reg.
6201 If this output reload comes from a spill reg,
6202 reg_has_output_reload will make this do nothing. */
6203 note_stores (PATTERN (p), forget_old_reloads_1);
6204
6205 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6206 store_insn = p;
6207 }
6208
546b63fb
RK
6209 output_reload_insns[reload_opnum[j]] = get_insns ();
6210 end_sequence ();
6211
32131a9c
RK
6212 }
6213
6214 if (reload_spill_index[j] >= 0)
6215 new_spill_reg_store[reload_spill_index[j]] = store_insn;
6216 }
6217
546b63fb
RK
6218 /* Now write all the insns we made for reloads in the order expected by
6219 the allocation functions. Prior to the insn being reloaded, we write
6220 the following reloads:
6221
6222 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6223
6224 RELOAD_OTHER reloads.
6225
6226 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6227 the RELOAD_FOR_INPUT reload for the operand.
6228
6229 RELOAD_FOR_OPERAND_ADDRESS reloads.
6230
6231 After the insn being reloaded, we write the following:
6232
6233 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6234 the RELOAD_FOR_OUTPUT reload for that operand. */
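   /* Schematically, for a hypothetical insn with one reloaded input
      operand I and one reloaded output operand O, the spliced stream is

	 RELOAD_FOR_OTHER_ADDRESS reloads
	 RELOAD_OTHER reloads
	 RELOAD_FOR_INPUT_ADDRESS, then RELOAD_FOR_INPUT, reloads for I
	 RELOAD_FOR_OPERAND_ADDRESS reloads
	 the INSN being reloaded
	 RELOAD_FOR_OUTPUT_ADDRESS, then RELOAD_FOR_OUTPUT, reloads for O  */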
6235
6236 emit_insns_before (other_input_address_reload_insns, before_insn);
6237 emit_insns_before (other_input_reload_insns, before_insn);
6238
6239 for (j = 0; j < reload_n_operands; j++)
6240 {
6241 emit_insns_before (input_address_reload_insns[j], before_insn);
6242 emit_insns_before (input_reload_insns[j], before_insn);
6243 }
6244
6245 emit_insns_before (operand_reload_insns, before_insn);
6246
6247 for (j = 0; j < reload_n_operands; j++)
6248 {
6249 emit_insns_before (output_address_reload_insns[j], following_insn);
6250 emit_insns_before (output_reload_insns[j], following_insn);
6251 }
6252
32131a9c
RK
6253 /* Move death notes from INSN
6254 to output-operand-address and output reload insns. */
6255#ifdef PRESERVE_DEATH_INFO_REGNO_P
6256 {
6257 rtx insn1;
6258 /* Loop over those insns, last ones first. */
6259 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6260 insn1 = PREV_INSN (insn1))
6261 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6262 {
6263 rtx source = SET_SRC (PATTERN (insn1));
6264 rtx dest = SET_DEST (PATTERN (insn1));
6265
6266 /* The note we will examine next. */
6267 rtx reg_notes = REG_NOTES (insn);
6268 /* The place that pointed to this note. */
6269 rtx *prev_reg_note = &REG_NOTES (insn);
6270
6271 /* If the note is for something used in the source of this
6272 reload insn, or in the output address, move the note. */
6273 while (reg_notes)
6274 {
6275 rtx next_reg_notes = XEXP (reg_notes, 1);
6276 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6277 && GET_CODE (XEXP (reg_notes, 0)) == REG
6278 && ((GET_CODE (dest) != REG
bfa30b22
RK
6279 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6280 dest))
6281 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6282 source)))
32131a9c
RK
6283 {
6284 *prev_reg_note = next_reg_notes;
6285 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6286 REG_NOTES (insn1) = reg_notes;
6287 }
6288 else
6289 prev_reg_note = &XEXP (reg_notes, 1);
6290
6291 reg_notes = next_reg_notes;
6292 }
6293 }
6294 }
6295#endif
6296
6297 /* For all the spill regs newly reloaded in this instruction,
6298 record what they were reloaded from, so subsequent instructions
d445b551
RK
6299 can inherit the reloads.
6300
6301 Update spill_reg_store for the reloads of this insn.
e9e79d69 6302 Copy the elements that were updated in the loop above. */
32131a9c
RK
6303
6304 for (j = 0; j < n_reloads; j++)
6305 {
6306 register int r = reload_order[j];
6307 register int i = reload_spill_index[r];
6308
6309 /* I is nonneg if this reload used one of the spill regs.
6310 If reload_reg_rtx[r] is 0, this is an optional reload
546b63fb
RK
6311 that we opted to ignore.
6312
6313 Also ignore reloads that don't reach the end of the insn,
6314 since we will eventually see the one that does. */
d445b551 6315
546b63fb
RK
6316 if (i >= 0 && reload_reg_rtx[r] != 0
6317 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6318 reload_when_needed[r]))
32131a9c
RK
6319 {
6320 /* First, clear out memory of what used to be in this spill reg.
6321 If consecutive registers are used, clear them all. */
6322 int nr
6323 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6324 int k;
6325
6326 for (k = 0; k < nr; k++)
6327 {
6328 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6329 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6330 }
6331
6332 /* Maybe the spill reg contains a copy of reload_out. */
6333 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6334 {
6335 register int nregno = REGNO (reload_out[r]);
d08ea79f
RK
6336 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6337 : HARD_REGNO_NREGS (nregno,
6338 GET_MODE (reload_reg_rtx[r])));
d445b551
RK
6339
6340 spill_reg_store[i] = new_spill_reg_store[i];
32131a9c 6341 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 6342
d08ea79f
RK
 6343 /* If NREGNO is a hard register, it may occupy more than
 6344 one register. If it does, say what is in the rest of
 6345 the registers, assuming that the output register and the
 6346 reload register agree on how many words the object takes.
 6347 If not, invalidate the subsequent registers. */
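		  /* Worked example (hypothetical registers, 32-bit words
		     assumed): if reload_out[r] is (reg:DI 4) and the reload
		     register is (reg:DI 10), then NR == NNR == 2, so
		     reg_last_reload_reg[4] = (reg:DI 10) and
		     reg_last_reload_reg[5] = (reg:SI 11) in word_mode; if NR
		     and NNR differed, reg_last_reload_reg[5] would be
		     cleared instead.  */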
6348
6349 if (nregno < FIRST_PSEUDO_REGISTER)
6350 for (k = 1; k < nnr; k++)
6351 reg_last_reload_reg[nregno + k]
6352 = (nr == nnr ? gen_rtx (REG, word_mode,
6353 REGNO (reload_reg_rtx[r]) + k)
6354 : 0);
6355
6356 /* Now do the inverse operation. */
32131a9c
RK
6357 for (k = 0; k < nr; k++)
6358 {
6359 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
d08ea79f
RK
6360 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6361 : nregno + k);
32131a9c
RK
6362 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6363 }
6364 }
d445b551 6365
2c9ce2ef
RK
6366 /* Maybe the spill reg contains a copy of reload_in. Only do
6367 something if there will not be an output reload for
6368 the register being reloaded. */
32131a9c
RK
6369 else if (reload_out[r] == 0
6370 && reload_in[r] != 0
2c9ce2ef
RK
6371 && ((GET_CODE (reload_in[r]) == REG
6372 && ! reg_has_output_reload[REGNO (reload_in[r])]
6373 || (GET_CODE (reload_in_reg[r]) == REG
6374 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
32131a9c
RK
6375 {
6376 register int nregno;
d08ea79f
RK
6377 int nnr;
6378
32131a9c
RK
6379 if (GET_CODE (reload_in[r]) == REG)
6380 nregno = REGNO (reload_in[r]);
6381 else
6382 nregno = REGNO (reload_in_reg[r]);
6383
d08ea79f
RK
6384 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6385 : HARD_REGNO_NREGS (nregno,
6386 GET_MODE (reload_reg_rtx[r])));
6387
546b63fb 6388 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 6389
d08ea79f
RK
6390 if (nregno < FIRST_PSEUDO_REGISTER)
6391 for (k = 1; k < nnr; k++)
6392 reg_last_reload_reg[nregno + k]
6393 = (nr == nnr ? gen_rtx (REG, word_mode,
6394 REGNO (reload_reg_rtx[r]) + k)
6395 : 0);
6396
546b63fb
RK
6397 /* Unless we inherited this reload, show we haven't
6398 recently done a store. */
6399 if (! reload_inherited[r])
6400 spill_reg_store[i] = 0;
d445b551 6401
546b63fb
RK
6402 for (k = 0; k < nr; k++)
6403 {
6404 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
d08ea79f
RK
6405 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6406 : nregno + k);
546b63fb
RK
6407 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6408 = insn;
32131a9c
RK
6409 }
6410 }
6411 }
6412
6413 /* The following if-statement was #if 0'd in 1.34 (or before...).
6414 It's reenabled in 1.35 because supposedly nothing else
6415 deals with this problem. */
6416
6417 /* If a register gets output-reloaded from a non-spill register,
6418 that invalidates any previous reloaded copy of it.
6419 But forget_old_reloads_1 won't get to see it, because
6420 it thinks only about the original insn. So invalidate it here. */
6421 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6422 {
6423 register int nregno = REGNO (reload_out[r]);
6424 reg_last_reload_reg[nregno] = 0;
6425 }
6426 }
6427}
6428\f
546b63fb
RK
6429/* Emit code to perform an input reload of IN to RELOADREG. IN is from
6430 operand OPNUM with reload type TYPE.
6431
3c3eeea6 6432 Returns first insn emitted. */
32131a9c
RK
6433
6434rtx
546b63fb 6435gen_input_reload (reloadreg, in, opnum, type)
32131a9c
RK
6436 rtx reloadreg;
6437 rtx in;
546b63fb
RK
6438 int opnum;
6439 enum reload_type type;
32131a9c 6440{
546b63fb 6441 rtx last = get_last_insn ();
32131a9c 6442
a8fdc208 6443 /* How to do this reload can get quite tricky. Normally, we are being
32131a9c
RK
6444 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6445 register that didn't get a hard register. In that case we can just
6446 call emit_move_insn.
6447
a7fd196c
JW
6448 We can also be asked to reload a PLUS that adds a register or a MEM to
6449 another register, constant or MEM. This can occur during frame pointer
6450 elimination and while reloading addresses. This case is handled by
6451 trying to emit a single insn to perform the add. If it is not valid,
 6452 we use a two-insn sequence.
32131a9c
RK
6453
6454 Finally, we could be called to handle an 'o' constraint by putting
6455 an address into a register. In that case, we first try to do this
6456 with a named pattern of "reload_load_address". If no such pattern
6457 exists, we just emit a SET insn and hope for the best (it will normally
6458 be valid on machines that use 'o').
6459
 6460 This entire process is made complex because reload will never
 6461 process the insns we generate here, so we must ensure that they
 6462 will fit their constraints, and also because parts of IN might be
 6463 reloaded separately and replaced with spill registers.
6464 Because of this, we are, in some sense, just guessing the right approach
6465 here. The one listed above seems to work.
6466
6467 ??? At some point, this whole thing needs to be rethought. */
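  /* For example (hypothetical register numbers): reloading
     IN = (plus:SI (reg:SI 14) (const_int 8)) into hard register 3 would
     ideally emit the single insn

	(set (reg:SI 3) (plus:SI (reg:SI 14) (const_int 8)))

     and, if that insn is not recognized or fails its constraints, fall
     back to the two-insn sequence

	(set (reg:SI 3) (reg:SI 14))
	(set (reg:SI 3) (plus:SI (reg:SI 3) (const_int 8)))  */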
6468
6469 if (GET_CODE (in) == PLUS
a7fd196c
JW
6470 && (GET_CODE (XEXP (in, 0)) == REG
6471 || GET_CODE (XEXP (in, 0)) == MEM)
6472 && (GET_CODE (XEXP (in, 1)) == REG
6473 || CONSTANT_P (XEXP (in, 1))
6474 || GET_CODE (XEXP (in, 1)) == MEM))
32131a9c 6475 {
a7fd196c
JW
6476 /* We need to compute the sum of a register or a MEM and another
6477 register, constant, or MEM, and put it into the reload
3002e160
JW
6478 register. The best possible way of doing this is if the machine
6479 has a three-operand ADD insn that accepts the required operands.
32131a9c
RK
6480
6481 The simplest approach is to try to generate such an insn and see if it
6482 is recognized and matches its constraints. If so, it can be used.
6483
6484 It might be better not to actually emit the insn unless it is valid,
0009eff2 6485 but we need to pass the insn as an operand to `recog' and
b36d7dd7 6486 `insn_extract' and it is simpler to emit and then delete the insn if
0009eff2 6487 not valid than to dummy things up. */
a8fdc208 6488
af929c62 6489 rtx op0, op1, tem, insn;
32131a9c 6490 int code;
a8fdc208 6491
af929c62
RK
6492 op0 = find_replacement (&XEXP (in, 0));
6493 op1 = find_replacement (&XEXP (in, 1));
6494
32131a9c
RK
6495 /* Since constraint checking is strict, commutativity won't be
6496 checked, so we need to do that here to avoid spurious failure
6497 if the add instruction is two-address and the second operand
6498 of the add is the same as the reload reg, which is frequently
6499 the case. If the insn would be A = B + A, rearrange it so
6500 it will be A = A + B as constrain_operands expects. */
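      /* For example (hypothetical registers): if IN is
	 (plus:SI (reg:SI 2) (reg:SI 3)) and RELOADREG is (reg:SI 3), the
	 insn as written would be A = B + A; swapping OP0 and OP1 turns it
	 into A = A + B, which a two-address add pattern can match.  */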
a8fdc208 6501
32131a9c
RK
6502 if (GET_CODE (XEXP (in, 1)) == REG
6503 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
af929c62
RK
6504 tem = op0, op0 = op1, op1 = tem;
6505
6506 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6507 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
32131a9c 6508
546b63fb 6509 insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
32131a9c
RK
6510 code = recog_memoized (insn);
6511
6512 if (code >= 0)
6513 {
6514 insn_extract (insn);
6515 /* We want constrain operands to treat this insn strictly in
6516 its validity determination, i.e., the way it would after reload
6517 has completed. */
6518 if (constrain_operands (code, 1))
6519 return insn;
6520 }
6521
546b63fb 6522 delete_insns_since (last);
32131a9c
RK
6523
6524 /* If that failed, we must use a conservative two-insn sequence.
 6525 Use a move to copy the constant, MEM, or pseudo register to the reload
af929c62
RK
 6526 register, since a move can handle an arbitrary operand, unlike
 6527 add, which in general cannot. Then add the registers.
32131a9c
RK
6528
6529 If there is another way to do this for a specific machine, a
6530 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6531 we emit below. */
6532
af929c62
RK
6533 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6534 || (GET_CODE (op1) == REG
6535 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6536 tem = op0, op0 = op1, op1 = tem;
32131a9c 6537
546b63fb 6538 emit_insn (gen_move_insn (reloadreg, op0));
39b56c2a
RK
6539
6540 /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
6541 This fixes a problem on the 32K where the stack pointer cannot
6542 be used as an operand of an add insn. */
6543
6544 if (rtx_equal_p (op0, op1))
6545 op1 = reloadreg;
6546
546b63fb 6547 emit_insn (gen_add2_insn (reloadreg, op1));
32131a9c
RK
6548 }
6549
0dadecf6
RK
6550#ifdef SECONDARY_MEMORY_NEEDED
6551 /* If we need a memory location to do the move, do it that way. */
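  /* Roughly, with an assumed stack slot returned by get_secondary_mem,
     the two moves emitted below amount to

	(set (mem:M <slot>) (reg:M <IN>))
	(set (reg:M <RELOADREG>) (mem:M <slot>))

     so the value makes a round trip through memory when the two register
     classes cannot be copied between directly.  */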
6552 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6553 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6554 REGNO_REG_CLASS (REGNO (reloadreg)),
6555 GET_MODE (reloadreg)))
6556 {
6557 /* Get the memory to use and rewrite both registers to its mode. */
546b63fb 6558 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);
0dadecf6
RK
6559
6560 if (GET_MODE (loc) != GET_MODE (reloadreg))
6561 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
6562
6563 if (GET_MODE (loc) != GET_MODE (in))
6564 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6565
546b63fb
RK
6566 emit_insn (gen_move_insn (loc, in));
6567 emit_insn (gen_move_insn (reloadreg, loc));
0dadecf6
RK
6568 }
6569#endif
6570
32131a9c
RK
6571 /* If IN is a simple operand, use gen_move_insn. */
6572 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
546b63fb 6573 emit_insn (gen_move_insn (reloadreg, in));
32131a9c
RK
6574
6575#ifdef HAVE_reload_load_address
6576 else if (HAVE_reload_load_address)
546b63fb 6577 emit_insn (gen_reload_load_address (reloadreg, in));
32131a9c
RK
6578#endif
6579
 6580 /* Otherwise, just write (set RELOADREG IN) and hope for the best. */
6581 else
546b63fb 6582 emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
32131a9c
RK
6583
6584 /* Return the first insn emitted.
546b63fb 6585 We cannot just return get_last_insn, because there may have
32131a9c
RK
6586 been multiple instructions emitted. Also note that gen_move_insn may
 6587 emit more than one insn itself, so we cannot assume that there is one
6588 insn emitted per emit_insn_before call. */
6589
546b63fb 6590 return last ? NEXT_INSN (last) : get_insns ();
32131a9c
RK
6591}
6592\f
6593/* Delete a previously made output-reload
6594 whose result we now believe is not needed.
6595 First we double-check.
6596
6597 INSN is the insn now being processed.
6598 OUTPUT_RELOAD_INSN is the insn of the output reload.
6599 J is the reload-number for this insn. */
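/* Illustrative scenario (hypothetical pseudo 100 and hard reg 3):

	(set (reg:SI 100) (reg:SI 3))	;; OUTPUT_RELOAD_INSN, reload reg 3
	... no reference to (reg:SI 100), no label, no jump ...
	INSN, whose reload J refers to (reg:SI 100) again

   The value reaches INSN through hard reg 3 alone, so the earlier store
   can be deleted; if (reg:SI 100) also dies here and is confined to one
   basic block, its remaining stores and its stack slot can go too.  */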
6600
6601static void
6602delete_output_reload (insn, j, output_reload_insn)
6603 rtx insn;
6604 int j;
6605 rtx output_reload_insn;
6606{
6607 register rtx i1;
6608
6609 /* Get the raw pseudo-register referred to. */
6610
6611 rtx reg = reload_in[j];
6612 while (GET_CODE (reg) == SUBREG)
6613 reg = SUBREG_REG (reg);
6614
6615 /* If the pseudo-reg we are reloading is no longer referenced
6616 anywhere between the store into it and here,
6617 and no jumps or labels intervene, then the value can get
6618 here through the reload reg alone.
6619 Otherwise, give up--return. */
6620 for (i1 = NEXT_INSN (output_reload_insn);
6621 i1 != insn; i1 = NEXT_INSN (i1))
6622 {
6623 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6624 return;
6625 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6626 && reg_mentioned_p (reg, PATTERN (i1)))
6627 return;
6628 }
6629
208dffa5
RS
6630 if (cannot_omit_stores[REGNO (reg)])
6631 return;
6632
32131a9c
RK
6633 /* If this insn will store in the pseudo again,
6634 the previous store can be removed. */
6635 if (reload_out[j] == reload_in[j])
6636 delete_insn (output_reload_insn);
6637
6638 /* See if the pseudo reg has been completely replaced
6639 with reload regs. If so, delete the store insn
6640 and forget we had a stack slot for the pseudo. */
6641 else if (reg_n_deaths[REGNO (reg)] == 1
6642 && reg_basic_block[REGNO (reg)] >= 0
6643 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6644 {
6645 rtx i2;
6646
6647 /* We know that it was used only between here
6648 and the beginning of the current basic block.
6649 (We also know that the last use before INSN was
6650 the output reload we are thinking of deleting, but never mind that.)
6651 Search that range; see if any ref remains. */
6652 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6653 {
d445b551
RK
6654 rtx set = single_set (i2);
6655
32131a9c
RK
6656 /* Uses which just store in the pseudo don't count,
6657 since if they are the only uses, they are dead. */
d445b551 6658 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
6659 continue;
6660 if (GET_CODE (i2) == CODE_LABEL
6661 || GET_CODE (i2) == JUMP_INSN)
6662 break;
6663 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6664 && reg_mentioned_p (reg, PATTERN (i2)))
6665 /* Some other ref remains;
6666 we can't do anything. */
6667 return;
6668 }
6669
6670 /* Delete the now-dead stores into this pseudo. */
6671 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6672 {
d445b551
RK
6673 rtx set = single_set (i2);
6674
6675 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
6676 delete_insn (i2);
6677 if (GET_CODE (i2) == CODE_LABEL
6678 || GET_CODE (i2) == JUMP_INSN)
6679 break;
6680 }
6681
6682 /* For the debugging info,
6683 say the pseudo lives in this reload reg. */
6684 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6685 alter_reg (REGNO (reg), -1);
6686 }
6687}
32131a9c 6688\f
a8fdc208 6689/* Output reload-insns to reload VALUE into RELOADREG.
858a47b1 6690 VALUE is an autoincrement or autodecrement RTX whose operand
32131a9c
RK
6691 is a register or memory location;
6692 so reloading involves incrementing that location.
6693
6694 INC_AMOUNT is the number to increment or decrement by (always positive).
546b63fb 6695 This cannot be deduced from VALUE. */
32131a9c 6696
546b63fb
RK
6697static void
6698inc_for_reload (reloadreg, value, inc_amount)
32131a9c
RK
6699 rtx reloadreg;
6700 rtx value;
6701 int inc_amount;
32131a9c
RK
6702{
6703 /* REG or MEM to be copied and incremented. */
6704 rtx incloc = XEXP (value, 0);
6705 /* Nonzero if increment after copying. */
6706 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
546b63fb 6707 rtx last;
0009eff2
RK
6708 rtx inc;
6709 rtx add_insn;
6710 int code;
32131a9c
RK
6711
6712 /* No hard register is equivalent to this register after
6713 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
6714 we could inc/dec that register as well (maybe even using it for
6715 the source), but I'm not sure it's worth worrying about. */
6716 if (GET_CODE (incloc) == REG)
6717 reg_last_reload_reg[REGNO (incloc)] = 0;
6718
6719 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6720 inc_amount = - inc_amount;
6721
fb3821f7 6722 inc = GEN_INT (inc_amount);
0009eff2
RK
6723
6724 /* If this is post-increment, first copy the location to the reload reg. */
6725 if (post)
546b63fb 6726 emit_insn (gen_move_insn (reloadreg, incloc));
0009eff2
RK
6727
6728 /* See if we can directly increment INCLOC. Use a method similar to that
6729 in gen_input_reload. */
6730
546b63fb
RK
6731 last = get_last_insn ();
6732 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
6733 gen_rtx (PLUS, GET_MODE (incloc),
6734 incloc, inc)));
0009eff2
RK
6735
6736 code = recog_memoized (add_insn);
6737 if (code >= 0)
32131a9c 6738 {
0009eff2
RK
6739 insn_extract (add_insn);
6740 if (constrain_operands (code, 1))
32131a9c 6741 {
0009eff2
RK
6742 /* If this is a pre-increment and we have incremented the value
6743 where it lives, copy the incremented value to RELOADREG to
6744 be used as an address. */
6745
6746 if (! post)
546b63fb
RK
6747 emit_insn (gen_move_insn (reloadreg, incloc));
6748
6749 return;
32131a9c
RK
6750 }
6751 }
0009eff2 6752
546b63fb 6753 delete_insns_since (last);
0009eff2
RK
6754
 6755 /* If we couldn't do the increment directly, we must increment in RELOADREG.
6756 The way we do this depends on whether this is pre- or post-increment.
6757 For pre-increment, copy INCLOC to the reload register, increment it
6758 there, then save back. */
6759
6760 if (! post)
6761 {
546b63fb
RK
6762 emit_insn (gen_move_insn (reloadreg, incloc));
6763 emit_insn (gen_add2_insn (reloadreg, inc));
6764 emit_insn (gen_move_insn (incloc, reloadreg));
0009eff2 6765 }
32131a9c
RK
6766 else
6767 {
0009eff2
RK
6768 /* Postincrement.
6769 Because this might be a jump insn or a compare, and because RELOADREG
6770 may not be available after the insn in an input reload, we must do
6771 the incrementation before the insn being reloaded for.
6772
6773 We have already copied INCLOC to RELOADREG. Increment the copy in
6774 RELOADREG, save that back, then decrement RELOADREG so it has
6775 the original value. */
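      /* Illustration (hypothetical registers): for
	 VALUE = (post_inc:SI (reg:SI 5)), INC_AMOUNT = 4 and
	 RELOADREG = (reg:SI 3), after the copy made above the three insns
	 emitted below are roughly

	    (set (reg:SI 3) (plus:SI (reg:SI 3) (const_int 4)))
	    (set (reg:SI 5) (reg:SI 3))
	    (set (reg:SI 3) (plus:SI (reg:SI 3) (const_int -4)))

	 leaving RELOADREG holding the original, pre-increment value.  */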
6776
546b63fb
RK
6777 emit_insn (gen_add2_insn (reloadreg, inc));
6778 emit_insn (gen_move_insn (incloc, reloadreg));
6779 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
32131a9c 6780 }
0009eff2 6781
546b63fb 6782 return;
32131a9c
RK
6783}
6784\f
6785/* Return 1 if we are certain that the constraint-string STRING allows
6786 the hard register REG. Return 0 if we can't be sure of this. */
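/* For instance, with REG a general-class hard register: "r" and "g,r"
   yield 1, since every alternative accepts a general register, while
   "r,m" yields 0 because the second alternative accepts only memory.  */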
6787
6788static int
6789constraint_accepts_reg_p (string, reg)
6790 char *string;
6791 rtx reg;
6792{
6793 int value = 0;
6794 int regno = true_regnum (reg);
6795 int c;
6796
6797 /* Initialize for first alternative. */
6798 value = 0;
6799 /* Check that each alternative contains `g' or `r'. */
6800 while (1)
6801 switch (c = *string++)
6802 {
6803 case 0:
6804 /* If an alternative lacks `g' or `r', we lose. */
6805 return value;
6806 case ',':
6807 /* If an alternative lacks `g' or `r', we lose. */
6808 if (value == 0)
6809 return 0;
6810 /* Initialize for next alternative. */
6811 value = 0;
6812 break;
6813 case 'g':
6814 case 'r':
6815 /* Any general reg wins for this alternative. */
6816 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
6817 value = 1;
6818 break;
6819 default:
6820 /* Any reg in specified class wins for this alternative. */
6821 {
0009eff2 6822 enum reg_class class = REG_CLASS_FROM_LETTER (c);
32131a9c 6823
0009eff2 6824 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
32131a9c
RK
6825 value = 1;
6826 }
6827 }
6828}
6829\f
d445b551
RK
6830/* Return the number of places FIND appears within X, but don't count
6831 an occurrence if some SET_DEST is FIND. */
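/* For example, with FIND being the rtx (reg:SI 5), counting within

	(set (reg:SI 5) (plus:SI (reg:SI 5) (const_int 1)))

   yields 1 (assuming both occurrences are the identical shared rtx),
   because the occurrence as the SET_DEST is not counted.  */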
32131a9c
RK
6832
6833static int
6834count_occurrences (x, find)
6835 register rtx x, find;
6836{
6837 register int i, j;
6838 register enum rtx_code code;
6839 register char *format_ptr;
6840 int count;
6841
6842 if (x == find)
6843 return 1;
6844 if (x == 0)
6845 return 0;
6846
6847 code = GET_CODE (x);
6848
6849 switch (code)
6850 {
6851 case REG:
6852 case QUEUED:
6853 case CONST_INT:
6854 case CONST_DOUBLE:
6855 case SYMBOL_REF:
6856 case CODE_LABEL:
6857 case PC:
6858 case CC0:
6859 return 0;
d445b551
RK
6860
6861 case SET:
6862 if (SET_DEST (x) == find)
6863 return count_occurrences (SET_SRC (x), find);
6864 break;
32131a9c
RK
6865 }
6866
6867 format_ptr = GET_RTX_FORMAT (code);
6868 count = 0;
6869
6870 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6871 {
6872 switch (*format_ptr++)
6873 {
6874 case 'e':
6875 count += count_occurrences (XEXP (x, i), find);
6876 break;
6877
6878 case 'E':
6879 if (XVEC (x, i) != NULL)
6880 {
6881 for (j = 0; j < XVECLEN (x, i); j++)
6882 count += count_occurrences (XVECEXP (x, i, j), find);
6883 }
6884 break;
6885 }
6886 }
6887 return count;
6888}