]> gcc.gnu.org Git - gcc.git/blame - gcc/reload1.c
(eliminate_regs): All recursive calls now pass INSN instead of NULL_RTX.
[gcc.git] / gcc / reload1.c
CommitLineData
32131a9c 1/* Reload pseudo regs into hard regs for insns that require hard regs.
4c05b187 2 Copyright (C) 1987, 1988, 1989, 1992, 1993 Free Software Foundation, Inc.
32131a9c
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
ff2da9fc 21#include <stdio.h>
32131a9c
RK
22#include "config.h"
23#include "rtl.h"
24#include "obstack.h"
25#include "insn-config.h"
26#include "insn-flags.h"
27#include "insn-codes.h"
28#include "flags.h"
29#include "expr.h"
30#include "regs.h"
31#include "hard-reg-set.h"
32#include "reload.h"
33#include "recog.h"
34#include "basic-block.h"
35#include "output.h"
32131a9c
RK
36
37/* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
546b63fb
RK
69
70
71#ifndef REGISTER_MOVE_COST
72#define REGISTER_MOVE_COST(x, y) 2
73#endif
74
75#ifndef MEMORY_MOVE_COST
76#define MEMORY_MOVE_COST(x) 4
77#endif
32131a9c
RK
78\f
79/* During reload_as_needed, element N contains a REG rtx for the hard reg
d08ea79f 80 into which reg N has been reloaded (perhaps for a previous insn). */
32131a9c
RK
81static rtx *reg_last_reload_reg;
82
83/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85static char *reg_has_output_reload;
86
87/* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89static HARD_REG_SET reg_is_output_reload;
90
91/* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95rtx *reg_equiv_constant;
96
97/* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
4803a34a 101rtx *reg_equiv_memory_loc;
32131a9c
RK
102
103/* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105 (because its displacement is too big for the machine.) */
106rtx *reg_equiv_address;
107
108/* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
110rtx *reg_equiv_mem;
111
112/* Widest width in which each pseudo reg is referred to (via subreg). */
113static int *reg_max_ref_width;
114
115/* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117static rtx *reg_equiv_init;
118
119/* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
125
126/* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
131
132/* Number of spill-regs so far; number of valid elements of spill_regs. */
133static int n_spills;
134
135/* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet. This rtx is reused, provided it has
138 the proper mode. */
139static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
140
141/* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
145
146/* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150static short spill_reg_order[FIRST_PSEUDO_REGISTER];
151
152/* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155HARD_REG_SET forbidden_regs;
156
157/* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
546b63fb
RK
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
32131a9c
RK
161
162 (spill_reg_order prevents these registers from being used to start a
163 group.) */
164static HARD_REG_SET bad_spill_regs;
165
166/* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169static short spill_regs[FIRST_PSEUDO_REGISTER];
170
171/* Describes order of preference for putting regs into spill_regs.
172 Contains the numbers of all the hard regs, in order most preferred first.
173 This order is different for each function.
174 It is set up by order_regs_for_reload.
175 Empty elements at the end contain -1. */
176static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
177
178/* 1 for a hard register that appears explicitly in the rtl
179 (for example, function value registers, special registers
180 used by insns, structure value pointer registers). */
181static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
182
183/* Indicates if a register was counted against the need for
184 groups. 0 means it can count against max_nongroup instead. */
185static HARD_REG_SET counted_for_groups;
186
187/* Indicates if a register was counted against the need for
188 non-groups. 0 means it can become part of a new group.
189 During choose_reload_regs, 1 here means don't use this reg
190 as part of a group, even if it seems to be otherwise ok. */
191static HARD_REG_SET counted_for_nongroups;
192
208dffa5
RS
193/* Indexed by pseudo reg number N,
194 says may not delete stores into the real (memory) home of pseudo N.
195 This is set if we already substituted a memory equivalent in some uses,
196 which happens when we have to eliminate the fp from it. */
197static char *cannot_omit_stores;
198
32131a9c
RK
199/* Nonzero if indirect addressing is supported on the machine; this means
200 that spilling (REG n) does not require reloading it into a register in
201 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
202 value indicates the level of indirect addressing supported, e.g., two
203 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
204 a hard register. */
205
206static char spill_indirect_levels;
207
208/* Nonzero if indirect addressing is supported when the innermost MEM is
209 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
210 which these are valid is the same as spill_indirect_levels, above. */
211
212char indirect_symref_ok;
213
214/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
215
216char double_reg_address_ok;
217
218/* Record the stack slot for each spilled hard register. */
219
220static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
221
222/* Width allocated so far for that stack slot. */
223
224static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
225
226/* Indexed by register class and basic block number, nonzero if there is
227 any need for a spill register of that class in that basic block.
228 The pointer is 0 if we did stupid allocation and don't know
229 the structure of basic blocks. */
230
231char *basic_block_needs[N_REG_CLASSES];
232
233/* First uid used by insns created by reload in this function.
234 Used in find_equiv_reg. */
235int reload_first_uid;
236
237/* Flag set by local-alloc or global-alloc if anything is live in
238 a call-clobbered reg across calls. */
239
240int caller_save_needed;
241
242/* Set to 1 while reload_as_needed is operating.
243 Required by some machines to handle any generated moves differently. */
244
245int reload_in_progress = 0;
246
247/* These arrays record the insn_code of insns that may be needed to
248 perform input and output reloads of special objects. They provide a
249 place to pass a scratch register. */
250
251enum insn_code reload_in_optab[NUM_MACHINE_MODES];
252enum insn_code reload_out_optab[NUM_MACHINE_MODES];
253
d45cf215 254/* This obstack is used for allocation of rtl during register elimination.
32131a9c
RK
255 The allocated storage can be freed once find_reloads has processed the
256 insn. */
257
258struct obstack reload_obstack;
259char *reload_firstobj;
260
261#define obstack_chunk_alloc xmalloc
262#define obstack_chunk_free free
263
32131a9c
RK
264/* List of labels that must never be deleted. */
265extern rtx forced_labels;
266\f
267/* This structure is used to record information about register eliminations.
268 Each array entry describes one possible way of eliminating a register
269 in favor of another. If there is more than one way of eliminating a
270 particular register, the most preferred should be specified first. */
271
272static struct elim_table
273{
274 int from; /* Register number to be eliminated. */
275 int to; /* Register number used as replacement. */
276 int initial_offset; /* Initial difference between values. */
277 int can_eliminate; /* Non-zero if this elimination can be done. */
278 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
279 insns made by reload. */
280 int offset; /* Current offset between the two regs. */
a8efe40d 281 int max_offset; /* Maximum offset between the two regs. */
32131a9c
RK
282 int previous_offset; /* Offset at end of previous insn. */
283 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
284 rtx from_rtx; /* REG rtx for the register to be eliminated.
285 We cannot simply compare the number since
286 we might then spuriously replace a hard
287 register corresponding to a pseudo
288 assigned to the reg to be eliminated. */
289 rtx to_rtx; /* REG rtx for the replacement. */
290} reg_eliminate[] =
291
292/* If a set of eliminable registers was specified, define the table from it.
293 Otherwise, default to the normal case of the frame pointer being
294 replaced by the stack pointer. */
295
296#ifdef ELIMINABLE_REGS
297 ELIMINABLE_REGS;
298#else
299 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
300#endif
301
302#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
303
304/* Record the number of pending eliminations that have an offset not equal
305 to their initial offset. If non-zero, we use a new copy of each
306 replacement result in any insns encountered. */
307static int num_not_at_initial_offset;
308
309/* Count the number of registers that we may be able to eliminate. */
310static int num_eliminable;
311
312/* For each label, we record the offset of each elimination. If we reach
313 a label by more than one path and an offset differs, we cannot do the
314 elimination. This information is indexed by the number of the label.
315 The first table is an array of flags that records whether we have yet
316 encountered a label and the second table is an array of arrays, one
317 entry in the latter array for each elimination. */
318
319static char *offsets_known_at;
320static int (*offsets_at)[NUM_ELIMINABLE_REGS];
321
322/* Number of labels in the current function. */
323
324static int num_labels;
546b63fb
RK
325
326struct hard_reg_n_uses { int regno; int uses; };
32131a9c 327\f
546b63fb
RK
328static int possible_group_p PROTO((int, int *));
329static void count_possible_groups PROTO((int *, enum machine_mode *,
330 int *));
331static int modes_equiv_for_class_p PROTO((enum machine_mode,
332 enum machine_mode,
333 enum reg_class));
334static void spill_failure PROTO((rtx));
335static int new_spill_reg PROTO((int, int, int *, int *, int,
336 FILE *));
337static void delete_dead_insn PROTO((rtx));
338static void alter_reg PROTO((int, int));
c307c237 339static void mark_scratch_live PROTO((rtx));
546b63fb
RK
340static void set_label_offsets PROTO((rtx, rtx, int));
341static int eliminate_regs_in_insn PROTO((rtx, int));
342static void mark_not_eliminable PROTO((rtx, rtx));
343static int spill_hard_reg PROTO((int, int, FILE *, int));
344static void scan_paradoxical_subregs PROTO((rtx));
345static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
346 struct hard_reg_n_uses *));
347static void order_regs_for_reload PROTO((void));
348static void reload_as_needed PROTO((rtx, int));
9a881562 349static void forget_old_reloads_1 PROTO((rtx, rtx));
546b63fb
RK
350static int reload_reg_class_lower PROTO((short *, short *));
351static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
352 enum machine_mode));
be7ae2a4
RK
353static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
354 enum machine_mode));
546b63fb
RK
355static int reload_reg_free_p PROTO((int, int, enum reload_type));
356static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
357static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
358static int allocate_reload_reg PROTO((int, rtx, int, int));
359static void choose_reload_regs PROTO((rtx, rtx));
360static void merge_assigned_reloads PROTO((rtx));
361static void emit_reload_insns PROTO((rtx));
362static void delete_output_reload PROTO((rtx, int, rtx));
363static void inc_for_reload PROTO((rtx, rtx, int));
364static int constraint_accepts_reg_p PROTO((char *, rtx));
365static int count_occurrences PROTO((rtx, rtx));
32131a9c 366\f
546b63fb
RK
367/* Initialize the reload pass once per compilation. */
368
32131a9c
RK
369void
370init_reload ()
371{
372 register int i;
373
374 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
375 Set spill_indirect_levels to the number of levels such addressing is
376 permitted, zero if it is not permitted at all. */
377
378 register rtx tem
379 = gen_rtx (MEM, Pmode,
380 gen_rtx (PLUS, Pmode,
381 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
fb3821f7 382 GEN_INT (4)));
32131a9c
RK
383 spill_indirect_levels = 0;
384
385 while (memory_address_p (QImode, tem))
386 {
387 spill_indirect_levels++;
388 tem = gen_rtx (MEM, Pmode, tem);
389 }
390
391 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
392
393 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
394 indirect_symref_ok = memory_address_p (QImode, tem);
395
396 /* See if reg+reg is a valid (and offsettable) address. */
397
65701fd2 398 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
57caa638
RS
399 {
400 tem = gen_rtx (PLUS, Pmode,
3ec2ea3e 401 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
57caa638
RS
402 gen_rtx (REG, Pmode, i));
403 /* This way, we make sure that reg+reg is an offsettable address. */
404 tem = plus_constant (tem, 4);
405
406 if (memory_address_p (QImode, tem))
407 {
408 double_reg_address_ok = 1;
409 break;
410 }
411 }
32131a9c
RK
412
413 /* Initialize obstack for our rtl allocation. */
414 gcc_obstack_init (&reload_obstack);
415 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
32131a9c
RK
416}
417
546b63fb 418/* Main entry point for the reload pass.
32131a9c
RK
419
420 FIRST is the first insn of the function being compiled.
421
422 GLOBAL nonzero means we were called from global_alloc
423 and should attempt to reallocate any pseudoregs that we
424 displace from hard regs we will use for reloads.
425 If GLOBAL is zero, we do not have enough information to do that,
426 so any pseudo reg that is spilled must go to the stack.
427
428 DUMPFILE is the global-reg debugging dump file stream, or 0.
429 If it is nonzero, messages are written to it to describe
430 which registers are seized as reload regs, which pseudo regs
5352b11a 431 are spilled from them, and where the pseudo regs are reallocated to.
32131a9c 432
5352b11a
RS
433 Return value is nonzero if reload failed
434 and we must not do any more for this function. */
435
436int
32131a9c
RK
437reload (first, global, dumpfile)
438 rtx first;
439 int global;
440 FILE *dumpfile;
441{
442 register int class;
546b63fb 443 register int i, j;
32131a9c
RK
444 register rtx insn;
445 register struct elim_table *ep;
446
447 int something_changed;
448 int something_needs_reloads;
449 int something_needs_elimination;
450 int new_basic_block_needs;
a8efe40d
RK
451 enum reg_class caller_save_spill_class = NO_REGS;
452 int caller_save_group_size = 1;
32131a9c 453
5352b11a
RS
454 /* Nonzero means we couldn't get enough spill regs. */
455 int failure = 0;
456
32131a9c
RK
457 /* The basic block number currently being processed for INSN. */
458 int this_block;
459
460 /* Make sure even insns with volatile mem refs are recognizable. */
461 init_recog ();
462
463 /* Enable find_equiv_reg to distinguish insns made by reload. */
464 reload_first_uid = get_max_uid ();
465
466 for (i = 0; i < N_REG_CLASSES; i++)
467 basic_block_needs[i] = 0;
468
0dadecf6
RK
469#ifdef SECONDARY_MEMORY_NEEDED
470 /* Initialize the secondary memory table. */
471 clear_secondary_mem ();
472#endif
473
32131a9c
RK
474 /* Remember which hard regs appear explicitly
475 before we merge into `regs_ever_live' the ones in which
476 pseudo regs have been allocated. */
477 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
478
479 /* We don't have a stack slot for any spill reg yet. */
480 bzero (spill_stack_slot, sizeof spill_stack_slot);
481 bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);
482
a8efe40d
RK
483 /* Initialize the save area information for caller-save, in case some
484 are needed. */
485 init_save_areas ();
a8fdc208 486
32131a9c
RK
487 /* Compute which hard registers are now in use
488 as homes for pseudo registers.
489 This is done here rather than (eg) in global_alloc
490 because this point is reached even if not optimizing. */
491
492 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
493 mark_home_live (i);
494
c307c237
RK
495 for (i = 0; i < scratch_list_length; i++)
496 if (scratch_list[i])
497 mark_scratch_live (scratch_list[i]);
498
32131a9c
RK
499 /* Make sure that the last insn in the chain
500 is not something that needs reloading. */
fb3821f7 501 emit_note (NULL_PTR, NOTE_INSN_DELETED);
32131a9c
RK
502
503 /* Find all the pseudo registers that didn't get hard regs
504 but do have known equivalent constants or memory slots.
505 These include parameters (known equivalent to parameter slots)
506 and cse'd or loop-moved constant memory addresses.
507
508 Record constant equivalents in reg_equiv_constant
509 so they will be substituted by find_reloads.
510 Record memory equivalents in reg_mem_equiv so they can
511 be substituted eventually by altering the REG-rtx's. */
512
513 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
514 bzero (reg_equiv_constant, max_regno * sizeof (rtx));
515 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
516 bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
517 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
518 bzero (reg_equiv_mem, max_regno * sizeof (rtx));
519 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
520 bzero (reg_equiv_init, max_regno * sizeof (rtx));
521 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
522 bzero (reg_equiv_address, max_regno * sizeof (rtx));
523 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
524 bzero (reg_max_ref_width, max_regno * sizeof (int));
208dffa5
RS
525 cannot_omit_stores = (char *) alloca (max_regno);
526 bzero (cannot_omit_stores, max_regno);
32131a9c
RK
527
528 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
529 Also find all paradoxical subregs
530 and find largest such for each pseudo. */
531
532 for (insn = first; insn; insn = NEXT_INSN (insn))
533 {
534 rtx set = single_set (insn);
535
536 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
537 {
fb3821f7 538 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
a8efe40d
RK
539 if (note
540#ifdef LEGITIMATE_PIC_OPERAND_P
a8fdc208 541 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
a8efe40d
RK
542 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
543#endif
544 )
32131a9c
RK
545 {
546 rtx x = XEXP (note, 0);
547 i = REGNO (SET_DEST (set));
548 if (i > LAST_VIRTUAL_REGISTER)
549 {
550 if (GET_CODE (x) == MEM)
551 reg_equiv_memory_loc[i] = x;
552 else if (CONSTANT_P (x))
553 {
554 if (LEGITIMATE_CONSTANT_P (x))
555 reg_equiv_constant[i] = x;
556 else
557 reg_equiv_memory_loc[i]
d445b551 558 = force_const_mem (GET_MODE (SET_DEST (set)), x);
32131a9c
RK
559 }
560 else
561 continue;
562
563 /* If this register is being made equivalent to a MEM
564 and the MEM is not SET_SRC, the equivalencing insn
565 is one with the MEM as a SET_DEST and it occurs later.
566 So don't mark this insn now. */
567 if (GET_CODE (x) != MEM
568 || rtx_equal_p (SET_SRC (set), x))
569 reg_equiv_init[i] = insn;
570 }
571 }
572 }
573
574 /* If this insn is setting a MEM from a register equivalent to it,
575 this is the equivalencing insn. */
576 else if (set && GET_CODE (SET_DEST (set)) == MEM
577 && GET_CODE (SET_SRC (set)) == REG
578 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
579 && rtx_equal_p (SET_DEST (set),
580 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
581 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
582
583 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
584 scan_paradoxical_subregs (PATTERN (insn));
585 }
586
587 /* Does this function require a frame pointer? */
588
589 frame_pointer_needed = (! flag_omit_frame_pointer
590#ifdef EXIT_IGNORE_STACK
591 /* ?? If EXIT_IGNORE_STACK is set, we will not save
592 and restore sp for alloca. So we can't eliminate
593 the frame pointer in that case. At some point,
594 we should improve this by emitting the
595 sp-adjusting insns for this case. */
596 || (current_function_calls_alloca
597 && EXIT_IGNORE_STACK)
598#endif
599 || FRAME_POINTER_REQUIRED);
600
601 num_eliminable = 0;
602
603 /* Initialize the table of registers to eliminate. The way we do this
604 depends on how the eliminable registers were defined. */
605#ifdef ELIMINABLE_REGS
606 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
607 {
608 ep->can_eliminate = ep->can_eliminate_previous
609 = (CAN_ELIMINATE (ep->from, ep->to)
3ec2ea3e
DE
610 && (ep->from != HARD_FRAME_POINTER_REGNUM
611 || ! frame_pointer_needed));
32131a9c
RK
612 }
613#else
614 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
615 = ! frame_pointer_needed;
616#endif
617
618 /* Count the number of eliminable registers and build the FROM and TO
a8fdc208 619 REG rtx's. Note that code in gen_rtx will cause, e.g.,
32131a9c
RK
620 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
621 We depend on this. */
622 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
623 {
624 num_eliminable += ep->can_eliminate;
625 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
626 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
627 }
628
629 num_labels = max_label_num () - get_first_label_num ();
630
631 /* Allocate the tables used to store offset information at labels. */
632 offsets_known_at = (char *) alloca (num_labels);
633 offsets_at
634 = (int (*)[NUM_ELIMINABLE_REGS])
635 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
636
637 offsets_known_at -= get_first_label_num ();
638 offsets_at -= get_first_label_num ();
639
640 /* Alter each pseudo-reg rtx to contain its hard reg number.
641 Assign stack slots to the pseudos that lack hard regs or equivalents.
642 Do not touch virtual registers. */
643
644 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
645 alter_reg (i, -1);
646
647 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
648 because the stack size may be a part of the offset computation for
649 register elimination. */
650 assign_stack_local (BLKmode, 0, 0);
651
652 /* If we have some registers we think can be eliminated, scan all insns to
653 see if there is an insn that sets one of these registers to something
654 other than itself plus a constant. If so, the register cannot be
655 eliminated. Doing this scan here eliminates an extra pass through the
656 main reload loop in the most common case where register elimination
657 cannot be done. */
658 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
659 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
660 || GET_CODE (insn) == CALL_INSN)
661 note_stores (PATTERN (insn), mark_not_eliminable);
662
663#ifndef REGISTER_CONSTRAINTS
664 /* If all the pseudo regs have hard regs,
665 except for those that are never referenced,
666 we know that no reloads are needed. */
667 /* But that is not true if there are register constraints, since
668 in that case some pseudos might be in the wrong kind of hard reg. */
669
670 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
671 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
672 break;
673
b8093d02 674 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
32131a9c
RK
675 return;
676#endif
677
678 /* Compute the order of preference for hard registers to spill.
679 Store them by decreasing preference in potential_reload_regs. */
680
681 order_regs_for_reload ();
682
683 /* So far, no hard regs have been spilled. */
684 n_spills = 0;
685 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
686 spill_reg_order[i] = -1;
687
688 /* On most machines, we can't use any register explicitly used in the
689 rtl as a spill register. But on some, we have to. Those will have
690 taken care to keep the life of hard regs as short as possible. */
691
692#ifdef SMALL_REGISTER_CLASSES
693 CLEAR_HARD_REG_SET (forbidden_regs);
694#else
695 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
696#endif
697
698 /* Spill any hard regs that we know we can't eliminate. */
699 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
700 if (! ep->can_eliminate)
701 {
702 spill_hard_reg (ep->from, global, dumpfile, 1);
703 regs_ever_live[ep->from] = 1;
704 }
705
706 if (global)
707 for (i = 0; i < N_REG_CLASSES; i++)
708 {
709 basic_block_needs[i] = (char *)alloca (n_basic_blocks);
710 bzero (basic_block_needs[i], n_basic_blocks);
711 }
712
b2f15f94
RK
713 /* From now on, we need to emit any moves without making new pseudos. */
714 reload_in_progress = 1;
715
32131a9c
RK
716 /* This loop scans the entire function each go-round
717 and repeats until one repetition spills no additional hard regs. */
718
d45cf215 719 /* This flag is set when a pseudo reg is spilled,
32131a9c
RK
720 to require another pass. Note that getting an additional reload
721 reg does not necessarily imply any pseudo reg was spilled;
722 sometimes we find a reload reg that no pseudo reg was allocated in. */
723 something_changed = 1;
724 /* This flag is set if there are any insns that require reloading. */
725 something_needs_reloads = 0;
726 /* This flag is set if there are any insns that require register
727 eliminations. */
728 something_needs_elimination = 0;
729 while (something_changed)
730 {
731 rtx after_call = 0;
732
733 /* For each class, number of reload regs needed in that class.
734 This is the maximum over all insns of the needs in that class
735 of the individual insn. */
736 int max_needs[N_REG_CLASSES];
737 /* For each class, size of group of consecutive regs
738 that is needed for the reloads of this class. */
739 int group_size[N_REG_CLASSES];
740 /* For each class, max number of consecutive groups needed.
741 (Each group contains group_size[CLASS] consecutive registers.) */
742 int max_groups[N_REG_CLASSES];
743 /* For each class, max number needed of regs that don't belong
744 to any of the groups. */
745 int max_nongroups[N_REG_CLASSES];
746 /* For each class, the machine mode which requires consecutive
747 groups of regs of that class.
748 If two different modes ever require groups of one class,
749 they must be the same size and equally restrictive for that class,
750 otherwise we can't handle the complexity. */
751 enum machine_mode group_mode[N_REG_CLASSES];
5352b11a
RS
752 /* Record the insn where each maximum need is first found. */
753 rtx max_needs_insn[N_REG_CLASSES];
754 rtx max_groups_insn[N_REG_CLASSES];
755 rtx max_nongroups_insn[N_REG_CLASSES];
32131a9c 756 rtx x;
0dadecf6 757 int starting_frame_size = get_frame_size ();
e404a39a 758 static char *reg_class_names[] = REG_CLASS_NAMES;
32131a9c
RK
759
760 something_changed = 0;
761 bzero (max_needs, sizeof max_needs);
762 bzero (max_groups, sizeof max_groups);
763 bzero (max_nongroups, sizeof max_nongroups);
5352b11a
RS
764 bzero (max_needs_insn, sizeof max_needs_insn);
765 bzero (max_groups_insn, sizeof max_groups_insn);
766 bzero (max_nongroups_insn, sizeof max_nongroups_insn);
32131a9c
RK
767 bzero (group_size, sizeof group_size);
768 for (i = 0; i < N_REG_CLASSES; i++)
769 group_mode[i] = VOIDmode;
770
771 /* Keep track of which basic blocks are needing the reloads. */
772 this_block = 0;
773
774 /* Remember whether any element of basic_block_needs
775 changes from 0 to 1 in this pass. */
776 new_basic_block_needs = 0;
777
778 /* Reset all offsets on eliminable registers to their initial values. */
779#ifdef ELIMINABLE_REGS
780 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
781 {
782 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
a8efe40d
RK
783 ep->previous_offset = ep->offset
784 = ep->max_offset = ep->initial_offset;
32131a9c
RK
785 }
786#else
787#ifdef INITIAL_FRAME_POINTER_OFFSET
788 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
789#else
790 if (!FRAME_POINTER_REQUIRED)
791 abort ();
792 reg_eliminate[0].initial_offset = 0;
793#endif
a8efe40d 794 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
32131a9c
RK
795 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
796#endif
797
798 num_not_at_initial_offset = 0;
799
800 bzero (&offsets_known_at[get_first_label_num ()], num_labels);
801
802 /* Set a known offset for each forced label to be at the initial offset
803 of each elimination. We do this because we assume that all
804 computed jumps occur from a location where each elimination is
805 at its initial offset. */
806
807 for (x = forced_labels; x; x = XEXP (x, 1))
808 if (XEXP (x, 0))
fb3821f7 809 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
32131a9c
RK
810
811 /* For each pseudo register that has an equivalent location defined,
812 try to eliminate any eliminable registers (such as the frame pointer)
813 assuming initial offsets for the replacement register, which
814 is the normal case.
815
816 If the resulting location is directly addressable, substitute
817 the MEM we just got directly for the old REG.
818
819 If it is not addressable but is a constant or the sum of a hard reg
820 and constant, it is probably not addressable because the constant is
821 out of range, in that case record the address; we will generate
822 hairy code to compute the address in a register each time it is
6491dbbb
RK
823 needed. Similarly if it is a hard register, but one that is not
824 valid as an address register.
32131a9c
RK
825
826 If the location is not addressable, but does not have one of the
827 above forms, assign a stack slot. We have to do this to avoid the
828 potential of producing lots of reloads if, e.g., a location involves
829 a pseudo that didn't get a hard register and has an equivalent memory
830 location that also involves a pseudo that didn't get a hard register.
831
832 Perhaps at some point we will improve reload_when_needed handling
833 so this problem goes away. But that's very hairy. */
834
835 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
836 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
837 {
fb3821f7 838 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
32131a9c
RK
839
840 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
841 XEXP (x, 0)))
842 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
843 else if (CONSTANT_P (XEXP (x, 0))
6491dbbb
RK
844 || (GET_CODE (XEXP (x, 0)) == REG
845 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
32131a9c
RK
846 || (GET_CODE (XEXP (x, 0)) == PLUS
847 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
848 && (REGNO (XEXP (XEXP (x, 0), 0))
849 < FIRST_PSEUDO_REGISTER)
850 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
851 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
852 else
853 {
854 /* Make a new stack slot. Then indicate that something
a8fdc208 855 changed so we go back and recompute offsets for
32131a9c
RK
856 eliminable registers because the allocation of memory
857 below might change some offset. reg_equiv_{mem,address}
858 will be set up for this pseudo on the next pass around
859 the loop. */
860 reg_equiv_memory_loc[i] = 0;
861 reg_equiv_init[i] = 0;
862 alter_reg (i, -1);
863 something_changed = 1;
864 }
865 }
a8fdc208 866
d45cf215 867 /* If we allocated another pseudo to the stack, redo elimination
32131a9c
RK
868 bookkeeping. */
869 if (something_changed)
870 continue;
871
a8efe40d
RK
872 /* If caller-saves needs a group, initialize the group to include
873 the size and mode required for caller-saves. */
874
875 if (caller_save_group_size > 1)
876 {
877 group_mode[(int) caller_save_spill_class] = Pmode;
878 group_size[(int) caller_save_spill_class] = caller_save_group_size;
879 }
880
32131a9c
RK
881 /* Compute the most additional registers needed by any instruction.
882 Collect information separately for each class of regs. */
883
884 for (insn = first; insn; insn = NEXT_INSN (insn))
885 {
886 if (global && this_block + 1 < n_basic_blocks
887 && insn == basic_block_head[this_block+1])
888 ++this_block;
889
890 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
891 might include REG_LABEL), we need to see what effects this
892 has on the known offsets at labels. */
893
894 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
895 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
896 && REG_NOTES (insn) != 0))
897 set_label_offsets (insn, insn, 0);
898
899 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
900 {
901 /* Nonzero means don't use a reload reg that overlaps
902 the place where a function value can be returned. */
903 rtx avoid_return_reg = 0;
904
905 rtx old_body = PATTERN (insn);
906 int old_code = INSN_CODE (insn);
907 rtx old_notes = REG_NOTES (insn);
908 int did_elimination = 0;
546b63fb
RK
909 int max_total_input_groups = 0, max_total_output_groups = 0;
910
911 /* To compute the number of reload registers of each class
912 needed for an insn, we must similate what choose_reload_regs
913 can do. We do this by splitting an insn into an "input" and
914 an "output" part. RELOAD_OTHER reloads are used in both.
915 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
916 which must be live over the entire input section of reloads,
917 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
918 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
919 inputs.
920
921 The registers needed for output are RELOAD_OTHER and
922 RELOAD_FOR_OUTPUT, which are live for the entire output
923 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
924 reloads for each operand.
925
926 The total number of registers needed is the maximum of the
927 inputs and outputs. */
928
929 /* These just count RELOAD_OTHER. */
32131a9c
RK
930 int insn_needs[N_REG_CLASSES];
931 int insn_groups[N_REG_CLASSES];
932 int insn_total_groups = 0;
933
546b63fb 934 /* Count RELOAD_FOR_INPUT reloads. */
32131a9c
RK
935 int insn_needs_for_inputs[N_REG_CLASSES];
936 int insn_groups_for_inputs[N_REG_CLASSES];
937 int insn_total_groups_for_inputs = 0;
938
546b63fb 939 /* Count RELOAD_FOR_OUTPUT reloads. */
32131a9c
RK
940 int insn_needs_for_outputs[N_REG_CLASSES];
941 int insn_groups_for_outputs[N_REG_CLASSES];
942 int insn_total_groups_for_outputs = 0;
943
546b63fb
RK
944 /* Count RELOAD_FOR_INSN reloads. */
945 int insn_needs_for_insn[N_REG_CLASSES];
946 int insn_groups_for_insn[N_REG_CLASSES];
947 int insn_total_groups_for_insn = 0;
948
949 /* Count RELOAD_FOR_OTHER_ADDRESS reloads. */
950 int insn_needs_for_other_addr[N_REG_CLASSES];
951 int insn_groups_for_other_addr[N_REG_CLASSES];
952 int insn_total_groups_for_other_addr = 0;
953
954 /* Count RELOAD_FOR_INPUT_ADDRESS reloads. */
955 int insn_needs_for_in_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
956 int insn_groups_for_in_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
957 int insn_total_groups_for_in_addr[MAX_RECOG_OPERANDS];
958
959 /* Count RELOAD_FOR_OUTPUT_ADDRESS reloads. */
960 int insn_needs_for_out_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
961 int insn_groups_for_out_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
962 int insn_total_groups_for_out_addr[MAX_RECOG_OPERANDS];
963
32131a9c 964 /* Count RELOAD_FOR_OPERAND_ADDRESS reloads. */
546b63fb
RK
965 int insn_needs_for_op_addr[N_REG_CLASSES];
966 int insn_groups_for_op_addr[N_REG_CLASSES];
967 int insn_total_groups_for_op_addr = 0;
32131a9c 968
32131a9c
RK
969#if 0 /* This wouldn't work nowadays, since optimize_bit_field
970 looks for non-strict memory addresses. */
971 /* Optimization: a bit-field instruction whose field
972 happens to be a byte or halfword in memory
973 can be changed to a move instruction. */
974
975 if (GET_CODE (PATTERN (insn)) == SET)
976 {
977 rtx dest = SET_DEST (PATTERN (insn));
978 rtx src = SET_SRC (PATTERN (insn));
979
980 if (GET_CODE (dest) == ZERO_EXTRACT
981 || GET_CODE (dest) == SIGN_EXTRACT)
982 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
983 if (GET_CODE (src) == ZERO_EXTRACT
984 || GET_CODE (src) == SIGN_EXTRACT)
985 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
986 }
987#endif
988
989 /* If needed, eliminate any eliminable registers. */
990 if (num_eliminable)
991 did_elimination = eliminate_regs_in_insn (insn, 0);
992
993#ifdef SMALL_REGISTER_CLASSES
994 /* Set avoid_return_reg if this is an insn
995 that might use the value of a function call. */
996 if (GET_CODE (insn) == CALL_INSN)
997 {
998 if (GET_CODE (PATTERN (insn)) == SET)
999 after_call = SET_DEST (PATTERN (insn));
1000 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1001 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1002 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1003 else
1004 after_call = 0;
1005 }
1006 else if (after_call != 0
1007 && !(GET_CODE (PATTERN (insn)) == SET
1008 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1009 {
1010 if (reg_mentioned_p (after_call, PATTERN (insn)))
1011 avoid_return_reg = after_call;
1012 after_call = 0;
1013 }
1014#endif /* SMALL_REGISTER_CLASSES */
1015
1016 /* Analyze the instruction. */
1017 find_reloads (insn, 0, spill_indirect_levels, global,
1018 spill_reg_order);
1019
1020 /* Remember for later shortcuts which insns had any reloads or
1021 register eliminations.
1022
1023 One might think that it would be worthwhile to mark insns
1024 that need register replacements but not reloads, but this is
1025 not safe because find_reloads may do some manipulation of
1026 the insn (such as swapping commutative operands), which would
1027 be lost when we restore the old pattern after register
1028 replacement. So the actions of find_reloads must be redone in
1029 subsequent passes or in reload_as_needed.
1030
1031 However, it is safe to mark insns that need reloads
1032 but not register replacement. */
1033
1034 PUT_MODE (insn, (did_elimination ? QImode
1035 : n_reloads ? HImode
546b63fb 1036 : GET_MODE (insn) == DImode ? DImode
32131a9c
RK
1037 : VOIDmode));
1038
1039 /* Discard any register replacements done. */
1040 if (did_elimination)
1041 {
1042 obstack_free (&reload_obstack, reload_firstobj);
1043 PATTERN (insn) = old_body;
1044 INSN_CODE (insn) = old_code;
1045 REG_NOTES (insn) = old_notes;
1046 something_needs_elimination = 1;
1047 }
1048
a8efe40d 1049 /* If this insn has no reloads, we need not do anything except
a8fdc208 1050 in the case of a CALL_INSN when we have caller-saves and
a8efe40d
RK
1051 caller-save needs reloads. */
1052
1053 if (n_reloads == 0
1054 && ! (GET_CODE (insn) == CALL_INSN
1055 && caller_save_spill_class != NO_REGS))
32131a9c
RK
1056 continue;
1057
1058 something_needs_reloads = 1;
1059
a8efe40d
RK
1060 for (i = 0; i < N_REG_CLASSES; i++)
1061 {
1062 insn_needs[i] = 0, insn_groups[i] = 0;
1063 insn_needs_for_inputs[i] = 0, insn_groups_for_inputs[i] = 0;
1064 insn_needs_for_outputs[i] = 0, insn_groups_for_outputs[i] = 0;
546b63fb
RK
1065 insn_needs_for_insn[i] = 0, insn_groups_for_insn[i] = 0;
1066 insn_needs_for_op_addr[i] = 0, insn_groups_for_op_addr[i] = 0;
1067 insn_needs_for_other_addr[i] = 0;
1068 insn_groups_for_other_addr[i] = 0;
a8efe40d
RK
1069 }
1070
546b63fb
RK
1071 for (i = 0; i < reload_n_operands; i++)
1072 {
1073 insn_total_groups_for_in_addr[i] = 0;
1074 insn_total_groups_for_out_addr[i] = 0;
1075
1076 for (j = 0; j < N_REG_CLASSES; j++)
1077 {
1078 insn_needs_for_in_addr[i][j] = 0;
1079 insn_needs_for_out_addr[i][j] = 0;
1080 insn_groups_for_in_addr[i][j] = 0;
1081 insn_groups_for_out_addr[i][j] = 0;
1082 }
1083 }
1084
32131a9c
RK
1085 /* Count each reload once in every class
1086 containing the reload's own class. */
1087
1088 for (i = 0; i < n_reloads; i++)
1089 {
1090 register enum reg_class *p;
e85ddd99 1091 enum reg_class class = reload_reg_class[i];
32131a9c
RK
1092 int size;
1093 enum machine_mode mode;
1094 int *this_groups;
1095 int *this_needs;
1096 int *this_total_groups;
1097
1098 /* Don't count the dummy reloads, for which one of the
1099 regs mentioned in the insn can be used for reloading.
1100 Don't count optional reloads.
1101 Don't count reloads that got combined with others. */
1102 if (reload_reg_rtx[i] != 0
1103 || reload_optional[i] != 0
1104 || (reload_out[i] == 0 && reload_in[i] == 0
1105 && ! reload_secondary_p[i]))
1106 continue;
1107
e85ddd99
RK
1108 /* Show that a reload register of this class is needed
1109 in this basic block. We do not use insn_needs and
1110 insn_groups because they are overly conservative for
1111 this purpose. */
1112 if (global && ! basic_block_needs[(int) class][this_block])
1113 {
1114 basic_block_needs[(int) class][this_block] = 1;
1115 new_basic_block_needs = 1;
1116 }
1117
32131a9c
RK
1118 /* Decide which time-of-use to count this reload for. */
1119 switch (reload_when_needed[i])
1120 {
1121 case RELOAD_OTHER:
32131a9c
RK
1122 this_needs = insn_needs;
1123 this_groups = insn_groups;
1124 this_total_groups = &insn_total_groups;
1125 break;
1126
546b63fb 1127 case RELOAD_FOR_INPUT:
32131a9c
RK
1128 this_needs = insn_needs_for_inputs;
1129 this_groups = insn_groups_for_inputs;
1130 this_total_groups = &insn_total_groups_for_inputs;
1131 break;
1132
546b63fb 1133 case RELOAD_FOR_OUTPUT:
32131a9c
RK
1134 this_needs = insn_needs_for_outputs;
1135 this_groups = insn_groups_for_outputs;
1136 this_total_groups = &insn_total_groups_for_outputs;
1137 break;
1138
546b63fb
RK
1139 case RELOAD_FOR_INSN:
1140 this_needs = insn_needs_for_insn;
1141 this_groups = insn_groups_for_outputs;
1142 this_total_groups = &insn_total_groups_for_insn;
1143 break;
1144
1145 case RELOAD_FOR_OTHER_ADDRESS:
1146 this_needs = insn_needs_for_other_addr;
1147 this_groups = insn_groups_for_other_addr;
1148 this_total_groups = &insn_total_groups_for_other_addr;
1149 break;
1150
1151 case RELOAD_FOR_INPUT_ADDRESS:
1152 this_needs = insn_needs_for_in_addr[reload_opnum[i]];
1153 this_groups = insn_groups_for_in_addr[reload_opnum[i]];
1154 this_total_groups
1155 = &insn_total_groups_for_in_addr[reload_opnum[i]];
1156 break;
1157
1158 case RELOAD_FOR_OUTPUT_ADDRESS:
1159 this_needs = insn_needs_for_out_addr[reload_opnum[i]];
1160 this_groups = insn_groups_for_out_addr[reload_opnum[i]];
1161 this_total_groups
1162 = &insn_total_groups_for_out_addr[reload_opnum[i]];
1163 break;
1164
32131a9c 1165 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
1166 this_needs = insn_needs_for_op_addr;
1167 this_groups = insn_groups_for_op_addr;
1168 this_total_groups = &insn_total_groups_for_op_addr;
32131a9c
RK
1169 break;
1170 }
1171
1172 mode = reload_inmode[i];
1173 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1174 mode = reload_outmode[i];
e85ddd99 1175 size = CLASS_MAX_NREGS (class, mode);
32131a9c
RK
1176 if (size > 1)
1177 {
1178 enum machine_mode other_mode, allocate_mode;
1179
1180 /* Count number of groups needed separately from
1181 number of individual regs needed. */
e85ddd99
RK
1182 this_groups[(int) class]++;
1183 p = reg_class_superclasses[(int) class];
32131a9c
RK
1184 while (*p != LIM_REG_CLASSES)
1185 this_groups[(int) *p++]++;
1186 (*this_total_groups)++;
1187
1188 /* Record size and mode of a group of this class. */
1189 /* If more than one size group is needed,
1190 make all groups the largest needed size. */
e85ddd99 1191 if (group_size[(int) class] < size)
32131a9c 1192 {
e85ddd99 1193 other_mode = group_mode[(int) class];
32131a9c
RK
1194 allocate_mode = mode;
1195
e85ddd99
RK
1196 group_size[(int) class] = size;
1197 group_mode[(int) class] = mode;
32131a9c
RK
1198 }
1199 else
1200 {
1201 other_mode = mode;
e85ddd99 1202 allocate_mode = group_mode[(int) class];
32131a9c
RK
1203 }
1204
1205 /* Crash if two dissimilar machine modes both need
1206 groups of consecutive regs of the same class. */
1207
1208 if (other_mode != VOIDmode
1209 && other_mode != allocate_mode
1210 && ! modes_equiv_for_class_p (allocate_mode,
1211 other_mode,
e85ddd99 1212 class))
32131a9c
RK
1213 abort ();
1214 }
1215 else if (size == 1)
1216 {
e85ddd99
RK
1217 this_needs[(int) class] += 1;
1218 p = reg_class_superclasses[(int) class];
32131a9c
RK
1219 while (*p != LIM_REG_CLASSES)
1220 this_needs[(int) *p++] += 1;
1221 }
1222 else
1223 abort ();
1224 }
1225
1226 /* All reloads have been counted for this insn;
1227 now merge the various times of use.
1228 This sets insn_needs, etc., to the maximum total number
1229 of registers needed at any point in this insn. */
1230
1231 for (i = 0; i < N_REG_CLASSES; i++)
1232 {
546b63fb
RK
1233 int in_max, out_max;
1234
1235 for (in_max = 0, out_max = 0, j = 0;
1236 j < reload_n_operands; j++)
1237 {
1238 in_max = MAX (in_max, insn_needs_for_in_addr[j][i]);
1239 out_max = MAX (out_max, insn_needs_for_out_addr[j][i]);
1240 }
1241
1242 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1243 and operand addresses but not things used to reload them.
1244 Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads don't
1245 conflict with things needed to reload inputs or
1246 outputs. */
1247
1248 in_max = MAX (in_max, insn_needs_for_op_addr[i]);
1249 out_max = MAX (out_max, insn_needs_for_insn[i]);
1250
1251 insn_needs_for_inputs[i]
1252 = MAX (insn_needs_for_inputs[i]
1253 + insn_needs_for_op_addr[i]
1254 + insn_needs_for_insn[i],
1255 in_max + insn_needs_for_inputs[i]);
1256
1257 insn_needs_for_outputs[i] += out_max;
1258 insn_needs[i] += MAX (MAX (insn_needs_for_inputs[i],
1259 insn_needs_for_outputs[i]),
1260 insn_needs_for_other_addr[i]);
1261
1262 for (in_max = 0, out_max = 0, j = 0;
1263 j < reload_n_operands; j++)
1264 {
1265 in_max = MAX (in_max, insn_groups_for_in_addr[j][i]);
1266 out_max = MAX (out_max, insn_groups_for_out_addr[j][i]);
1267 }
1268
1269 in_max = MAX (in_max, insn_groups_for_op_addr[i]);
1270 out_max = MAX (out_max, insn_groups_for_insn[i]);
1271
1272 insn_groups_for_inputs[i]
1273 = MAX (insn_groups_for_inputs[i]
1274 + insn_groups_for_op_addr[i]
1275 + insn_groups_for_insn[i],
1276 in_max + insn_groups_for_inputs[i]);
1277
1278 insn_groups_for_outputs[i] += out_max;
1279 insn_groups[i] += MAX (MAX (insn_groups_for_inputs[i],
1280 insn_groups_for_outputs[i]),
1281 insn_groups_for_other_addr[i]);
1282 }
1283
1284 for (i = 0; i < reload_n_operands; i++)
1285 {
1286 max_total_input_groups
1287 = MAX (max_total_input_groups,
1288 insn_total_groups_for_in_addr[i]);
1289 max_total_output_groups
1290 = MAX (max_total_output_groups,
1291 insn_total_groups_for_out_addr[i]);
32131a9c 1292 }
a8efe40d 1293
546b63fb
RK
1294 max_total_input_groups = MAX (max_total_input_groups,
1295 insn_total_groups_for_op_addr);
1296 max_total_output_groups = MAX (max_total_output_groups,
1297 insn_total_groups_for_insn);
1298
1299 insn_total_groups_for_inputs
1300 = MAX (max_total_input_groups + insn_total_groups_for_op_addr
1301 + insn_total_groups_for_insn,
1302 max_total_input_groups + insn_total_groups_for_inputs);
1303
1304 insn_total_groups_for_outputs += max_total_output_groups;
1305
1306 insn_total_groups += MAX (MAX (insn_total_groups_for_outputs,
1307 insn_total_groups_for_inputs),
1308 insn_total_groups_for_other_addr);
32131a9c 1309
a8efe40d
RK
1310 /* If this is a CALL_INSN and caller-saves will need
1311 a spill register, act as if the spill register is
1312 needed for this insn. However, the spill register
1313 can be used by any reload of this insn, so we only
1314 need do something if no need for that class has
a8fdc208 1315 been recorded.
a8efe40d
RK
1316
1317 The assumption that every CALL_INSN will trigger a
1318 caller-save is highly conservative, however, the number
1319 of cases where caller-saves will need a spill register but
1320 a block containing a CALL_INSN won't need a spill register
1321 of that class should be quite rare.
1322
1323 If a group is needed, the size and mode of the group will
d45cf215 1324 have been set up at the beginning of this loop. */
a8efe40d
RK
1325
1326 if (GET_CODE (insn) == CALL_INSN
1327 && caller_save_spill_class != NO_REGS)
1328 {
1329 int *caller_save_needs
1330 = (caller_save_group_size > 1 ? insn_groups : insn_needs);
1331
1332 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1333 {
1334 register enum reg_class *p
1335 = reg_class_superclasses[(int) caller_save_spill_class];
1336
1337 caller_save_needs[(int) caller_save_spill_class]++;
1338
1339 while (*p != LIM_REG_CLASSES)
0aaa6af8 1340 caller_save_needs[(int) *p++] += 1;
a8efe40d
RK
1341 }
1342
1343 if (caller_save_group_size > 1)
1344 insn_total_groups = MAX (insn_total_groups, 1);
d1c1397e
RS
1345
1346
1347 /* Show that this basic block will need a register of
1348 this class. */
1349
1350 if (global
1351 && ! (basic_block_needs[(int) caller_save_spill_class]
1352 [this_block]))
1353 {
1354 basic_block_needs[(int) caller_save_spill_class]
1355 [this_block] = 1;
1356 new_basic_block_needs = 1;
1357 }
a8efe40d
RK
1358 }
1359
32131a9c
RK
1360#ifdef SMALL_REGISTER_CLASSES
1361 /* If this insn stores the value of a function call,
1362 and that value is in a register that has been spilled,
1363 and if the insn needs a reload in a class
1364 that might use that register as the reload register,
1365 then add add an extra need in that class.
1366 This makes sure we have a register available that does
1367 not overlap the return value. */
1368 if (avoid_return_reg)
1369 {
1370 int regno = REGNO (avoid_return_reg);
1371 int nregs
1372 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1373 int r;
546b63fb
RK
1374 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1375
1376 /* First compute the "basic needs", which counts a
1377 need only in the smallest class in which it
1378 is required. */
1379
1380 bcopy (insn_needs, basic_needs, sizeof basic_needs);
1381 bcopy (insn_groups, basic_groups, sizeof basic_groups);
1382
1383 for (i = 0; i < N_REG_CLASSES; i++)
1384 {
1385 enum reg_class *p;
1386
1387 if (basic_needs[i] >= 0)
1388 for (p = reg_class_superclasses[i];
1389 *p != LIM_REG_CLASSES; p++)
1390 basic_needs[(int) *p] -= basic_needs[i];
1391
1392 if (basic_groups[i] >= 0)
1393 for (p = reg_class_superclasses[i];
1394 *p != LIM_REG_CLASSES; p++)
1395 basic_groups[(int) *p] -= basic_groups[i];
1396 }
1397
1398 /* Now count extra regs if there might be a conflict with
1399 the return value register.
1400
1401 ??? This is not quite correct because we don't properly
1402 handle the case of groups, but if we end up doing
1403 something wrong, it either will end up not mattering or
1404 we will abort elsewhere. */
1405
32131a9c
RK
1406 for (r = regno; r < regno + nregs; r++)
1407 if (spill_reg_order[r] >= 0)
1408 for (i = 0; i < N_REG_CLASSES; i++)
1409 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1410 {
546b63fb
RK
1411 if (basic_needs[i] > 0 || basic_groups[i] > 0)
1412 {
1413 enum reg_class *p;
1414
1415 insn_needs[i]++;
1416 p = reg_class_superclasses[i];
1417 while (*p != LIM_REG_CLASSES)
1418 insn_needs[(int) *p++]++;
1419 }
32131a9c 1420 }
32131a9c
RK
1421 }
1422#endif /* SMALL_REGISTER_CLASSES */
1423
1424 /* For each class, collect maximum need of any insn. */
1425
1426 for (i = 0; i < N_REG_CLASSES; i++)
1427 {
1428 if (max_needs[i] < insn_needs[i])
5352b11a
RS
1429 {
1430 max_needs[i] = insn_needs[i];
1431 max_needs_insn[i] = insn;
1432 }
32131a9c 1433 if (max_groups[i] < insn_groups[i])
5352b11a
RS
1434 {
1435 max_groups[i] = insn_groups[i];
1436 max_groups_insn[i] = insn;
1437 }
32131a9c
RK
1438 if (insn_total_groups > 0)
1439 if (max_nongroups[i] < insn_needs[i])
5352b11a
RS
1440 {
1441 max_nongroups[i] = insn_needs[i];
1442 max_nongroups_insn[i] = insn;
1443 }
32131a9c
RK
1444 }
1445 }
1446 /* Note that there is a continue statement above. */
1447 }
1448
0dadecf6
RK
1449 /* If we allocated any new memory locations, make another pass
1450 since it might have changed elimination offsets. */
1451 if (starting_frame_size != get_frame_size ())
1452 something_changed = 1;
1453
e404a39a
RK
1454 if (dumpfile)
1455 for (i = 0; i < N_REG_CLASSES; i++)
1456 {
1457 if (max_needs[i] > 0)
1458 fprintf (dumpfile,
1459 ";; Need %d reg%s of class %s (for insn %d).\n",
1460 max_needs[i], max_needs[i] == 1 ? "" : "s",
1461 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1462 if (max_nongroups[i] > 0)
1463 fprintf (dumpfile,
1464 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1465 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1466 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1467 if (max_groups[i] > 0)
1468 fprintf (dumpfile,
1469 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1470 max_groups[i], max_groups[i] == 1 ? "" : "s",
1471 mode_name[(int) group_mode[i]],
1472 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1473 }
1474
d445b551 1475 /* If we have caller-saves, set up the save areas and see if caller-save
a8efe40d 1476 will need a spill register. */
32131a9c 1477
d445b551 1478 if (caller_save_needed
a8efe40d
RK
1479 && ! setup_save_areas (&something_changed)
1480 && caller_save_spill_class == NO_REGS)
32131a9c 1481 {
a8efe40d
RK
1482 /* The class we will need depends on whether the machine
1483 supports the sum of two registers for an address; see
1484 find_address_reloads for details. */
1485
a8fdc208 1486 caller_save_spill_class
a8efe40d
RK
1487 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1488 caller_save_group_size
1489 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1490 something_changed = 1;
32131a9c
RK
1491 }
1492
5c23c401
RK
1493 /* See if anything that happened changes which eliminations are valid.
1494 For example, on the Sparc, whether or not the frame pointer can
1495 be eliminated can depend on what registers have been used. We need
1496 not check some conditions again (such as flag_omit_frame_pointer)
1497 since they can't have changed. */
1498
1499 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3ec2ea3e 1500 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
5c23c401
RK
1501#ifdef ELIMINABLE_REGS
1502 || ! CAN_ELIMINATE (ep->from, ep->to)
1503#endif
1504 )
1505 ep->can_eliminate = 0;
1506
32131a9c
RK
1507 /* Look for the case where we have discovered that we can't replace
1508 register A with register B and that means that we will now be
1509 trying to replace register A with register C. This means we can
1510 no longer replace register C with register B and we need to disable
1511 such an elimination, if it exists. This occurs often with A == ap,
1512 B == sp, and C == fp. */
a8fdc208 1513
32131a9c
RK
1514 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1515 {
1516 struct elim_table *op;
1517 register int new_to = -1;
1518
1519 if (! ep->can_eliminate && ep->can_eliminate_previous)
1520 {
1521 /* Find the current elimination for ep->from, if there is a
1522 new one. */
1523 for (op = reg_eliminate;
1524 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1525 if (op->from == ep->from && op->can_eliminate)
1526 {
1527 new_to = op->to;
1528 break;
1529 }
1530
1531 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1532 disable it. */
1533 for (op = reg_eliminate;
1534 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1535 if (op->from == new_to && op->to == ep->to)
1536 op->can_eliminate = 0;
1537 }
1538 }
1539
1540 /* See if any registers that we thought we could eliminate the previous
1541 time are no longer eliminable. If so, something has changed and we
1542 must spill the register. Also, recompute the number of eliminable
1543 registers and see if the frame pointer is needed; it is if there is
1544 no elimination of the frame pointer that we can perform. */
1545
1546 frame_pointer_needed = 1;
1547 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1548 {
3ec2ea3e
DE
1549 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1550 && ep->to != HARD_FRAME_POINTER_REGNUM)
32131a9c
RK
1551 frame_pointer_needed = 0;
1552
1553 if (! ep->can_eliminate && ep->can_eliminate_previous)
1554 {
1555 ep->can_eliminate_previous = 0;
1556 spill_hard_reg (ep->from, global, dumpfile, 1);
1557 regs_ever_live[ep->from] = 1;
1558 something_changed = 1;
1559 num_eliminable--;
1560 }
1561 }
1562
1563 /* If all needs are met, we win. */
1564
1565 for (i = 0; i < N_REG_CLASSES; i++)
1566 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1567 break;
1568 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1569 break;
1570
546b63fb
RK
1571 /* Not all needs are met; must spill some hard regs. */
1572
1573 /* Put all registers spilled so far back in potential_reload_regs, but
1574 put them at the front, since we've already spilled most of the
1575 psuedos in them (we might have left some pseudos unspilled if they
1576 were in a block that didn't need any spill registers of a conflicting
1577 class. We used to try to mark off the need for those registers,
1578 but doing so properly is very complex and reallocating them is the
1579 simpler approach. First, "pack" potential_reload_regs by pushing
1580 any nonnegative entries towards the end. That will leave room
1581 for the registers we already spilled.
1582
1583 Also, undo the marking of the spill registers from the last time
1584 around in FORBIDDEN_REGS since we will be probably be allocating
1585 them again below.
1586
1587 ??? It is theoretically possible that we might end up not using one
1588 of our previously-spilled registers in this allocation, even though
1589 they are at the head of the list. It's not clear what to do about
1590 this, but it was no better before, when we marked off the needs met
1591 by the previously-spilled registers. With the current code, globals
1592 can be allocated into these registers, but locals cannot. */
1593
1594 if (n_spills)
1595 {
1596 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1597 if (potential_reload_regs[i] != -1)
1598 potential_reload_regs[j--] = potential_reload_regs[i];
32131a9c 1599
546b63fb
RK
1600 for (i = 0; i < n_spills; i++)
1601 {
1602 potential_reload_regs[i] = spill_regs[i];
1603 spill_reg_order[spill_regs[i]] = -1;
1604 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1605 }
32131a9c 1606
546b63fb
RK
1607 n_spills = 0;
1608 }
32131a9c
RK
1609
1610 /* Now find more reload regs to satisfy the remaining need
1611 Do it by ascending class number, since otherwise a reg
1612 might be spilled for a big class and might fail to count
1613 for a smaller class even though it belongs to that class.
1614
1615 Count spilled regs in `spills', and add entries to
1616 `spill_regs' and `spill_reg_order'.
1617
1618 ??? Note there is a problem here.
1619 When there is a need for a group in a high-numbered class,
1620 and also need for non-group regs that come from a lower class,
1621 the non-group regs are chosen first. If there aren't many regs,
1622 they might leave no room for a group.
1623
1624 This was happening on the 386. To fix it, we added the code
1625 that calls possible_group_p, so that the lower class won't
1626 break up the last possible group.
1627
1628 Really fixing the problem would require changes above
1629 in counting the regs already spilled, and in choose_reload_regs.
1630 It might be hard to avoid introducing bugs there. */
1631
546b63fb
RK
1632 CLEAR_HARD_REG_SET (counted_for_groups);
1633 CLEAR_HARD_REG_SET (counted_for_nongroups);
1634
32131a9c
RK
1635 for (class = 0; class < N_REG_CLASSES; class++)
1636 {
1637 /* First get the groups of registers.
1638 If we got single registers first, we might fragment
1639 possible groups. */
1640 while (max_groups[class] > 0)
1641 {
1642 /* If any single spilled regs happen to form groups,
1643 count them now. Maybe we don't really need
1644 to spill another group. */
1645 count_possible_groups (group_size, group_mode, max_groups);
1646
93193ab5
RK
1647 if (max_groups[class] <= 0)
1648 break;
1649
32131a9c
RK
1650 /* Groups of size 2 (the only groups used on most machines)
1651 are treated specially. */
1652 if (group_size[class] == 2)
1653 {
1654 /* First, look for a register that will complete a group. */
1655 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1656 {
32131a9c 1657 int other;
546b63fb
RK
1658
1659 j = potential_reload_regs[i];
32131a9c
RK
1660 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1661 &&
1662 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1663 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1664 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1665 && HARD_REGNO_MODE_OK (other, group_mode[class])
1666 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1667 other)
1668 /* We don't want one part of another group.
1669 We could get "two groups" that overlap! */
1670 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1671 ||
1672 (j < FIRST_PSEUDO_REGISTER - 1
1673 && (other = j + 1, spill_reg_order[other] >= 0)
1674 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1675 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1676 && HARD_REGNO_MODE_OK (j, group_mode[class])
1677 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1678 other)
1679 && ! TEST_HARD_REG_BIT (counted_for_groups,
1680 other))))
1681 {
1682 register enum reg_class *p;
1683
1684 /* We have found one that will complete a group,
1685 so count off one group as provided. */
1686 max_groups[class]--;
1687 p = reg_class_superclasses[class];
1688 while (*p != LIM_REG_CLASSES)
1689 max_groups[(int) *p++]--;
1690
1691 /* Indicate both these regs are part of a group. */
1692 SET_HARD_REG_BIT (counted_for_groups, j);
1693 SET_HARD_REG_BIT (counted_for_groups, other);
1694 break;
1695 }
1696 }
1697 /* We can't complete a group, so start one. */
1698 if (i == FIRST_PSEUDO_REGISTER)
1699 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1700 {
57697575 1701 int k;
546b63fb 1702 j = potential_reload_regs[i];
57697575
RS
1703 /* Verify that J+1 is a potential reload reg. */
1704 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1705 if (potential_reload_regs[k] == j + 1)
1706 break;
32131a9c 1707 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
57697575 1708 && k < FIRST_PSEUDO_REGISTER
32131a9c
RK
1709 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1710 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1711 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1712 && HARD_REGNO_MODE_OK (j, group_mode[class])
1713 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
ee9f63c6
RS
1714 j + 1)
1715 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
32131a9c
RK
1716 break;
1717 }
1718
1719 /* I should be the index in potential_reload_regs
1720 of the new reload reg we have found. */
1721
5352b11a
RS
1722 if (i >= FIRST_PSEUDO_REGISTER)
1723 {
1724 /* There are no groups left to spill. */
1725 spill_failure (max_groups_insn[class]);
1726 failure = 1;
1727 goto failed;
1728 }
1729 else
1730 something_changed
fb3821f7 1731 |= new_spill_reg (i, class, max_needs, NULL_PTR,
5352b11a 1732 global, dumpfile);
32131a9c
RK
1733 }
1734 else
1735 {
1736 /* For groups of more than 2 registers,
1737 look for a sufficient sequence of unspilled registers,
1738 and spill them all at once. */
1739 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1740 {
32131a9c 1741 int k;
546b63fb
RK
1742
1743 j = potential_reload_regs[i];
9d1a4667
RS
1744 if (j >= 0
1745 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
32131a9c
RK
1746 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1747 {
1748 /* Check each reg in the sequence. */
1749 for (k = 0; k < group_size[class]; k++)
1750 if (! (spill_reg_order[j + k] < 0
1751 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1752 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1753 break;
1754 /* We got a full sequence, so spill them all. */
1755 if (k == group_size[class])
1756 {
1757 register enum reg_class *p;
1758 for (k = 0; k < group_size[class]; k++)
1759 {
1760 int idx;
1761 SET_HARD_REG_BIT (counted_for_groups, j + k);
1762 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1763 if (potential_reload_regs[idx] == j + k)
1764 break;
9d1a4667
RS
1765 something_changed
1766 |= new_spill_reg (idx, class,
1767 max_needs, NULL_PTR,
1768 global, dumpfile);
32131a9c
RK
1769 }
1770
1771 /* We have found one that will complete a group,
1772 so count off one group as provided. */
1773 max_groups[class]--;
1774 p = reg_class_superclasses[class];
1775 while (*p != LIM_REG_CLASSES)
1776 max_groups[(int) *p++]--;
1777
1778 break;
1779 }
1780 }
1781 }
fa52261e 1782 /* We couldn't find any registers for this reload.
9d1a4667
RS
1783 Avoid going into an infinite loop. */
1784 if (i >= FIRST_PSEUDO_REGISTER)
1785 {
1786 /* There are no groups left. */
1787 spill_failure (max_groups_insn[class]);
1788 failure = 1;
1789 goto failed;
1790 }
32131a9c
RK
1791 }
1792 }
1793
1794 /* Now similarly satisfy all need for single registers. */
1795
1796 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1797 {
9a6cde3a
RS
1798#ifdef SMALL_REGISTER_CLASSES
1799 /* This should be right for all machines, but only the 386
1800 is known to need it, so this conditional plays safe.
1801 ??? For 2.5, try making this unconditional. */
1802 /* If we spilled enough regs, but they weren't counted
1803 against the non-group need, see if we can count them now.
1804 If so, we can avoid some actual spilling. */
1805 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1806 for (i = 0; i < n_spills; i++)
1807 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1808 spill_regs[i])
1809 && !TEST_HARD_REG_BIT (counted_for_groups,
1810 spill_regs[i])
1811 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1812 spill_regs[i])
1813 && max_nongroups[class] > 0)
1814 {
1815 register enum reg_class *p;
1816
1817 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1818 max_nongroups[class]--;
1819 p = reg_class_superclasses[class];
1820 while (*p != LIM_REG_CLASSES)
1821 max_nongroups[(int) *p++]--;
1822 }
1823 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1824 break;
1825#endif
1826
32131a9c
RK
1827 /* Consider the potential reload regs that aren't
1828 yet in use as reload regs, in order of preference.
1829 Find the most preferred one that's in this class. */
1830
1831 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1832 if (potential_reload_regs[i] >= 0
1833 && TEST_HARD_REG_BIT (reg_class_contents[class],
1834 potential_reload_regs[i])
1835 /* If this reg will not be available for groups,
1836 pick one that does not foreclose possible groups.
1837 This is a kludge, and not very general,
1838 but it should be sufficient to make the 386 work,
1839 and the problem should not occur on machines with
1840 more registers. */
1841 && (max_nongroups[class] == 0
1842 || possible_group_p (potential_reload_regs[i], max_groups)))
1843 break;
1844
e404a39a
RK
1845 /* If we couldn't get a register, try to get one even if we
1846 might foreclose possible groups. This may cause problems
1847 later, but that's better than aborting now, since it is
1848 possible that we will, in fact, be able to form the needed
1849 group even with this allocation. */
1850
1851 if (i >= FIRST_PSEUDO_REGISTER
1852 && (asm_noperands (max_needs[class] > 0
1853 ? max_needs_insn[class]
1854 : max_nongroups_insn[class])
1855 < 0))
1856 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1857 if (potential_reload_regs[i] >= 0
1858 && TEST_HARD_REG_BIT (reg_class_contents[class],
1859 potential_reload_regs[i]))
1860 break;
1861
32131a9c
RK
1862 /* I should be the index in potential_reload_regs
1863 of the new reload reg we have found. */
1864
5352b11a
RS
1865 if (i >= FIRST_PSEUDO_REGISTER)
1866 {
1867 /* There are no possible registers left to spill. */
1868 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1869 : max_nongroups_insn[class]);
1870 failure = 1;
1871 goto failed;
1872 }
1873 else
1874 something_changed
1875 |= new_spill_reg (i, class, max_needs, max_nongroups,
1876 global, dumpfile);
32131a9c
RK
1877 }
1878 }
1879 }
1880
1881 /* If global-alloc was run, notify it of any register eliminations we have
1882 done. */
1883 if (global)
1884 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1885 if (ep->can_eliminate)
1886 mark_elimination (ep->from, ep->to);
1887
32131a9c 1888 /* Insert code to save and restore call-clobbered hard regs
a8efe40d
RK
1889 around calls. Tell if what mode to use so that we will process
1890 those insns in reload_as_needed if we have to. */
32131a9c
RK
1891
1892 if (caller_save_needed)
a8efe40d
RK
1893 save_call_clobbered_regs (num_eliminable ? QImode
1894 : caller_save_spill_class != NO_REGS ? HImode
1895 : VOIDmode);
32131a9c
RK
1896
1897 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1898 If that insn didn't set the register (i.e., it copied the register to
1899 memory), just delete that insn instead of the equivalencing insn plus
1900 anything now dead. If we call delete_dead_insn on that insn, we may
1901 delete the insn that actually sets the register if the register die
1902 there and that is incorrect. */
1903
1904 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1905 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1906 && GET_CODE (reg_equiv_init[i]) != NOTE)
1907 {
1908 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1909 delete_dead_insn (reg_equiv_init[i]);
1910 else
1911 {
1912 PUT_CODE (reg_equiv_init[i], NOTE);
1913 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1914 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1915 }
1916 }
1917
1918 /* Use the reload registers where necessary
1919 by generating move instructions to move the must-be-register
1920 values into or out of the reload registers. */
1921
a8efe40d
RK
1922 if (something_needs_reloads || something_needs_elimination
1923 || (caller_save_needed && num_eliminable)
1924 || caller_save_spill_class != NO_REGS)
32131a9c
RK
1925 reload_as_needed (first, global);
1926
2a1f8b6b 1927 /* If we were able to eliminate the frame pointer, show that it is no
546b63fb 1928 longer live at the start of any basic block. If it ls live by
2a1f8b6b
RK
1929 virtue of being in a pseudo, that pseudo will be marked live
1930 and hence the frame pointer will be known to be live via that
1931 pseudo. */
1932
1933 if (! frame_pointer_needed)
1934 for (i = 0; i < n_basic_blocks; i++)
3ec2ea3e
DE
1935 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1936 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1937 % REGSET_ELT_BITS));
2a1f8b6b 1938
5352b11a
RS
1939 /* Come here (with failure set nonzero) if we can't get enough spill regs
1940 and we decide not to abort about it. */
1941 failed:
1942
a3ec87a8
RS
1943 reload_in_progress = 0;
1944
32131a9c
RK
1945 /* Now eliminate all pseudo regs by modifying them into
1946 their equivalent memory references.
1947 The REG-rtx's for the pseudos are modified in place,
1948 so all insns that used to refer to them now refer to memory.
1949
1950 For a reg that has a reg_equiv_address, all those insns
1951 were changed by reloading so that no insns refer to it any longer;
1952 but the DECL_RTL of a variable decl may refer to it,
1953 and if so this causes the debugging info to mention the variable. */
1954
1955 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1956 {
1957 rtx addr = 0;
ab1fd483 1958 int in_struct = 0;
32131a9c 1959 if (reg_equiv_mem[i])
ab1fd483
RS
1960 {
1961 addr = XEXP (reg_equiv_mem[i], 0);
1962 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1963 }
32131a9c
RK
1964 if (reg_equiv_address[i])
1965 addr = reg_equiv_address[i];
1966 if (addr)
1967 {
1968 if (reg_renumber[i] < 0)
1969 {
1970 rtx reg = regno_reg_rtx[i];
1971 XEXP (reg, 0) = addr;
1972 REG_USERVAR_P (reg) = 0;
ab1fd483 1973 MEM_IN_STRUCT_P (reg) = in_struct;
32131a9c
RK
1974 PUT_CODE (reg, MEM);
1975 }
1976 else if (reg_equiv_mem[i])
1977 XEXP (reg_equiv_mem[i], 0) = addr;
1978 }
1979 }
1980
1981#ifdef PRESERVE_DEATH_INFO_REGNO_P
1982 /* Make a pass over all the insns and remove death notes for things that
1983 are no longer registers or no longer die in the insn (e.g., an input
1984 and output pseudo being tied). */
1985
1986 for (insn = first; insn; insn = NEXT_INSN (insn))
1987 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1988 {
1989 rtx note, next;
1990
1991 for (note = REG_NOTES (insn); note; note = next)
1992 {
1993 next = XEXP (note, 1);
1994 if (REG_NOTE_KIND (note) == REG_DEAD
1995 && (GET_CODE (XEXP (note, 0)) != REG
1996 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1997 remove_note (insn, note);
1998 }
1999 }
2000#endif
2001
2002 /* Indicate that we no longer have known memory locations or constants. */
2003 reg_equiv_constant = 0;
2004 reg_equiv_memory_loc = 0;
5352b11a 2005
c8ab4464
RS
2006 if (scratch_list)
2007 free (scratch_list);
c307c237 2008 scratch_list = 0;
c8ab4464
RS
2009 if (scratch_block)
2010 free (scratch_block);
c307c237
RK
2011 scratch_block = 0;
2012
5352b11a 2013 return failure;
32131a9c
RK
2014}
2015\f
2016/* Nonzero if, after spilling reg REGNO for non-groups,
2017 it will still be possible to find a group if we still need one. */
2018
2019static int
2020possible_group_p (regno, max_groups)
2021 int regno;
2022 int *max_groups;
2023{
2024 int i;
2025 int class = (int) NO_REGS;
2026
2027 for (i = 0; i < (int) N_REG_CLASSES; i++)
2028 if (max_groups[i] > 0)
2029 {
2030 class = i;
2031 break;
2032 }
2033
2034 if (class == (int) NO_REGS)
2035 return 1;
2036
2037 /* Consider each pair of consecutive registers. */
2038 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2039 {
2040 /* Ignore pairs that include reg REGNO. */
2041 if (i == regno || i + 1 == regno)
2042 continue;
2043
2044 /* Ignore pairs that are outside the class that needs the group.
2045 ??? Here we fail to handle the case where two different classes
2046 independently need groups. But this never happens with our
2047 current machine descriptions. */
2048 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2049 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2050 continue;
2051
2052 /* A pair of consecutive regs we can still spill does the trick. */
2053 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2054 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2055 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2056 return 1;
2057
2058 /* A pair of one already spilled and one we can spill does it
2059 provided the one already spilled is not otherwise reserved. */
2060 if (spill_reg_order[i] < 0
2061 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2062 && spill_reg_order[i + 1] >= 0
2063 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2064 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2065 return 1;
2066 if (spill_reg_order[i + 1] < 0
2067 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2068 && spill_reg_order[i] >= 0
2069 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2070 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2071 return 1;
2072 }
2073
2074 return 0;
2075}
2076\f
2077/* Count any groups that can be formed from the registers recently spilled.
2078 This is done class by class, in order of ascending class number. */
2079
2080static void
2081count_possible_groups (group_size, group_mode, max_groups)
546b63fb 2082 int *group_size;
32131a9c 2083 enum machine_mode *group_mode;
546b63fb 2084 int *max_groups;
32131a9c
RK
2085{
2086 int i;
2087 /* Now find all consecutive groups of spilled registers
2088 and mark each group off against the need for such groups.
2089 But don't count them against ordinary need, yet. */
2090
2091 for (i = 0; i < N_REG_CLASSES; i++)
2092 if (group_size[i] > 1)
2093 {
93193ab5 2094 HARD_REG_SET new;
32131a9c
RK
2095 int j;
2096
93193ab5
RK
2097 CLEAR_HARD_REG_SET (new);
2098
32131a9c
RK
2099 /* Make a mask of all the regs that are spill regs in class I. */
2100 for (j = 0; j < n_spills; j++)
2101 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
2102 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
2103 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
2104 spill_regs[j]))
93193ab5
RK
2105 SET_HARD_REG_BIT (new, spill_regs[j]);
2106
32131a9c
RK
2107 /* Find each consecutive group of them. */
2108 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
93193ab5
RK
2109 if (TEST_HARD_REG_BIT (new, j)
2110 && j + group_size[i] <= FIRST_PSEUDO_REGISTER
32131a9c
RK
2111 /* Next line in case group-mode for this class
2112 demands an even-odd pair. */
2113 && HARD_REGNO_MODE_OK (j, group_mode[i]))
2114 {
2115 int k;
2116 for (k = 1; k < group_size[i]; k++)
93193ab5 2117 if (! TEST_HARD_REG_BIT (new, j + k))
32131a9c
RK
2118 break;
2119 if (k == group_size[i])
2120 {
2121 /* We found a group. Mark it off against this class's
2122 need for groups, and against each superclass too. */
2123 register enum reg_class *p;
2124 max_groups[i]--;
2125 p = reg_class_superclasses[i];
2126 while (*p != LIM_REG_CLASSES)
2127 max_groups[(int) *p++]--;
a8fdc208 2128 /* Don't count these registers again. */
32131a9c
RK
2129 for (k = 0; k < group_size[i]; k++)
2130 SET_HARD_REG_BIT (counted_for_groups, j + k);
2131 }
fa52261e
RS
2132 /* Skip to the last reg in this group. When j is incremented
2133 above, it will then point to the first reg of the next
2134 possible group. */
2135 j += k - 1;
32131a9c
RK
2136 }
2137 }
2138
2139}
2140\f
2141/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2142 another mode that needs to be reloaded for the same register class CLASS.
2143 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2144 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2145
2146 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2147 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2148 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2149 causes unnecessary failures on machines requiring alignment of register
2150 groups when the two modes are different sizes, because the larger mode has
2151 more strict alignment rules than the smaller mode. */
2152
2153static int
2154modes_equiv_for_class_p (allocate_mode, other_mode, class)
2155 enum machine_mode allocate_mode, other_mode;
2156 enum reg_class class;
2157{
2158 register int regno;
2159 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2160 {
2161 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2162 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2163 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2164 return 0;
2165 }
2166 return 1;
2167}
2168
5352b11a
RS
2169/* Handle the failure to find a register to spill.
2170 INSN should be one of the insns which needed this particular spill reg. */
2171
2172static void
2173spill_failure (insn)
2174 rtx insn;
2175{
2176 if (asm_noperands (PATTERN (insn)) >= 0)
2177 error_for_asm (insn, "`asm' needs too many reloads");
2178 else
2179 abort ();
2180}
2181
32131a9c
RK
2182/* Add a new register to the tables of available spill-registers
2183 (as well as spilling all pseudos allocated to the register).
2184 I is the index of this register in potential_reload_regs.
2185 CLASS is the regclass whose need is being satisfied.
2186 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2187 so that this register can count off against them.
2188 MAX_NONGROUPS is 0 if this register is part of a group.
2189 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2190
2191static int
2192new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2193 int i;
2194 int class;
2195 int *max_needs;
2196 int *max_nongroups;
2197 int global;
2198 FILE *dumpfile;
2199{
2200 register enum reg_class *p;
2201 int val;
2202 int regno = potential_reload_regs[i];
2203
2204 if (i >= FIRST_PSEUDO_REGISTER)
2205 abort (); /* Caller failed to find any register. */
2206
2207 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2208 fatal ("fixed or forbidden register was spilled.\n\
2209This may be due to a compiler bug or to impossible asm statements.");
2210
2211 /* Make reg REGNO an additional reload reg. */
2212
2213 potential_reload_regs[i] = -1;
2214 spill_regs[n_spills] = regno;
2215 spill_reg_order[regno] = n_spills;
2216 if (dumpfile)
2217 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2218
2219 /* Clear off the needs we just satisfied. */
2220
2221 max_needs[class]--;
2222 p = reg_class_superclasses[class];
2223 while (*p != LIM_REG_CLASSES)
2224 max_needs[(int) *p++]--;
2225
2226 if (max_nongroups && max_nongroups[class] > 0)
2227 {
2228 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2229 max_nongroups[class]--;
2230 p = reg_class_superclasses[class];
2231 while (*p != LIM_REG_CLASSES)
2232 max_nongroups[(int) *p++]--;
2233 }
2234
2235 /* Spill every pseudo reg that was allocated to this reg
2236 or to something that overlaps this reg. */
2237
2238 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2239
2240 /* If there are some registers still to eliminate and this register
2241 wasn't ever used before, additional stack space may have to be
2242 allocated to store this register. Thus, we may have changed the offset
2243 between the stack and frame pointers, so mark that something has changed.
2244 (If new pseudos were spilled, thus requiring more space, VAL would have
2245 been set non-zero by the call to spill_hard_reg above since additional
2246 reloads may be needed in that case.
2247
2248 One might think that we need only set VAL to 1 if this is a call-used
2249 register. However, the set of registers that must be saved by the
2250 prologue is not identical to the call-used set. For example, the
2251 register used by the call insn for the return PC is a call-used register,
2252 but must be saved by the prologue. */
2253 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2254 val = 1;
2255
2256 regs_ever_live[spill_regs[n_spills]] = 1;
2257 n_spills++;
2258
2259 return val;
2260}
2261\f
2262/* Delete an unneeded INSN and any previous insns who sole purpose is loading
2263 data that is dead in INSN. */
2264
2265static void
2266delete_dead_insn (insn)
2267 rtx insn;
2268{
2269 rtx prev = prev_real_insn (insn);
2270 rtx prev_dest;
2271
2272 /* If the previous insn sets a register that dies in our insn, delete it
2273 too. */
2274 if (prev && GET_CODE (PATTERN (prev)) == SET
2275 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2276 && reg_mentioned_p (prev_dest, PATTERN (insn))
2277 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2278 delete_dead_insn (prev);
2279
2280 PUT_CODE (insn, NOTE);
2281 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2282 NOTE_SOURCE_FILE (insn) = 0;
2283}
2284
2285/* Modify the home of pseudo-reg I.
2286 The new home is present in reg_renumber[I].
2287
2288 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2289 or it may be -1, meaning there is none or it is not relevant.
2290 This is used so that all pseudos spilled from a given hard reg
2291 can share one stack slot. */
2292
2293static void
2294alter_reg (i, from_reg)
2295 register int i;
2296 int from_reg;
2297{
2298 /* When outputting an inline function, this can happen
2299 for a reg that isn't actually used. */
2300 if (regno_reg_rtx[i] == 0)
2301 return;
2302
2303 /* If the reg got changed to a MEM at rtl-generation time,
2304 ignore it. */
2305 if (GET_CODE (regno_reg_rtx[i]) != REG)
2306 return;
2307
2308 /* Modify the reg-rtx to contain the new hard reg
2309 number or else to contain its pseudo reg number. */
2310 REGNO (regno_reg_rtx[i])
2311 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2312
2313 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2314 allocate a stack slot for it. */
2315
2316 if (reg_renumber[i] < 0
2317 && reg_n_refs[i] > 0
2318 && reg_equiv_constant[i] == 0
2319 && reg_equiv_memory_loc[i] == 0)
2320 {
2321 register rtx x;
2322 int inherent_size = PSEUDO_REGNO_BYTES (i);
2323 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2324 int adjust = 0;
2325
2326 /* Each pseudo reg has an inherent size which comes from its own mode,
2327 and a total size which provides room for paradoxical subregs
2328 which refer to the pseudo reg in wider modes.
2329
2330 We can use a slot already allocated if it provides both
2331 enough inherent space and enough total space.
2332 Otherwise, we allocate a new slot, making sure that it has no less
2333 inherent space, and no less total space, then the previous slot. */
2334 if (from_reg == -1)
2335 {
2336 /* No known place to spill from => no slot to reuse. */
2337 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2338#if BYTES_BIG_ENDIAN
2339 /* Cancel the big-endian correction done in assign_stack_local.
2340 Get the address of the beginning of the slot.
2341 This is so we can do a big-endian correction unconditionally
2342 below. */
2343 adjust = inherent_size - total_size;
2344#endif
2345 }
2346 /* Reuse a stack slot if possible. */
2347 else if (spill_stack_slot[from_reg] != 0
2348 && spill_stack_slot_width[from_reg] >= total_size
2349 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2350 >= inherent_size))
2351 x = spill_stack_slot[from_reg];
2352 /* Allocate a bigger slot. */
2353 else
2354 {
2355 /* Compute maximum size needed, both for inherent size
2356 and for total size. */
2357 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2358 if (spill_stack_slot[from_reg])
2359 {
2360 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2361 > inherent_size)
2362 mode = GET_MODE (spill_stack_slot[from_reg]);
2363 if (spill_stack_slot_width[from_reg] > total_size)
2364 total_size = spill_stack_slot_width[from_reg];
2365 }
2366 /* Make a slot with that size. */
2367 x = assign_stack_local (mode, total_size, -1);
2368#if BYTES_BIG_ENDIAN
2369 /* Cancel the big-endian correction done in assign_stack_local.
2370 Get the address of the beginning of the slot.
2371 This is so we can do a big-endian correction unconditionally
2372 below. */
2373 adjust = GET_MODE_SIZE (mode) - total_size;
2374#endif
2375 spill_stack_slot[from_reg] = x;
2376 spill_stack_slot_width[from_reg] = total_size;
2377 }
2378
2379#if BYTES_BIG_ENDIAN
2380 /* On a big endian machine, the "address" of the slot
2381 is the address of the low part that fits its inherent mode. */
2382 if (inherent_size < total_size)
2383 adjust += (total_size - inherent_size);
2384#endif /* BYTES_BIG_ENDIAN */
2385
2386 /* If we have any adjustment to make, or if the stack slot is the
2387 wrong mode, make a new stack slot. */
2388 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2389 {
2390 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2391 plus_constant (XEXP (x, 0), adjust));
2392 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2393 }
2394
2395 /* Save the stack slot for later. */
2396 reg_equiv_memory_loc[i] = x;
2397 }
2398}
2399
2400/* Mark the slots in regs_ever_live for the hard regs
2401 used by pseudo-reg number REGNO. */
2402
2403void
2404mark_home_live (regno)
2405 int regno;
2406{
2407 register int i, lim;
2408 i = reg_renumber[regno];
2409 if (i < 0)
2410 return;
2411 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2412 while (i < lim)
2413 regs_ever_live[i++] = 1;
2414}
c307c237
RK
2415
2416/* Mark the registers used in SCRATCH as being live. */
2417
2418static void
2419mark_scratch_live (scratch)
2420 rtx scratch;
2421{
2422 register int i;
2423 int regno = REGNO (scratch);
2424 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2425
2426 for (i = regno; i < lim; i++)
2427 regs_ever_live[i] = 1;
2428}
32131a9c
RK
2429\f
2430/* This function handles the tracking of elimination offsets around branches.
2431
2432 X is a piece of RTL being scanned.
2433
2434 INSN is the insn that it came from, if any.
2435
2436 INITIAL_P is non-zero if we are to set the offset to be the initial
2437 offset and zero if we are setting the offset of the label to be the
2438 current offset. */
2439
2440static void
2441set_label_offsets (x, insn, initial_p)
2442 rtx x;
2443 rtx insn;
2444 int initial_p;
2445{
2446 enum rtx_code code = GET_CODE (x);
2447 rtx tem;
2448 int i;
2449 struct elim_table *p;
2450
2451 switch (code)
2452 {
2453 case LABEL_REF:
8be386d9
RS
2454 if (LABEL_REF_NONLOCAL_P (x))
2455 return;
2456
32131a9c
RK
2457 x = XEXP (x, 0);
2458
2459 /* ... fall through ... */
2460
2461 case CODE_LABEL:
2462 /* If we know nothing about this label, set the desired offsets. Note
2463 that this sets the offset at a label to be the offset before a label
2464 if we don't know anything about the label. This is not correct for
2465 the label after a BARRIER, but is the best guess we can make. If
2466 we guessed wrong, we will suppress an elimination that might have
2467 been possible had we been able to guess correctly. */
2468
2469 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2470 {
2471 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2472 offsets_at[CODE_LABEL_NUMBER (x)][i]
2473 = (initial_p ? reg_eliminate[i].initial_offset
2474 : reg_eliminate[i].offset);
2475 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2476 }
2477
2478 /* Otherwise, if this is the definition of a label and it is
d45cf215 2479 preceded by a BARRIER, set our offsets to the known offset of
32131a9c
RK
2480 that label. */
2481
2482 else if (x == insn
2483 && (tem = prev_nonnote_insn (insn)) != 0
2484 && GET_CODE (tem) == BARRIER)
2a4b5f3b
RK
2485 {
2486 num_not_at_initial_offset = 0;
2487 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2488 {
2489 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2490 = offsets_at[CODE_LABEL_NUMBER (x)][i];
1d0d98f3
RK
2491 if (reg_eliminate[i].can_eliminate
2492 && (reg_eliminate[i].offset
2493 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
2494 num_not_at_initial_offset++;
2495 }
2496 }
32131a9c
RK
2497
2498 else
2499 /* If neither of the above cases is true, compare each offset
2500 with those previously recorded and suppress any eliminations
2501 where the offsets disagree. */
a8fdc208 2502
32131a9c
RK
2503 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2504 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2505 != (initial_p ? reg_eliminate[i].initial_offset
2506 : reg_eliminate[i].offset))
2507 reg_eliminate[i].can_eliminate = 0;
2508
2509 return;
2510
2511 case JUMP_INSN:
2512 set_label_offsets (PATTERN (insn), insn, initial_p);
2513
2514 /* ... fall through ... */
2515
2516 case INSN:
2517 case CALL_INSN:
2518 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2519 and hence must have all eliminations at their initial offsets. */
2520 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2521 if (REG_NOTE_KIND (tem) == REG_LABEL)
2522 set_label_offsets (XEXP (tem, 0), insn, 1);
2523 return;
2524
2525 case ADDR_VEC:
2526 case ADDR_DIFF_VEC:
2527 /* Each of the labels in the address vector must be at their initial
2528 offsets. We want the first first for ADDR_VEC and the second
2529 field for ADDR_DIFF_VEC. */
2530
2531 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2532 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2533 insn, initial_p);
2534 return;
2535
2536 case SET:
2537 /* We only care about setting PC. If the source is not RETURN,
2538 IF_THEN_ELSE, or a label, disable any eliminations not at
2539 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2540 isn't one of those possibilities. For branches to a label,
2541 call ourselves recursively.
2542
2543 Note that this can disable elimination unnecessarily when we have
2544 a non-local goto since it will look like a non-constant jump to
2545 someplace in the current function. This isn't a significant
2546 problem since such jumps will normally be when all elimination
2547 pairs are back to their initial offsets. */
2548
2549 if (SET_DEST (x) != pc_rtx)
2550 return;
2551
2552 switch (GET_CODE (SET_SRC (x)))
2553 {
2554 case PC:
2555 case RETURN:
2556 return;
2557
2558 case LABEL_REF:
2559 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2560 return;
2561
2562 case IF_THEN_ELSE:
2563 tem = XEXP (SET_SRC (x), 1);
2564 if (GET_CODE (tem) == LABEL_REF)
2565 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2566 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2567 break;
2568
2569 tem = XEXP (SET_SRC (x), 2);
2570 if (GET_CODE (tem) == LABEL_REF)
2571 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2572 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2573 break;
2574 return;
2575 }
2576
2577 /* If we reach here, all eliminations must be at their initial
2578 offset because we are doing a jump to a variable address. */
2579 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2580 if (p->offset != p->initial_offset)
2581 p->can_eliminate = 0;
2582 }
2583}
2584\f
2585/* Used for communication between the next two function to properly share
2586 the vector for an ASM_OPERANDS. */
2587
2588static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2589
a8fdc208 2590/* Scan X and replace any eliminable registers (such as fp) with a
32131a9c
RK
2591 replacement (such as sp), plus an offset.
2592
2593 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2594 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2595 MEM, we are allowed to replace a sum of a register and the constant zero
2596 with the register, which we cannot do outside a MEM. In addition, we need
2597 to record the fact that a register is referenced outside a MEM.
2598
ff32812a 2599 If INSN is an insn, it is the insn containing X. If we replace a REG
32131a9c
RK
2600 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2601 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2602 that the REG is being modified.
2603
ff32812a
RS
2604 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2605 That's used when we eliminate in expressions stored in notes.
2606 This means, do not set ref_outside_mem even if the reference
2607 is outside of MEMs.
2608
32131a9c
RK
2609 If we see a modification to a register we know about, take the
2610 appropriate action (see case SET, below).
2611
2612 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2613 replacements done assuming all offsets are at their initial values. If
2614 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2615 encounter, return the actual location so that find_reloads will do
2616 the proper thing. */
2617
2618rtx
2619eliminate_regs (x, mem_mode, insn)
2620 rtx x;
2621 enum machine_mode mem_mode;
2622 rtx insn;
2623{
2624 enum rtx_code code = GET_CODE (x);
2625 struct elim_table *ep;
2626 int regno;
2627 rtx new;
2628 int i, j;
2629 char *fmt;
2630 int copied = 0;
2631
2632 switch (code)
2633 {
2634 case CONST_INT:
2635 case CONST_DOUBLE:
2636 case CONST:
2637 case SYMBOL_REF:
2638 case CODE_LABEL:
2639 case PC:
2640 case CC0:
2641 case ASM_INPUT:
2642 case ADDR_VEC:
2643 case ADDR_DIFF_VEC:
2644 case RETURN:
2645 return x;
2646
2647 case REG:
2648 regno = REGNO (x);
2649
2650 /* First handle the case where we encounter a bare register that
2651 is eliminable. Replace it with a PLUS. */
2652 if (regno < FIRST_PSEUDO_REGISTER)
2653 {
2654 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2655 ep++)
2656 if (ep->from_rtx == x && ep->can_eliminate)
2657 {
ff32812a
RS
2658 if (! mem_mode
2659 /* Refs inside notes don't count for this purpose. */
fe089a90 2660 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
ff32812a 2661 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2662 ep->ref_outside_mem = 1;
2663 return plus_constant (ep->to_rtx, ep->previous_offset);
2664 }
2665
2666 }
2667 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2668 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2669 {
2670 /* In this case, find_reloads would attempt to either use an
2671 incorrect address (if something is not at its initial offset)
2672 or substitute an replaced address into an insn (which loses
2673 if the offset is changed by some later action). So we simply
2674 return the replaced stack slot (assuming it is changed by
2675 elimination) and ignore the fact that this is actually a
2676 reference to the pseudo. Ensure we make a copy of the
2677 address in case it is shared. */
fb3821f7 2678 new = eliminate_regs (reg_equiv_memory_loc[regno],
e5687447 2679 mem_mode, insn);
32131a9c 2680 if (new != reg_equiv_memory_loc[regno])
208dffa5
RS
2681 {
2682 cannot_omit_stores[regno] = 1;
2683 return copy_rtx (new);
2684 }
32131a9c
RK
2685 }
2686 return x;
2687
2688 case PLUS:
2689 /* If this is the sum of an eliminable register and a constant, rework
2690 the sum. */
2691 if (GET_CODE (XEXP (x, 0)) == REG
2692 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2693 && CONSTANT_P (XEXP (x, 1)))
2694 {
2695 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2696 ep++)
2697 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2698 {
e5687447
JW
2699 if (! mem_mode
2700 /* Refs inside notes don't count for this purpose. */
2701 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2702 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2703 ep->ref_outside_mem = 1;
2704
2705 /* The only time we want to replace a PLUS with a REG (this
2706 occurs when the constant operand of the PLUS is the negative
2707 of the offset) is when we are inside a MEM. We won't want
2708 to do so at other times because that would change the
2709 structure of the insn in a way that reload can't handle.
2710 We special-case the commonest situation in
2711 eliminate_regs_in_insn, so just replace a PLUS with a
2712 PLUS here, unless inside a MEM. */
a23b64d5 2713 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
32131a9c
RK
2714 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2715 return ep->to_rtx;
2716 else
2717 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2718 plus_constant (XEXP (x, 1),
2719 ep->previous_offset));
2720 }
2721
2722 /* If the register is not eliminable, we are done since the other
2723 operand is a constant. */
2724 return x;
2725 }
2726
2727 /* If this is part of an address, we want to bring any constant to the
2728 outermost PLUS. We will do this by doing register replacement in
2729 our operands and seeing if a constant shows up in one of them.
2730
2731 We assume here this is part of an address (or a "load address" insn)
2732 since an eliminable register is not likely to appear in any other
2733 context.
2734
2735 If we have (plus (eliminable) (reg)), we want to produce
2736 (plus (plus (replacement) (reg) (const))). If this was part of a
2737 normal add insn, (plus (replacement) (reg)) will be pushed as a
2738 reload. This is the desired action. */
2739
2740 {
e5687447
JW
2741 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2742 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
32131a9c
RK
2743
2744 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2745 {
2746 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2747 didn't get a hard register but has a reg_equiv_constant,
32131a9c
RK
2748 we must replace the constant here since it may no longer
2749 be in the position of any operand. */
2750 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2751 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2752 && reg_renumber[REGNO (new1)] < 0
2753 && reg_equiv_constant != 0
2754 && reg_equiv_constant[REGNO (new1)] != 0)
2755 new1 = reg_equiv_constant[REGNO (new1)];
2756 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2757 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2758 && reg_renumber[REGNO (new0)] < 0
2759 && reg_equiv_constant[REGNO (new0)] != 0)
2760 new0 = reg_equiv_constant[REGNO (new0)];
2761
2762 new = form_sum (new0, new1);
2763
2764 /* As above, if we are not inside a MEM we do not want to
2765 turn a PLUS into something else. We might try to do so here
2766 for an addition of 0 if we aren't optimizing. */
2767 if (! mem_mode && GET_CODE (new) != PLUS)
2768 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2769 else
2770 return new;
2771 }
2772 }
2773 return x;
2774
2775 case EXPR_LIST:
2776 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2777 if (XEXP (x, 0))
2778 {
e5687447 2779 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
32131a9c
RK
2780 if (new != XEXP (x, 0))
2781 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2782 }
2783
2784 /* ... fall through ... */
2785
2786 case INSN_LIST:
2787 /* Now do eliminations in the rest of the chain. If this was
2788 an EXPR_LIST, this might result in allocating more memory than is
2789 strictly needed, but it simplifies the code. */
2790 if (XEXP (x, 1))
2791 {
e5687447 2792 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
32131a9c
RK
2793 if (new != XEXP (x, 1))
2794 return gen_rtx (INSN_LIST, GET_MODE (x), XEXP (x, 0), new);
2795 }
2796 return x;
2797
2798 case CALL:
2799 case COMPARE:
2800 case MINUS:
2801 case MULT:
2802 case DIV: case UDIV:
2803 case MOD: case UMOD:
2804 case AND: case IOR: case XOR:
2805 case LSHIFT: case ASHIFT: case ROTATE:
2806 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2807 case NE: case EQ:
2808 case GE: case GT: case GEU: case GTU:
2809 case LE: case LT: case LEU: case LTU:
2810 {
e5687447 2811 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
fb3821f7 2812 rtx new1
e5687447 2813 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
32131a9c
RK
2814
2815 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2816 return gen_rtx (code, GET_MODE (x), new0, new1);
2817 }
2818 return x;
2819
2820 case PRE_INC:
2821 case POST_INC:
2822 case PRE_DEC:
2823 case POST_DEC:
2824 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2825 if (ep->to_rtx == XEXP (x, 0))
2826 {
4c05b187
RK
2827 int size = GET_MODE_SIZE (mem_mode);
2828
2829 /* If more bytes than MEM_MODE are pushed, account for them. */
2830#ifdef PUSH_ROUNDING
2831 if (ep->to_rtx == stack_pointer_rtx)
2832 size = PUSH_ROUNDING (size);
2833#endif
32131a9c 2834 if (code == PRE_DEC || code == POST_DEC)
4c05b187 2835 ep->offset += size;
32131a9c 2836 else
4c05b187 2837 ep->offset -= size;
32131a9c
RK
2838 }
2839
2840 /* Fall through to generic unary operation case. */
2841 case USE:
2842 case STRICT_LOW_PART:
2843 case NEG: case NOT:
2844 case SIGN_EXTEND: case ZERO_EXTEND:
2845 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2846 case FLOAT: case FIX:
2847 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2848 case ABS:
2849 case SQRT:
2850 case FFS:
e5687447 2851 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
32131a9c
RK
2852 if (new != XEXP (x, 0))
2853 return gen_rtx (code, GET_MODE (x), new);
2854 return x;
2855
2856 case SUBREG:
2857 /* Similar to above processing, but preserve SUBREG_WORD.
2858 Convert (subreg (mem)) to (mem) if not paradoxical.
2859 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2860 pseudo didn't get a hard reg, we must replace this with the
2861 eliminated version of the memory location because push_reloads
2862 may do the replacement in certain circumstances. */
2863 if (GET_CODE (SUBREG_REG (x)) == REG
2864 && (GET_MODE_SIZE (GET_MODE (x))
2865 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2866 && reg_equiv_memory_loc != 0
2867 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2868 {
2869 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
e5687447 2870 mem_mode, insn);
32131a9c
RK
2871
2872 /* If we didn't change anything, we must retain the pseudo. */
2873 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2874 new = XEXP (x, 0);
2875 else
2876 /* Otherwise, ensure NEW isn't shared in case we have to reload
2877 it. */
2878 new = copy_rtx (new);
2879 }
2880 else
e5687447 2881 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
32131a9c
RK
2882
2883 if (new != XEXP (x, 0))
2884 {
2885 if (GET_CODE (new) == MEM
2886 && (GET_MODE_SIZE (GET_MODE (x))
a3b75c07 2887 <= GET_MODE_SIZE (GET_MODE (new)))
e90d3cbb 2888#ifdef LOAD_EXTEND_OP
a3b75c07
RS
2889 /* On these machines we will be reloading what is
2890 inside the SUBREG if it originally was a pseudo and
2891 the inner and outer modes are both a word or
2892 smaller. So leave the SUBREG then. */
2893 && ! (GET_CODE (SUBREG_REG (x)) == REG
2894 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2895 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
2896#endif
2897 )
32131a9c
RK
2898 {
2899 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2900 enum machine_mode mode = GET_MODE (x);
2901
2902#if BYTES_BIG_ENDIAN
2903 offset += (MIN (UNITS_PER_WORD,
2904 GET_MODE_SIZE (GET_MODE (new)))
2905 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2906#endif
2907
2908 PUT_MODE (new, mode);
2909 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2910 return new;
2911 }
2912 else
2913 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2914 }
2915
2916 return x;
2917
2918 case CLOBBER:
2919 /* If clobbering a register that is the replacement register for an
d45cf215 2920 elimination we still think can be performed, note that it cannot
32131a9c
RK
2921 be performed. Otherwise, we need not be concerned about it. */
2922 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2923 if (ep->to_rtx == XEXP (x, 0))
2924 ep->can_eliminate = 0;
2925
e5687447 2926 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2045084c
JVA
2927 if (new != XEXP (x, 0))
2928 return gen_rtx (code, GET_MODE (x), new);
32131a9c
RK
2929 return x;
2930
2931 case ASM_OPERANDS:
2932 {
2933 rtx *temp_vec;
2934 /* Properly handle sharing input and constraint vectors. */
2935 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2936 {
2937 /* When we come to a new vector not seen before,
2938 scan all its elements; keep the old vector if none
2939 of them changes; otherwise, make a copy. */
2940 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2941 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2942 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2943 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
e5687447 2944 mem_mode, insn);
32131a9c
RK
2945
2946 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2947 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2948 break;
2949
2950 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2951 new_asm_operands_vec = old_asm_operands_vec;
2952 else
2953 new_asm_operands_vec
2954 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2955 }
2956
2957 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2958 if (new_asm_operands_vec == old_asm_operands_vec)
2959 return x;
2960
2961 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2962 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2963 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2964 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2965 ASM_OPERANDS_SOURCE_FILE (x),
2966 ASM_OPERANDS_SOURCE_LINE (x));
2967 new->volatil = x->volatil;
2968 return new;
2969 }
2970
2971 case SET:
2972 /* Check for setting a register that we know about. */
2973 if (GET_CODE (SET_DEST (x)) == REG)
2974 {
2975 /* See if this is setting the replacement register for an
a8fdc208 2976 elimination.
32131a9c 2977
3ec2ea3e
DE
2978 If DEST is the hard frame pointer, we do nothing because we
2979 assume that all assignments to the frame pointer are for
2980 non-local gotos and are being done at a time when they are valid
2981 and do not disturb anything else. Some machines want to
2982 eliminate a fake argument pointer (or even a fake frame pointer)
2983 with either the real frame or the stack pointer. Assignments to
2984 the hard frame pointer must not prevent this elimination. */
32131a9c
RK
2985
2986 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2987 ep++)
2988 if (ep->to_rtx == SET_DEST (x)
3ec2ea3e 2989 && SET_DEST (x) != hard_frame_pointer_rtx)
32131a9c 2990 {
6dc42e49 2991 /* If it is being incremented, adjust the offset. Otherwise,
32131a9c
RK
2992 this elimination can't be done. */
2993 rtx src = SET_SRC (x);
2994
2995 if (GET_CODE (src) == PLUS
2996 && XEXP (src, 0) == SET_DEST (x)
2997 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2998 ep->offset -= INTVAL (XEXP (src, 1));
2999 else
3000 ep->can_eliminate = 0;
3001 }
3002
3003 /* Now check to see we are assigning to a register that can be
3004 eliminated. If so, it must be as part of a PARALLEL, since we
3005 will not have been called if this is a single SET. So indicate
3006 that we can no longer eliminate this reg. */
3007 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3008 ep++)
3009 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3010 ep->can_eliminate = 0;
3011 }
3012
3013 /* Now avoid the loop below in this common case. */
3014 {
e5687447
JW
3015 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3016 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
32131a9c 3017
ff32812a 3018 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
32131a9c
RK
3019 write a CLOBBER insn. */
3020 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
572ca60a
RS
3021 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3022 && GET_CODE (insn) != INSN_LIST)
32131a9c
RK
3023 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3024
3025 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3026 return gen_rtx (SET, VOIDmode, new0, new1);
3027 }
3028
3029 return x;
3030
3031 case MEM:
3032 /* Our only special processing is to pass the mode of the MEM to our
3033 recursive call and copy the flags. While we are here, handle this
3034 case more efficiently. */
e5687447 3035 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
32131a9c
RK
3036 if (new != XEXP (x, 0))
3037 {
3038 new = gen_rtx (MEM, GET_MODE (x), new);
3039 new->volatil = x->volatil;
3040 new->unchanging = x->unchanging;
3041 new->in_struct = x->in_struct;
3042 return new;
3043 }
3044 else
3045 return x;
3046 }
3047
3048 /* Process each of our operands recursively. If any have changed, make a
3049 copy of the rtx. */
3050 fmt = GET_RTX_FORMAT (code);
3051 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3052 {
3053 if (*fmt == 'e')
3054 {
e5687447 3055 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
32131a9c
RK
3056 if (new != XEXP (x, i) && ! copied)
3057 {
3058 rtx new_x = rtx_alloc (code);
3059 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3060 + (sizeof (new_x->fld[0])
3061 * GET_RTX_LENGTH (code))));
3062 x = new_x;
3063 copied = 1;
3064 }
3065 XEXP (x, i) = new;
3066 }
3067 else if (*fmt == 'E')
3068 {
3069 int copied_vec = 0;
3070 for (j = 0; j < XVECLEN (x, i); j++)
3071 {
3072 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3073 if (new != XVECEXP (x, i, j) && ! copied_vec)
3074 {
3075 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3076 &XVECEXP (x, i, 0));
3077 if (! copied)
3078 {
3079 rtx new_x = rtx_alloc (code);
3080 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3081 + (sizeof (new_x->fld[0])
3082 * GET_RTX_LENGTH (code))));
3083 x = new_x;
3084 copied = 1;
3085 }
3086 XVEC (x, i) = new_v;
3087 copied_vec = 1;
3088 }
3089 XVECEXP (x, i, j) = new;
3090 }
3091 }
3092 }
3093
3094 return x;
3095}
3096\f
3097/* Scan INSN and eliminate all eliminable registers in it.
3098
3099 If REPLACE is nonzero, do the replacement destructively. Also
3100 delete the insn as dead it if it is setting an eliminable register.
3101
3102 If REPLACE is zero, do all our allocations in reload_obstack.
3103
3104 If no eliminations were done and this insn doesn't require any elimination
3105 processing (these are not identical conditions: it might be updating sp,
3106 but not referencing fp; this needs to be seen during reload_as_needed so
3107 that the offset between fp and sp can be taken into consideration), zero
3108 is returned. Otherwise, 1 is returned. */
3109
3110static int
3111eliminate_regs_in_insn (insn, replace)
3112 rtx insn;
3113 int replace;
3114{
3115 rtx old_body = PATTERN (insn);
3116 rtx new_body;
3117 int val = 0;
3118 struct elim_table *ep;
3119
3120 if (! replace)
3121 push_obstacks (&reload_obstack, &reload_obstack);
3122
3123 if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
3124 && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
3125 {
3126 /* Check for setting an eliminable register. */
3127 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3128 if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
3129 {
3130 /* In this case this insn isn't serving a useful purpose. We
3131 will delete it in reload_as_needed once we know that this
3132 elimination is, in fact, being done.
3133
3134 If REPLACE isn't set, we can't delete this insn, but neededn't
3135 process it since it won't be used unless something changes. */
3136 if (replace)
3137 delete_dead_insn (insn);
3138 val = 1;
3139 goto done;
3140 }
3141
3142 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3143 in the insn is the negative of the offset in FROM. Substitute
3144 (set (reg) (reg to)) for the insn and change its code.
3145
3146 We have to do this here, rather than in eliminate_regs, do that we can
3147 change the insn code. */
3148
3149 if (GET_CODE (SET_SRC (old_body)) == PLUS
3150 && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
3151 && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
3152 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3153 ep++)
3154 if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
922d9d40 3155 && ep->can_eliminate)
32131a9c 3156 {
922d9d40
RK
3157 /* We must stop at the first elimination that will be used.
3158 If this one would replace the PLUS with a REG, do it
3159 now. Otherwise, quit the loop and let eliminate_regs
3160 do its normal replacement. */
3161 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
3162 {
3163 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3164 SET_DEST (old_body), ep->to_rtx);
3165 INSN_CODE (insn) = -1;
3166 val = 1;
3167 goto done;
3168 }
3169
3170 break;
32131a9c
RK
3171 }
3172 }
3173
3174 old_asm_operands_vec = 0;
3175
3176 /* Replace the body of this insn with a substituted form. If we changed
05b4c365 3177 something, return non-zero.
32131a9c
RK
3178
3179 If we are replacing a body that was a (set X (plus Y Z)), try to
3180 re-recognize the insn. We do this in case we had a simple addition
3181 but now can do this as a load-address. This saves an insn in this
3182 common case. */
3183
fb3821f7 3184 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
32131a9c
RK
3185 if (new_body != old_body)
3186 {
7c791b13
RK
3187 /* If we aren't replacing things permanently and we changed something,
3188 make another copy to ensure that all the RTL is new. Otherwise
3189 things can go wrong if find_reload swaps commutative operands
3190 and one is inside RTL that has been copied while the other is not. */
3191
4d411872
RS
3192 /* Don't copy an asm_operands because (1) there's no need and (2)
3193 copy_rtx can't do it properly when there are multiple outputs. */
b84f9d9c 3194 if (! replace && asm_noperands (old_body) < 0)
7c791b13
RK
3195 new_body = copy_rtx (new_body);
3196
4a5d0fb5 3197 /* If we had a move insn but now we don't, rerecognize it. */
0ba846c7
RS
3198 if ((GET_CODE (old_body) == SET && GET_CODE (SET_SRC (old_body)) == REG
3199 && (GET_CODE (new_body) != SET
3200 || GET_CODE (SET_SRC (new_body)) != REG))
3201 /* If this was an add insn before, rerecognize. */
3202 ||
3203 (GET_CODE (old_body) == SET
3204 && GET_CODE (SET_SRC (old_body)) == PLUS))
4a5d0fb5
RS
3205 {
3206 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
0ba846c7
RS
3207 /* If recognition fails, store the new body anyway.
3208 It's normal to have recognition failures here
3209 due to bizarre memory addresses; reloading will fix them. */
3210 PATTERN (insn) = new_body;
4a5d0fb5 3211 }
0ba846c7 3212 else
32131a9c
RK
3213 PATTERN (insn) = new_body;
3214
32131a9c
RK
3215 val = 1;
3216 }
a8fdc208 3217
32131a9c
RK
3218 /* Loop through all elimination pairs. See if any have changed and
3219 recalculate the number not at initial offset.
3220
a8efe40d
RK
3221 Compute the maximum offset (minimum offset if the stack does not
3222 grow downward) for each elimination pair.
3223
32131a9c
RK
3224 We also detect a cases where register elimination cannot be done,
3225 namely, if a register would be both changed and referenced outside a MEM
3226 in the resulting insn since such an insn is often undefined and, even if
3227 not, we cannot know what meaning will be given to it. Note that it is
3228 valid to have a register used in an address in an insn that changes it
3229 (presumably with a pre- or post-increment or decrement).
3230
3231 If anything changes, return nonzero. */
3232
3233 num_not_at_initial_offset = 0;
3234 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3235 {
3236 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3237 ep->can_eliminate = 0;
3238
3239 ep->ref_outside_mem = 0;
3240
3241 if (ep->previous_offset != ep->offset)
3242 val = 1;
3243
3244 ep->previous_offset = ep->offset;
3245 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3246 num_not_at_initial_offset++;
a8efe40d
RK
3247
3248#ifdef STACK_GROWS_DOWNWARD
3249 ep->max_offset = MAX (ep->max_offset, ep->offset);
3250#else
3251 ep->max_offset = MIN (ep->max_offset, ep->offset);
3252#endif
32131a9c
RK
3253 }
3254
3255 done:
05b4c365
RK
3256 /* If we changed something, perform elmination in REG_NOTES. This is
3257 needed even when REPLACE is zero because a REG_DEAD note might refer
3258 to a register that we eliminate and could cause a different number
3259 of spill registers to be needed in the final reload pass than in
3260 the pre-passes. */
20748cab 3261 if (val && REG_NOTES (insn) != 0)
ff32812a 3262 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
05b4c365 3263
32131a9c
RK
3264 if (! replace)
3265 pop_obstacks ();
3266
3267 return val;
3268}
3269
3270/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3271 replacement we currently believe is valid, mark it as not eliminable if X
3272 modifies DEST in any way other than by adding a constant integer to it.
3273
3274 If DEST is the frame pointer, we do nothing because we assume that
3ec2ea3e
DE
3275 all assignments to the hard frame pointer are nonlocal gotos and are being
3276 done at a time when they are valid and do not disturb anything else.
32131a9c 3277 Some machines want to eliminate a fake argument pointer with either the
3ec2ea3e
DE
3278 frame or stack pointer. Assignments to the hard frame pointer must not
3279 prevent this elimination.
32131a9c
RK
3280
3281 Called via note_stores from reload before starting its passes to scan
3282 the insns of the function. */
3283
3284static void
3285mark_not_eliminable (dest, x)
3286 rtx dest;
3287 rtx x;
3288{
3289 register int i;
3290
3291 /* A SUBREG of a hard register here is just changing its mode. We should
3292 not see a SUBREG of an eliminable hard register, but check just in
3293 case. */
3294 if (GET_CODE (dest) == SUBREG)
3295 dest = SUBREG_REG (dest);
3296
3ec2ea3e 3297 if (dest == hard_frame_pointer_rtx)
32131a9c
RK
3298 return;
3299
3300 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3301 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3302 && (GET_CODE (x) != SET
3303 || GET_CODE (SET_SRC (x)) != PLUS
3304 || XEXP (SET_SRC (x), 0) != dest
3305 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3306 {
3307 reg_eliminate[i].can_eliminate_previous
3308 = reg_eliminate[i].can_eliminate = 0;
3309 num_eliminable--;
3310 }
3311}
3312\f
3313/* Kick all pseudos out of hard register REGNO.
3314 If GLOBAL is nonzero, try to find someplace else to put them.
3315 If DUMPFILE is nonzero, log actions taken on that file.
3316
3317 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3318 because we found we can't eliminate some register. In the case, no pseudos
3319 are allowed to be in the register, even if they are only in a block that
3320 doesn't require spill registers, unlike the case when we are spilling this
3321 hard reg to produce another spill register.
3322
3323 Return nonzero if any pseudos needed to be kicked out. */
3324
3325static int
3326spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3327 register int regno;
3328 int global;
3329 FILE *dumpfile;
3330 int cant_eliminate;
3331{
c307c237 3332 enum reg_class class = REGNO_REG_CLASS (regno);
32131a9c
RK
3333 int something_changed = 0;
3334 register int i;
3335
3336 SET_HARD_REG_BIT (forbidden_regs, regno);
3337
3338 /* Spill every pseudo reg that was allocated to this reg
3339 or to something that overlaps this reg. */
3340
3341 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3342 if (reg_renumber[i] >= 0
3343 && reg_renumber[i] <= regno
a8fdc208 3344 && (reg_renumber[i]
32131a9c
RK
3345 + HARD_REGNO_NREGS (reg_renumber[i],
3346 PSEUDO_REGNO_MODE (i))
3347 > regno))
3348 {
32131a9c
RK
3349 /* If this register belongs solely to a basic block which needed no
3350 spilling of any class that this register is contained in,
3351 leave it be, unless we are spilling this register because
3352 it was a hard register that can't be eliminated. */
3353
3354 if (! cant_eliminate
3355 && basic_block_needs[0]
3356 && reg_basic_block[i] >= 0
3357 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3358 {
3359 enum reg_class *p;
3360
3361 for (p = reg_class_superclasses[(int) class];
3362 *p != LIM_REG_CLASSES; p++)
3363 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3364 break;
a8fdc208 3365
32131a9c
RK
3366 if (*p == LIM_REG_CLASSES)
3367 continue;
3368 }
3369
3370 /* Mark it as no longer having a hard register home. */
3371 reg_renumber[i] = -1;
3372 /* We will need to scan everything again. */
3373 something_changed = 1;
3374 if (global)
3375 retry_global_alloc (i, forbidden_regs);
3376
3377 alter_reg (i, regno);
3378 if (dumpfile)
3379 {
3380 if (reg_renumber[i] == -1)
3381 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3382 else
3383 fprintf (dumpfile, " Register %d now in %d.\n\n",
3384 i, reg_renumber[i]);
3385 }
3386 }
c307c237
RK
3387 for (i = 0; i < scratch_list_length; i++)
3388 {
3389 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3390 {
3391 if (! cant_eliminate && basic_block_needs[0]
3392 && ! basic_block_needs[(int) class][scratch_block[i]])
3393 {
3394 enum reg_class *p;
3395
3396 for (p = reg_class_superclasses[(int) class];
3397 *p != LIM_REG_CLASSES; p++)
3398 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3399 break;
3400
3401 if (*p == LIM_REG_CLASSES)
3402 continue;
3403 }
3404 PUT_CODE (scratch_list[i], SCRATCH);
3405 scratch_list[i] = 0;
3406 something_changed = 1;
3407 continue;
3408 }
3409 }
32131a9c
RK
3410
3411 return something_changed;
3412}
3413\f
3414/* Find all paradoxical subregs within X and update reg_max_ref_width. */
3415
3416static void
3417scan_paradoxical_subregs (x)
3418 register rtx x;
3419{
3420 register int i;
3421 register char *fmt;
3422 register enum rtx_code code = GET_CODE (x);
3423
3424 switch (code)
3425 {
3426 case CONST_INT:
3427 case CONST:
3428 case SYMBOL_REF:
3429 case LABEL_REF:
3430 case CONST_DOUBLE:
3431 case CC0:
3432 case PC:
3433 case REG:
3434 case USE:
3435 case CLOBBER:
3436 return;
3437
3438 case SUBREG:
3439 if (GET_CODE (SUBREG_REG (x)) == REG
3440 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3441 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3442 = GET_MODE_SIZE (GET_MODE (x));
3443 return;
3444 }
3445
3446 fmt = GET_RTX_FORMAT (code);
3447 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3448 {
3449 if (fmt[i] == 'e')
3450 scan_paradoxical_subregs (XEXP (x, i));
3451 else if (fmt[i] == 'E')
3452 {
3453 register int j;
3454 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3455 scan_paradoxical_subregs (XVECEXP (x, i, j));
3456 }
3457 }
3458}
3459\f
32131a9c
RK
3460static int
3461hard_reg_use_compare (p1, p2)
3462 struct hard_reg_n_uses *p1, *p2;
3463{
3464 int tem = p1->uses - p2->uses;
3465 if (tem != 0) return tem;
3466 /* If regs are equally good, sort by regno,
3467 so that the results of qsort leave nothing to chance. */
3468 return p1->regno - p2->regno;
3469}
3470
3471/* Choose the order to consider regs for use as reload registers
3472 based on how much trouble would be caused by spilling one.
3473 Store them in order of decreasing preference in potential_reload_regs. */
3474
3475static void
3476order_regs_for_reload ()
3477{
3478 register int i;
3479 register int o = 0;
3480 int large = 0;
3481
3482 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3483
3484 CLEAR_HARD_REG_SET (bad_spill_regs);
3485
3486 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3487 potential_reload_regs[i] = -1;
3488
3489 /* Count number of uses of each hard reg by pseudo regs allocated to it
3490 and then order them by decreasing use. */
3491
3492 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3493 {
3494 hard_reg_n_uses[i].uses = 0;
3495 hard_reg_n_uses[i].regno = i;
3496 }
3497
3498 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3499 {
3500 int regno = reg_renumber[i];
3501 if (regno >= 0)
3502 {
3503 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3504 while (regno < lim)
3505 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3506 }
3507 large += reg_n_refs[i];
3508 }
3509
3510 /* Now fixed registers (which cannot safely be used for reloading)
3511 get a very high use count so they will be considered least desirable.
3512 Registers used explicitly in the rtl code are almost as bad. */
3513
3514 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3515 {
3516 if (fixed_regs[i])
3517 {
3518 hard_reg_n_uses[i].uses += 2 * large + 2;
3519 SET_HARD_REG_BIT (bad_spill_regs, i);
3520 }
3521 else if (regs_explicitly_used[i])
3522 {
3523 hard_reg_n_uses[i].uses += large + 1;
546b63fb 3524#ifndef SMALL_REGISTER_CLASSES
32131a9c
RK
3525 /* ??? We are doing this here because of the potential that
3526 bad code may be generated if a register explicitly used in
3527 an insn was used as a spill register for that insn. But
3528 not using these are spill registers may lose on some machine.
3529 We'll have to see how this works out. */
3530 SET_HARD_REG_BIT (bad_spill_regs, i);
546b63fb 3531#endif
32131a9c
RK
3532 }
3533 }
3ec2ea3e
DE
3534 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3535 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
32131a9c
RK
3536
3537#ifdef ELIMINABLE_REGS
3538 /* If registers other than the frame pointer are eliminable, mark them as
3539 poor choices. */
3540 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3541 {
3542 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3543 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3544 }
3545#endif
3546
3547 /* Prefer registers not so far used, for use in temporary loading.
3548 Among them, if REG_ALLOC_ORDER is defined, use that order.
3549 Otherwise, prefer registers not preserved by calls. */
3550
3551#ifdef REG_ALLOC_ORDER
3552 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3553 {
3554 int regno = reg_alloc_order[i];
3555
3556 if (hard_reg_n_uses[regno].uses == 0)
3557 potential_reload_regs[o++] = regno;
3558 }
3559#else
3560 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3561 {
3562 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3563 potential_reload_regs[o++] = i;
3564 }
3565 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3566 {
3567 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3568 potential_reload_regs[o++] = i;
3569 }
3570#endif
3571
3572 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3573 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3574
3575 /* Now add the regs that are already used,
3576 preferring those used less often. The fixed and otherwise forbidden
3577 registers will be at the end of this list. */
3578
3579 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3580 if (hard_reg_n_uses[i].uses != 0)
3581 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3582}
3583\f
3584/* Reload pseudo-registers into hard regs around each insn as needed.
3585 Additional register load insns are output before the insn that needs it
3586 and perhaps store insns after insns that modify the reloaded pseudo reg.
3587
3588 reg_last_reload_reg and reg_reloaded_contents keep track of
d08ea79f 3589 which registers are already available in reload registers.
32131a9c
RK
3590 We update these for the reloads that we perform,
3591 as the insns are scanned. */
3592
3593static void
3594reload_as_needed (first, live_known)
3595 rtx first;
3596 int live_known;
3597{
3598 register rtx insn;
3599 register int i;
3600 int this_block = 0;
3601 rtx x;
3602 rtx after_call = 0;
3603
3604 bzero (spill_reg_rtx, sizeof spill_reg_rtx);
3605 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3606 bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
3607 reg_has_output_reload = (char *) alloca (max_regno);
3608 for (i = 0; i < n_spills; i++)
3609 {
3610 reg_reloaded_contents[i] = -1;
3611 reg_reloaded_insn[i] = 0;
3612 }
3613
3614 /* Reset all offsets on eliminable registers to their initial values. */
3615#ifdef ELIMINABLE_REGS
3616 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3617 {
3618 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
510dd77e 3619 reg_eliminate[i].initial_offset);
32131a9c
RK
3620 reg_eliminate[i].previous_offset
3621 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3622 }
3623#else
3624 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3625 reg_eliminate[0].previous_offset
3626 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3627#endif
3628
3629 num_not_at_initial_offset = 0;
3630
3631 for (insn = first; insn;)
3632 {
3633 register rtx next = NEXT_INSN (insn);
3634
3635 /* Notice when we move to a new basic block. */
aa2c50d6 3636 if (live_known && this_block + 1 < n_basic_blocks
32131a9c
RK
3637 && insn == basic_block_head[this_block+1])
3638 ++this_block;
3639
3640 /* If we pass a label, copy the offsets from the label information
3641 into the current offsets of each elimination. */
3642 if (GET_CODE (insn) == CODE_LABEL)
2a4b5f3b
RK
3643 {
3644 num_not_at_initial_offset = 0;
3645 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3646 {
3647 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3648 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
1d0d98f3
RK
3649 if (reg_eliminate[i].can_eliminate
3650 && (reg_eliminate[i].offset
3651 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
3652 num_not_at_initial_offset++;
3653 }
3654 }
32131a9c
RK
3655
3656 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3657 {
3658 rtx avoid_return_reg = 0;
3659
3660#ifdef SMALL_REGISTER_CLASSES
3661 /* Set avoid_return_reg if this is an insn
3662 that might use the value of a function call. */
3663 if (GET_CODE (insn) == CALL_INSN)
3664 {
3665 if (GET_CODE (PATTERN (insn)) == SET)
3666 after_call = SET_DEST (PATTERN (insn));
3667 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3668 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3669 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3670 else
3671 after_call = 0;
3672 }
3673 else if (after_call != 0
3674 && !(GET_CODE (PATTERN (insn)) == SET
3675 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3676 {
3677 if (reg_mentioned_p (after_call, PATTERN (insn)))
3678 avoid_return_reg = after_call;
3679 after_call = 0;
3680 }
3681#endif /* SMALL_REGISTER_CLASSES */
3682
2758481d
RS
3683 /* If this is a USE and CLOBBER of a MEM, ensure that any
3684 references to eliminable registers have been removed. */
3685
3686 if ((GET_CODE (PATTERN (insn)) == USE
3687 || GET_CODE (PATTERN (insn)) == CLOBBER)
3688 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3689 XEXP (XEXP (PATTERN (insn), 0), 0)
3690 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
fb3821f7 3691 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
2758481d 3692
32131a9c
RK
3693 /* If we need to do register elimination processing, do so.
3694 This might delete the insn, in which case we are done. */
3695 if (num_eliminable && GET_MODE (insn) == QImode)
3696 {
3697 eliminate_regs_in_insn (insn, 1);
3698 if (GET_CODE (insn) == NOTE)
3699 {
3700 insn = next;
3701 continue;
3702 }
3703 }
3704
3705 if (GET_MODE (insn) == VOIDmode)
3706 n_reloads = 0;
3707 /* First find the pseudo regs that must be reloaded for this insn.
3708 This info is returned in the tables reload_... (see reload.h).
3709 Also modify the body of INSN by substituting RELOAD
3710 rtx's for those pseudo regs. */
3711 else
3712 {
3713 bzero (reg_has_output_reload, max_regno);
3714 CLEAR_HARD_REG_SET (reg_is_output_reload);
3715
3716 find_reloads (insn, 1, spill_indirect_levels, live_known,
3717 spill_reg_order);
3718 }
3719
3720 if (n_reloads > 0)
3721 {
3c3eeea6
RK
3722 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3723 rtx p;
32131a9c
RK
3724 int class;
3725
3726 /* If this block has not had spilling done for a
546b63fb
RK
3727 particular clas and we have any non-optionals that need a
3728 spill reg in that class, abort. */
32131a9c
RK
3729
3730 for (class = 0; class < N_REG_CLASSES; class++)
3731 if (basic_block_needs[class] != 0
3732 && basic_block_needs[class][this_block] == 0)
3733 for (i = 0; i < n_reloads; i++)
546b63fb
RK
3734 if (class == (int) reload_reg_class[i]
3735 && reload_reg_rtx[i] == 0
3736 && ! reload_optional[i]
3737 && (reload_in[i] != 0 || reload_out[i] != 0
3738 || reload_secondary_p[i] != 0))
3739 abort ();
32131a9c
RK
3740
3741 /* Now compute which reload regs to reload them into. Perhaps
3742 reusing reload regs from previous insns, or else output
3743 load insns to reload them. Maybe output store insns too.
3744 Record the choices of reload reg in reload_reg_rtx. */
3745 choose_reload_regs (insn, avoid_return_reg);
3746
546b63fb
RK
3747#ifdef SMALL_REGISTER_CLASSES
3748 /* Merge any reloads that we didn't combine for fear of
3749 increasing the number of spill registers needed but now
3750 discover can be safely merged. */
3751 merge_assigned_reloads (insn);
3752#endif
3753
32131a9c
RK
3754 /* Generate the insns to reload operands into or out of
3755 their reload regs. */
3756 emit_reload_insns (insn);
3757
3758 /* Substitute the chosen reload regs from reload_reg_rtx
3759 into the insn's body (or perhaps into the bodies of other
3760 load and store insn that we just made for reloading
3761 and that we moved the structure into). */
3762 subst_reloads ();
3c3eeea6
RK
3763
3764 /* If this was an ASM, make sure that all the reload insns
3765 we have generated are valid. If not, give an error
3766 and delete them. */
3767
3768 if (asm_noperands (PATTERN (insn)) >= 0)
3769 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3770 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3771 && (recog_memoized (p) < 0
3772 || (insn_extract (p),
3773 ! constrain_operands (INSN_CODE (p), 1))))
3774 {
3775 error_for_asm (insn,
3776 "`asm' operand requires impossible reload");
3777 PUT_CODE (p, NOTE);
3778 NOTE_SOURCE_FILE (p) = 0;
3779 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3780 }
32131a9c
RK
3781 }
3782 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3783 is no longer validly lying around to save a future reload.
3784 Note that this does not detect pseudos that were reloaded
3785 for this insn in order to be stored in
3786 (obeying register constraints). That is correct; such reload
3787 registers ARE still valid. */
3788 note_stores (PATTERN (insn), forget_old_reloads_1);
3789
3790 /* There may have been CLOBBER insns placed after INSN. So scan
3791 between INSN and NEXT and use them to forget old reloads. */
3792 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3793 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3794 note_stores (PATTERN (x), forget_old_reloads_1);
3795
3796#ifdef AUTO_INC_DEC
3797 /* Likewise for regs altered by auto-increment in this insn.
3798 But note that the reg-notes are not changed by reloading:
3799 they still contain the pseudo-regs, not the spill regs. */
3800 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3801 if (REG_NOTE_KIND (x) == REG_INC)
3802 {
3803 /* See if this pseudo reg was reloaded in this insn.
3804 If so, its last-reload info is still valid
3805 because it is based on this insn's reload. */
3806 for (i = 0; i < n_reloads; i++)
3807 if (reload_out[i] == XEXP (x, 0))
3808 break;
3809
08fb99fa 3810 if (i == n_reloads)
9a881562 3811 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
32131a9c
RK
3812 }
3813#endif
3814 }
3815 /* A reload reg's contents are unknown after a label. */
3816 if (GET_CODE (insn) == CODE_LABEL)
3817 for (i = 0; i < n_spills; i++)
3818 {
3819 reg_reloaded_contents[i] = -1;
3820 reg_reloaded_insn[i] = 0;
3821 }
3822
3823 /* Don't assume a reload reg is still good after a call insn
3824 if it is a call-used reg. */
546b63fb 3825 else if (GET_CODE (insn) == CALL_INSN)
32131a9c
RK
3826 for (i = 0; i < n_spills; i++)
3827 if (call_used_regs[spill_regs[i]])
3828 {
3829 reg_reloaded_contents[i] = -1;
3830 reg_reloaded_insn[i] = 0;
3831 }
3832
3833 /* In case registers overlap, allow certain insns to invalidate
3834 particular hard registers. */
3835
3836#ifdef INSN_CLOBBERS_REGNO_P
3837 for (i = 0 ; i < n_spills ; i++)
3838 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3839 {
3840 reg_reloaded_contents[i] = -1;
3841 reg_reloaded_insn[i] = 0;
3842 }
3843#endif
3844
3845 insn = next;
3846
3847#ifdef USE_C_ALLOCA
3848 alloca (0);
3849#endif
3850 }
3851}
3852
3853/* Discard all record of any value reloaded from X,
3854 or reloaded in X from someplace else;
3855 unless X is an output reload reg of the current insn.
3856
3857 X may be a hard reg (the reload reg)
3858 or it may be a pseudo reg that was reloaded from. */
3859
3860static void
9a881562 3861forget_old_reloads_1 (x, ignored)
32131a9c 3862 rtx x;
9a881562 3863 rtx ignored;
32131a9c
RK
3864{
3865 register int regno;
3866 int nr;
0a2e51a9
RS
3867 int offset = 0;
3868
3869 /* note_stores does give us subregs of hard regs. */
3870 while (GET_CODE (x) == SUBREG)
3871 {
3872 offset += SUBREG_WORD (x);
3873 x = SUBREG_REG (x);
3874 }
32131a9c
RK
3875
3876 if (GET_CODE (x) != REG)
3877 return;
3878
0a2e51a9 3879 regno = REGNO (x) + offset;
32131a9c
RK
3880
3881 if (regno >= FIRST_PSEUDO_REGISTER)
3882 nr = 1;
3883 else
3884 {
3885 int i;
3886 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3887 /* Storing into a spilled-reg invalidates its contents.
3888 This can happen if a block-local pseudo is allocated to that reg
3889 and it wasn't spilled because this block's total need is 0.
3890 Then some insn might have an optional reload and use this reg. */
3891 for (i = 0; i < nr; i++)
3892 if (spill_reg_order[regno + i] >= 0
3893 /* But don't do this if the reg actually serves as an output
3894 reload reg in the current instruction. */
3895 && (n_reloads == 0
3896 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3897 {
3898 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3899 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3900 }
3901 }
3902
3903 /* Since value of X has changed,
3904 forget any value previously copied from it. */
3905
3906 while (nr-- > 0)
3907 /* But don't forget a copy if this is the output reload
3908 that establishes the copy's validity. */
3909 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3910 reg_last_reload_reg[regno + nr] = 0;
3911}
3912\f
3913/* For each reload, the mode of the reload register. */
3914static enum machine_mode reload_mode[MAX_RELOADS];
3915
3916/* For each reload, the largest number of registers it will require. */
3917static int reload_nregs[MAX_RELOADS];
3918
3919/* Comparison function for qsort to decide which of two reloads
3920 should be handled first. *P1 and *P2 are the reload numbers. */
3921
3922static int
3923reload_reg_class_lower (p1, p2)
3924 short *p1, *p2;
3925{
3926 register int r1 = *p1, r2 = *p2;
3927 register int t;
a8fdc208 3928
32131a9c
RK
3929 /* Consider required reloads before optional ones. */
3930 t = reload_optional[r1] - reload_optional[r2];
3931 if (t != 0)
3932 return t;
3933
3934 /* Count all solitary classes before non-solitary ones. */
3935 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3936 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3937 if (t != 0)
3938 return t;
3939
3940 /* Aside from solitaires, consider all multi-reg groups first. */
3941 t = reload_nregs[r2] - reload_nregs[r1];
3942 if (t != 0)
3943 return t;
3944
3945 /* Consider reloads in order of increasing reg-class number. */
3946 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3947 if (t != 0)
3948 return t;
3949
3950 /* If reloads are equally urgent, sort by reload number,
3951 so that the results of qsort leave nothing to chance. */
3952 return r1 - r2;
3953}
3954\f
3955/* The following HARD_REG_SETs indicate when each hard register is
3956 used for a reload of various parts of the current insn. */
3957
3958/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
3959static HARD_REG_SET reload_reg_used;
546b63fb
RK
3960/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
3961static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
3962/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
3963static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
3964/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
3965static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
3966/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
3967static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c
RK
3968/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
3969static HARD_REG_SET reload_reg_used_in_op_addr;
546b63fb
RK
3970/* If reg is in use for a RELOAD_FOR_INSN reload. */
3971static HARD_REG_SET reload_reg_used_in_insn;
3972/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
3973static HARD_REG_SET reload_reg_used_in_other_addr;
32131a9c
RK
3974
3975/* If reg is in use as a reload reg for any sort of reload. */
3976static HARD_REG_SET reload_reg_used_at_all;
3977
be7ae2a4
RK
3978/* If reg is use as an inherited reload. We just mark the first register
3979 in the group. */
3980static HARD_REG_SET reload_reg_used_for_inherit;
3981
546b63fb
RK
3982/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
3983 TYPE. MODE is used to indicate how many consecutive regs are
3984 actually used. */
32131a9c
RK
3985
3986static void
546b63fb 3987mark_reload_reg_in_use (regno, opnum, type, mode)
32131a9c 3988 int regno;
546b63fb
RK
3989 int opnum;
3990 enum reload_type type;
32131a9c
RK
3991 enum machine_mode mode;
3992{
3993 int nregs = HARD_REGNO_NREGS (regno, mode);
3994 int i;
3995
3996 for (i = regno; i < nregs + regno; i++)
3997 {
546b63fb 3998 switch (type)
32131a9c
RK
3999 {
4000 case RELOAD_OTHER:
4001 SET_HARD_REG_BIT (reload_reg_used, i);
4002 break;
4003
546b63fb
RK
4004 case RELOAD_FOR_INPUT_ADDRESS:
4005 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
32131a9c
RK
4006 break;
4007
546b63fb
RK
4008 case RELOAD_FOR_OUTPUT_ADDRESS:
4009 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
32131a9c
RK
4010 break;
4011
4012 case RELOAD_FOR_OPERAND_ADDRESS:
4013 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4014 break;
4015
546b63fb
RK
4016 case RELOAD_FOR_OTHER_ADDRESS:
4017 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4018 break;
4019
32131a9c 4020 case RELOAD_FOR_INPUT:
546b63fb 4021 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
32131a9c
RK
4022 break;
4023
4024 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4025 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4026 break;
4027
4028 case RELOAD_FOR_INSN:
4029 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
32131a9c
RK
4030 break;
4031 }
4032
4033 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4034 }
4035}
4036
be7ae2a4
RK
4037/* Similarly, but show REGNO is no longer in use for a reload. */
4038
4039static void
4040clear_reload_reg_in_use (regno, opnum, type, mode)
4041 int regno;
4042 int opnum;
4043 enum reload_type type;
4044 enum machine_mode mode;
4045{
4046 int nregs = HARD_REGNO_NREGS (regno, mode);
4047 int i;
4048
4049 for (i = regno; i < nregs + regno; i++)
4050 {
4051 switch (type)
4052 {
4053 case RELOAD_OTHER:
4054 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4055 break;
4056
4057 case RELOAD_FOR_INPUT_ADDRESS:
4058 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4059 break;
4060
4061 case RELOAD_FOR_OUTPUT_ADDRESS:
4062 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4063 break;
4064
4065 case RELOAD_FOR_OPERAND_ADDRESS:
4066 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4067 break;
4068
4069 case RELOAD_FOR_OTHER_ADDRESS:
4070 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4071 break;
4072
4073 case RELOAD_FOR_INPUT:
4074 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4075 break;
4076
4077 case RELOAD_FOR_OUTPUT:
4078 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4079 break;
4080
4081 case RELOAD_FOR_INSN:
4082 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4083 break;
4084 }
4085 }
4086}
4087
32131a9c 4088/* 1 if reg REGNO is free as a reload reg for a reload of the sort
546b63fb 4089 specified by OPNUM and TYPE. */
32131a9c
RK
4090
4091static int
546b63fb 4092reload_reg_free_p (regno, opnum, type)
32131a9c 4093 int regno;
546b63fb
RK
4094 int opnum;
4095 enum reload_type type;
32131a9c 4096{
546b63fb
RK
4097 int i;
4098
4099 /* In use for a RELOAD_OTHER means it's not available for anything except
4100 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4101 to be used only for inputs. */
4102
4103 if (type != RELOAD_FOR_OTHER_ADDRESS
4104 && TEST_HARD_REG_BIT (reload_reg_used, regno))
32131a9c 4105 return 0;
546b63fb
RK
4106
4107 switch (type)
32131a9c
RK
4108 {
4109 case RELOAD_OTHER:
4110 /* In use for anything means not available for a RELOAD_OTHER. */
4111 return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);
4112
4113 /* The other kinds of use can sometimes share a register. */
4114 case RELOAD_FOR_INPUT:
546b63fb
RK
4115 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4116 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4117 return 0;
4118
4119 /* If it is used for some other input, can't use it. */
4120 for (i = 0; i < reload_n_operands; i++)
4121 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4122 return 0;
4123
4124 /* If it is used in a later operand's address, can't use it. */
4125 for (i = opnum + 1; i < reload_n_operands; i++)
4126 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4127 return 0;
4128
4129 return 1;
4130
4131 case RELOAD_FOR_INPUT_ADDRESS:
4132 /* Can't use a register if it is used for an input address for this
4133 operand or used as an input in an earlier one. */
4134 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4135 return 0;
4136
4137 for (i = 0; i < opnum; i++)
4138 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4139 return 0;
4140
4141 return 1;
4142
4143 case RELOAD_FOR_OUTPUT_ADDRESS:
4144 /* Can't use a register if it is used for an output address for this
4145 operand or used as an output in this or a later operand. */
4146 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4147 return 0;
4148
4149 for (i = opnum; i < reload_n_operands; i++)
4150 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4151 return 0;
4152
4153 return 1;
4154
32131a9c 4155 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4156 for (i = 0; i < reload_n_operands; i++)
4157 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4158 return 0;
4159
4160 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4161 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4162
32131a9c 4163 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4164 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4165 outputs, or an operand address for this or an earlier output. */
4166 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4167 return 0;
4168
4169 for (i = 0; i < reload_n_operands; i++)
4170 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4171 return 0;
4172
4173 for (i = 0; i <= opnum; i++)
4174 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4175 return 0;
4176
4177 return 1;
4178
4179 case RELOAD_FOR_INSN:
4180 for (i = 0; i < reload_n_operands; i++)
4181 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4182 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4183 return 0;
4184
4185 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4186 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4187
4188 case RELOAD_FOR_OTHER_ADDRESS:
4189 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4190 }
4191 abort ();
4192}
4193
4194/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4195 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4196 is not in use for a reload in any prior part of the insn.
4197
4198 We can assume that the reload reg was already tested for availability
4199 at the time it is needed, and we should not check this again,
4200 in case the reg has already been marked in use. */
4201
4202static int
546b63fb 4203reload_reg_free_before_p (regno, opnum, type)
32131a9c 4204 int regno;
546b63fb
RK
4205 int opnum;
4206 enum reload_type type;
32131a9c 4207{
546b63fb
RK
4208 int i;
4209
4210 switch (type)
32131a9c 4211 {
546b63fb
RK
4212 case RELOAD_FOR_OTHER_ADDRESS:
4213 /* These always come first. */
32131a9c
RK
4214 return 1;
4215
546b63fb
RK
4216 case RELOAD_OTHER:
4217 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4218
32131a9c 4219 /* If this use is for part of the insn,
546b63fb
RK
4220 check the reg is not in use for any prior part. It is tempting
4221 to try to do this by falling through from objecs that occur
4222 later in the insn to ones that occur earlier, but that will not
4223 correctly take into account the fact that here we MUST ignore
4224 things that would prevent the register from being allocated in
4225 the first place, since we know that it was allocated. */
4226
4227 case RELOAD_FOR_OUTPUT_ADDRESS:
4228 /* Earlier reloads are for earlier outputs or their addresses,
4229 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4230 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4231 RELOAD_OTHER).. */
4232 for (i = 0; i < opnum; i++)
4233 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4234 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4235 return 0;
4236
4237 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
32131a9c 4238 return 0;
546b63fb
RK
4239
4240 for (i = 0; i < reload_n_operands; i++)
4241 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4242 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4243 return 0;
4244
4245 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4246 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4247 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4248
32131a9c 4249 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4250 /* This can't be used in the output address for this operand and
4251 anything that can't be used for it, except that we've already
4252 tested for RELOAD_FOR_INSN objects. */
4253
4254 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
32131a9c 4255 return 0;
546b63fb
RK
4256
4257 for (i = 0; i < opnum; i++)
4258 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4259 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4260 return 0;
4261
4262 for (i = 0; i < reload_n_operands; i++)
4263 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4264 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4265 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4266 return 0;
4267
4268 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4269
32131a9c 4270 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4271 case RELOAD_FOR_INSN:
4272 /* These can't conflict with inputs, or each other, so all we have to
4273 test is input addresses and the addresses of OTHER items. */
4274
4275 for (i = 0; i < reload_n_operands; i++)
4276 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4277 return 0;
4278
4279 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4280
32131a9c 4281 case RELOAD_FOR_INPUT:
546b63fb
RK
4282 /* The only things earlier are the address for this and
4283 earlier inputs, other inputs (which we know we don't conflict
4284 with), and addresses of RELOAD_OTHER objects. */
4285
4286 for (i = 0; i <= opnum; i++)
4287 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4288 return 0;
4289
4290 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4291
4292 case RELOAD_FOR_INPUT_ADDRESS:
4293 /* Similarly, all we have to check is for use in earlier inputs'
4294 addresses. */
4295 for (i = 0; i < opnum; i++)
4296 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4297 return 0;
4298
4299 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4300 }
4301 abort ();
4302}
4303
4304/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4305 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4306 is still available in REGNO at the end of the insn.
4307
4308 We can assume that the reload reg was already tested for availability
4309 at the time it is needed, and we should not check this again,
4310 in case the reg has already been marked in use. */
4311
4312static int
546b63fb 4313reload_reg_reaches_end_p (regno, opnum, type)
32131a9c 4314 int regno;
546b63fb
RK
4315 int opnum;
4316 enum reload_type type;
32131a9c 4317{
546b63fb
RK
4318 int i;
4319
4320 switch (type)
32131a9c
RK
4321 {
4322 case RELOAD_OTHER:
4323 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4324 its value must reach the end. */
4325 return 1;
4326
4327 /* If this use is for part of the insn,
546b63fb
RK
4328 its value reaches if no subsequent part uses the same register.
4329 Just like the above function, don't try to do this with lots
4330 of fallthroughs. */
4331
4332 case RELOAD_FOR_OTHER_ADDRESS:
4333 /* Here we check for everything else, since these don't conflict
4334 with anything else and everything comes later. */
4335
4336 for (i = 0; i < reload_n_operands; i++)
4337 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4338 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4339 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4340 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4341 return 0;
4342
4343 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4344 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4345 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4346
4347 case RELOAD_FOR_INPUT_ADDRESS:
4348 /* Similar, except that we check only for this and subsequent inputs
4349 and the address of only subsequent inputs and we do not need
4350 to check for RELOAD_OTHER objects since they are known not to
4351 conflict. */
4352
4353 for (i = opnum; i < reload_n_operands; i++)
4354 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4355 return 0;
4356
4357 for (i = opnum + 1; i < reload_n_operands; i++)
4358 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4359 return 0;
4360
4361 for (i = 0; i < reload_n_operands; i++)
4362 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4363 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4364 return 0;
4365
4366 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4367 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4368
32131a9c 4369 case RELOAD_FOR_INPUT:
546b63fb
RK
4370 /* Similar to input address, except we start at the next operand for
4371 both input and input address and we do not check for
4372 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4373 would conflict. */
4374
4375 for (i = opnum + 1; i < reload_n_operands; i++)
4376 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4377 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4378 return 0;
4379
4380 /* ... fall through ... */
4381
32131a9c 4382 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4383 /* Check outputs and their addresses. */
4384
4385 for (i = 0; i < reload_n_operands; i++)
4386 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4387 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4388 return 0;
4389
4390 return 1;
4391
4392 case RELOAD_FOR_INSN:
4393 /* These conflict with other outputs with with RELOAD_OTHER. So
4394 we need only check for output addresses. */
4395
4396 opnum = -1;
4397
4398 /* ... fall through ... */
4399
32131a9c 4400 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4401 case RELOAD_FOR_OUTPUT_ADDRESS:
4402 /* We already know these can't conflict with a later output. So the
4403 only thing to check are later output addresses. */
4404 for (i = opnum + 1; i < reload_n_operands; i++)
4405 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4406 return 0;
4407
32131a9c
RK
4408 return 1;
4409 }
546b63fb 4410
32131a9c
RK
4411 abort ();
4412}
4413\f
4414/* Vector of reload-numbers showing the order in which the reloads should
4415 be processed. */
4416short reload_order[MAX_RELOADS];
4417
4418/* Indexed by reload number, 1 if incoming value
4419 inherited from previous insns. */
4420char reload_inherited[MAX_RELOADS];
4421
4422/* For an inherited reload, this is the insn the reload was inherited from,
4423 if we know it. Otherwise, this is 0. */
4424rtx reload_inheritance_insn[MAX_RELOADS];
4425
4426/* If non-zero, this is a place to get the value of the reload,
4427 rather than using reload_in. */
4428rtx reload_override_in[MAX_RELOADS];
4429
4430/* For each reload, the index in spill_regs of the spill register used,
4431 or -1 if we did not need one of the spill registers for this reload. */
4432int reload_spill_index[MAX_RELOADS];
4433
4434/* Index of last register assigned as a spill register. We allocate in
4435 a round-robin fashio. */
4436
1d2310f3 4437static int last_spill_reg = 0;
32131a9c
RK
4438
4439/* Find a spill register to use as a reload register for reload R.
4440 LAST_RELOAD is non-zero if this is the last reload for the insn being
4441 processed.
4442
4443 Set reload_reg_rtx[R] to the register allocated.
4444
4445 If NOERROR is nonzero, we return 1 if successful,
4446 or 0 if we couldn't find a spill reg and we didn't change anything. */
4447
4448static int
4449allocate_reload_reg (r, insn, last_reload, noerror)
4450 int r;
4451 rtx insn;
4452 int last_reload;
4453 int noerror;
4454{
4455 int i;
4456 int pass;
4457 int count;
4458 rtx new;
4459 int regno;
4460
4461 /* If we put this reload ahead, thinking it is a group,
4462 then insist on finding a group. Otherwise we can grab a
a8fdc208 4463 reg that some other reload needs.
32131a9c
RK
4464 (That can happen when we have a 68000 DATA_OR_FP_REG
4465 which is a group of data regs or one fp reg.)
4466 We need not be so restrictive if there are no more reloads
4467 for this insn.
4468
4469 ??? Really it would be nicer to have smarter handling
4470 for that kind of reg class, where a problem like this is normal.
4471 Perhaps those classes should be avoided for reloading
4472 by use of more alternatives. */
4473
4474 int force_group = reload_nregs[r] > 1 && ! last_reload;
4475
4476 /* If we want a single register and haven't yet found one,
4477 take any reg in the right class and not in use.
4478 If we want a consecutive group, here is where we look for it.
4479
4480 We use two passes so we can first look for reload regs to
4481 reuse, which are already in use for other reloads in this insn,
4482 and only then use additional registers.
4483 I think that maximizing reuse is needed to make sure we don't
4484 run out of reload regs. Suppose we have three reloads, and
4485 reloads A and B can share regs. These need two regs.
4486 Suppose A and B are given different regs.
4487 That leaves none for C. */
4488 for (pass = 0; pass < 2; pass++)
4489 {
4490 /* I is the index in spill_regs.
4491 We advance it round-robin between insns to use all spill regs
4492 equally, so that inherited reloads have a chance
4493 of leapfrogging each other. */
4494
4495 for (count = 0, i = last_spill_reg; count < n_spills; count++)
4496 {
4497 int class = (int) reload_reg_class[r];
4498
4499 i = (i + 1) % n_spills;
4500
546b63fb
RK
4501 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4502 reload_when_needed[r])
32131a9c
RK
4503 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4504 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
be7ae2a4
RK
4505 /* Look first for regs to share, then for unshared. But
4506 don't share regs used for inherited reloads; they are
4507 the ones we want to preserve. */
4508 && (pass
4509 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4510 spill_regs[i])
4511 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4512 spill_regs[i]))))
32131a9c
RK
4513 {
4514 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4515 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4516 (on 68000) got us two FP regs. If NR is 1,
4517 we would reject both of them. */
4518 if (force_group)
4519 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4520 /* If we need only one reg, we have already won. */
4521 if (nr == 1)
4522 {
4523 /* But reject a single reg if we demand a group. */
4524 if (force_group)
4525 continue;
4526 break;
4527 }
4528 /* Otherwise check that as many consecutive regs as we need
4529 are available here.
4530 Also, don't use for a group registers that are
4531 needed for nongroups. */
4532 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4533 while (nr > 1)
4534 {
4535 regno = spill_regs[i] + nr - 1;
4536 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4537 && spill_reg_order[regno] >= 0
546b63fb
RK
4538 && reload_reg_free_p (regno, reload_opnum[r],
4539 reload_when_needed[r])
32131a9c
RK
4540 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4541 regno)))
4542 break;
4543 nr--;
4544 }
4545 if (nr == 1)
4546 break;
4547 }
4548 }
4549
4550 /* If we found something on pass 1, omit pass 2. */
4551 if (count < n_spills)
4552 break;
4553 }
4554
4555 /* We should have found a spill register by now. */
4556 if (count == n_spills)
4557 {
4558 if (noerror)
4559 return 0;
139fc12e 4560 goto failure;
32131a9c
RK
4561 }
4562
be7ae2a4
RK
4563 /* I is the index in SPILL_REG_RTX of the reload register we are to
4564 allocate. Get an rtx for it and find its register number. */
32131a9c
RK
4565
4566 new = spill_reg_rtx[i];
4567
4568 if (new == 0 || GET_MODE (new) != reload_mode[r])
be7ae2a4
RK
4569 spill_reg_rtx[i] = new
4570 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4571
32131a9c
RK
4572 regno = true_regnum (new);
4573
4574 /* Detect when the reload reg can't hold the reload mode.
4575 This used to be one `if', but Sequent compiler can't handle that. */
4576 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4577 {
4578 enum machine_mode test_mode = VOIDmode;
4579 if (reload_in[r])
4580 test_mode = GET_MODE (reload_in[r]);
4581 /* If reload_in[r] has VOIDmode, it means we will load it
4582 in whatever mode the reload reg has: to wit, reload_mode[r].
4583 We have already tested that for validity. */
4584 /* Aside from that, we need to test that the expressions
4585 to reload from or into have modes which are valid for this
4586 reload register. Otherwise the reload insns would be invalid. */
4587 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4588 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4589 if (! (reload_out[r] != 0
4590 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
be7ae2a4
RK
4591 {
4592 /* The reg is OK. */
4593 last_spill_reg = i;
4594
4595 /* Mark as in use for this insn the reload regs we use
4596 for this. */
4597 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4598 reload_when_needed[r], reload_mode[r]);
4599
4600 reload_reg_rtx[r] = new;
4601 reload_spill_index[r] = i;
4602 return 1;
4603 }
32131a9c
RK
4604 }
4605
4606 /* The reg is not OK. */
4607 if (noerror)
4608 return 0;
4609
139fc12e 4610 failure:
32131a9c
RK
4611 if (asm_noperands (PATTERN (insn)) < 0)
4612 /* It's the compiler's fault. */
4613 abort ();
4614
4615 /* It's the user's fault; the operand's mode and constraint
4616 don't match. Disable this reload so we don't crash in final. */
4617 error_for_asm (insn,
4618 "`asm' operand constraint incompatible with operand size");
4619 reload_in[r] = 0;
4620 reload_out[r] = 0;
4621 reload_reg_rtx[r] = 0;
4622 reload_optional[r] = 1;
4623 reload_secondary_p[r] = 1;
4624
4625 return 1;
4626}
4627\f
4628/* Assign hard reg targets for the pseudo-registers we must reload
4629 into hard regs for this insn.
4630 Also output the instructions to copy them in and out of the hard regs.
4631
4632 For machines with register classes, we are responsible for
4633 finding a reload reg in the proper class. */
4634
4635static void
4636choose_reload_regs (insn, avoid_return_reg)
4637 rtx insn;
32131a9c
RK
4638 rtx avoid_return_reg;
4639{
4640 register int i, j;
4641 int max_group_size = 1;
4642 enum reg_class group_class = NO_REGS;
4643 int inheritance;
4644
4645 rtx save_reload_reg_rtx[MAX_RELOADS];
4646 char save_reload_inherited[MAX_RELOADS];
4647 rtx save_reload_inheritance_insn[MAX_RELOADS];
4648 rtx save_reload_override_in[MAX_RELOADS];
4649 int save_reload_spill_index[MAX_RELOADS];
4650 HARD_REG_SET save_reload_reg_used;
546b63fb
RK
4651 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4652 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4653 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4654 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c 4655 HARD_REG_SET save_reload_reg_used_in_op_addr;
546b63fb
RK
4656 HARD_REG_SET save_reload_reg_used_in_insn;
4657 HARD_REG_SET save_reload_reg_used_in_other_addr;
32131a9c
RK
4658 HARD_REG_SET save_reload_reg_used_at_all;
4659
4660 bzero (reload_inherited, MAX_RELOADS);
4661 bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4662 bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));
4663
4664 CLEAR_HARD_REG_SET (reload_reg_used);
4665 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
32131a9c 4666 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
546b63fb
RK
4667 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4668 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
32131a9c 4669
546b63fb
RK
4670 for (i = 0; i < reload_n_operands; i++)
4671 {
4672 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4673 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4674 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4675 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4676 }
32131a9c
RK
4677
4678#ifdef SMALL_REGISTER_CLASSES
4679 /* Don't bother with avoiding the return reg
4680 if we have no mandatory reload that could use it. */
4681 if (avoid_return_reg)
4682 {
4683 int do_avoid = 0;
4684 int regno = REGNO (avoid_return_reg);
4685 int nregs
4686 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4687 int r;
4688
4689 for (r = regno; r < regno + nregs; r++)
4690 if (spill_reg_order[r] >= 0)
4691 for (j = 0; j < n_reloads; j++)
4692 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4693 && (reload_in[j] != 0 || reload_out[j] != 0
4694 || reload_secondary_p[j])
4695 &&
4696 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4697 do_avoid = 1;
4698 if (!do_avoid)
4699 avoid_return_reg = 0;
4700 }
4701#endif /* SMALL_REGISTER_CLASSES */
4702
4703#if 0 /* Not needed, now that we can always retry without inheritance. */
4704 /* See if we have more mandatory reloads than spill regs.
4705 If so, then we cannot risk optimizations that could prevent
a8fdc208 4706 reloads from sharing one spill register.
32131a9c
RK
4707
4708 Since we will try finding a better register than reload_reg_rtx
4709 unless it is equal to reload_in or reload_out, count such reloads. */
4710
4711 {
4712 int tem = 0;
4713#ifdef SMALL_REGISTER_CLASSES
4714 int tem = (avoid_return_reg != 0);
a8fdc208 4715#endif
32131a9c
RK
4716 for (j = 0; j < n_reloads; j++)
4717 if (! reload_optional[j]
4718 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4719 && (reload_reg_rtx[j] == 0
4720 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4721 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4722 tem++;
4723 if (tem > n_spills)
4724 must_reuse = 1;
4725 }
4726#endif
4727
4728#ifdef SMALL_REGISTER_CLASSES
4729 /* Don't use the subroutine call return reg for a reload
4730 if we are supposed to avoid it. */
4731 if (avoid_return_reg)
4732 {
4733 int regno = REGNO (avoid_return_reg);
4734 int nregs
4735 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4736 int r;
4737
4738 for (r = regno; r < regno + nregs; r++)
4739 if (spill_reg_order[r] >= 0)
4740 SET_HARD_REG_BIT (reload_reg_used, r);
4741 }
4742#endif /* SMALL_REGISTER_CLASSES */
4743
4744 /* In order to be certain of getting the registers we need,
4745 we must sort the reloads into order of increasing register class.
4746 Then our grabbing of reload registers will parallel the process
a8fdc208 4747 that provided the reload registers.
32131a9c
RK
4748
4749 Also note whether any of the reloads wants a consecutive group of regs.
4750 If so, record the maximum size of the group desired and what
4751 register class contains all the groups needed by this insn. */
4752
4753 for (j = 0; j < n_reloads; j++)
4754 {
4755 reload_order[j] = j;
4756 reload_spill_index[j] = -1;
4757
4758 reload_mode[j]
546b63fb
RK
4759 = (reload_inmode[j] == VOIDmode
4760 || (GET_MODE_SIZE (reload_outmode[j])
4761 > GET_MODE_SIZE (reload_inmode[j])))
4762 ? reload_outmode[j] : reload_inmode[j];
32131a9c
RK
4763
4764 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4765
4766 if (reload_nregs[j] > 1)
4767 {
4768 max_group_size = MAX (reload_nregs[j], max_group_size);
4769 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4770 }
4771
4772 /* If we have already decided to use a certain register,
4773 don't use it in another way. */
4774 if (reload_reg_rtx[j])
546b63fb 4775 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
32131a9c
RK
4776 reload_when_needed[j], reload_mode[j]);
4777 }
4778
4779 if (n_reloads > 1)
4780 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
4781
4782 bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
4783 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4784 bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
4785 sizeof reload_inheritance_insn);
4786 bcopy (reload_override_in, save_reload_override_in,
4787 sizeof reload_override_in);
4788 bcopy (reload_spill_index, save_reload_spill_index,
4789 sizeof reload_spill_index);
4790 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4791 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
32131a9c
RK
4792 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4793 reload_reg_used_in_op_addr);
546b63fb
RK
4794 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
4795 reload_reg_used_in_insn);
4796 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
4797 reload_reg_used_in_other_addr);
4798
4799 for (i = 0; i < reload_n_operands; i++)
4800 {
4801 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
4802 reload_reg_used_in_output[i]);
4803 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
4804 reload_reg_used_in_input[i]);
4805 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
4806 reload_reg_used_in_input_addr[i]);
4807 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
4808 reload_reg_used_in_output_addr[i]);
4809 }
32131a9c 4810
58b1581b
RS
4811 /* If -O, try first with inheritance, then turning it off.
4812 If not -O, don't do inheritance.
4813 Using inheritance when not optimizing leads to paradoxes
4814 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
4815 because one side of the comparison might be inherited. */
32131a9c 4816
58b1581b 4817 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
32131a9c
RK
4818 {
4819 /* Process the reloads in order of preference just found.
4820 Beyond this point, subregs can be found in reload_reg_rtx.
4821
4822 This used to look for an existing reloaded home for all
4823 of the reloads, and only then perform any new reloads.
4824 But that could lose if the reloads were done out of reg-class order
4825 because a later reload with a looser constraint might have an old
4826 home in a register needed by an earlier reload with a tighter constraint.
4827
4828 To solve this, we make two passes over the reloads, in the order
4829 described above. In the first pass we try to inherit a reload
4830 from a previous insn. If there is a later reload that needs a
4831 class that is a proper subset of the class being processed, we must
4832 also allocate a spill register during the first pass.
4833
4834 Then make a second pass over the reloads to allocate any reloads
4835 that haven't been given registers yet. */
4836
be7ae2a4
RK
4837 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
4838
32131a9c
RK
4839 for (j = 0; j < n_reloads; j++)
4840 {
4841 register int r = reload_order[j];
4842
4843 /* Ignore reloads that got marked inoperative. */
4844 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4845 continue;
4846
4847 /* If find_reloads chose a to use reload_in or reload_out as a reload
4848 register, we don't need to chose one. Otherwise, try even if it found
4849 one since we might save an insn if we find the value lying around. */
4850 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
4851 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
4852 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
4853 continue;
4854
4855#if 0 /* No longer needed for correct operation.
4856 It might give better code, or might not; worth an experiment? */
4857 /* If this is an optional reload, we can't inherit from earlier insns
4858 until we are sure that any non-optional reloads have been allocated.
4859 The following code takes advantage of the fact that optional reloads
4860 are at the end of reload_order. */
4861 if (reload_optional[r] != 0)
4862 for (i = 0; i < j; i++)
4863 if ((reload_out[reload_order[i]] != 0
4864 || reload_in[reload_order[i]] != 0
4865 || reload_secondary_p[reload_order[i]])
4866 && ! reload_optional[reload_order[i]]
4867 && reload_reg_rtx[reload_order[i]] == 0)
4868 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
4869#endif
4870
4871 /* First see if this pseudo is already available as reloaded
4872 for a previous insn. We cannot try to inherit for reloads
4873 that are smaller than the maximum number of registers needed
4874 for groups unless the register we would allocate cannot be used
4875 for the groups.
4876
4877 We could check here to see if this is a secondary reload for
4878 an object that is already in a register of the desired class.
4879 This would avoid the need for the secondary reload register.
4880 But this is complex because we can't easily determine what
4881 objects might want to be loaded via this reload. So let a register
4882 be allocated here. In `emit_reload_insns' we suppress one of the
4883 loads in the case described above. */
4884
4885 if (inheritance)
4886 {
4887 register int regno = -1;
db660765 4888 enum machine_mode mode;
32131a9c
RK
4889
4890 if (reload_in[r] == 0)
4891 ;
4892 else if (GET_CODE (reload_in[r]) == REG)
db660765
TW
4893 {
4894 regno = REGNO (reload_in[r]);
4895 mode = GET_MODE (reload_in[r]);
4896 }
32131a9c 4897 else if (GET_CODE (reload_in_reg[r]) == REG)
db660765
TW
4898 {
4899 regno = REGNO (reload_in_reg[r]);
4900 mode = GET_MODE (reload_in_reg[r]);
4901 }
32131a9c
RK
4902#if 0
4903 /* This won't work, since REGNO can be a pseudo reg number.
4904 Also, it takes much more hair to keep track of all the things
4905 that can invalidate an inherited reload of part of a pseudoreg. */
4906 else if (GET_CODE (reload_in[r]) == SUBREG
4907 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
4908 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
4909#endif
4910
4911 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
4912 {
4913 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
4914
4915 if (reg_reloaded_contents[i] == regno
db660765
TW
4916 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
4917 >= GET_MODE_SIZE (mode))
32131a9c
RK
4918 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4919 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4920 spill_regs[i])
4921 && (reload_nregs[r] == max_group_size
4922 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
4923 spill_regs[i]))
546b63fb
RK
4924 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
4925 reload_when_needed[r])
32131a9c 4926 && reload_reg_free_before_p (spill_regs[i],
546b63fb 4927 reload_opnum[r],
32131a9c
RK
4928 reload_when_needed[r]))
4929 {
4930 /* If a group is needed, verify that all the subsequent
4931 registers still have their values intact. */
4932 int nr
4933 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4934 int k;
4935
4936 for (k = 1; k < nr; k++)
4937 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
4938 != regno)
4939 break;
4940
4941 if (k == nr)
4942 {
c74fa651
RS
4943 int i1;
4944
4945 /* We found a register that contains the
4946 value we need. If this register is the
4947 same as an `earlyclobber' operand of the
4948 current insn, just mark it as a place to
4949 reload from since we can't use it as the
4950 reload register itself. */
4951
4952 for (i1 = 0; i1 < n_earlyclobbers; i1++)
4953 if (reg_overlap_mentioned_for_reload_p
4954 (reg_last_reload_reg[regno],
4955 reload_earlyclobbers[i1]))
4956 break;
4957
8908158d
RS
4958 if (i1 != n_earlyclobbers
4959 /* Don't really use the inherited spill reg
4960 if we need it wider than we've got it. */
4961 || (GET_MODE_SIZE (reload_mode[r])
4962 > GET_MODE_SIZE (mode)))
c74fa651
RS
4963 reload_override_in[r] = reg_last_reload_reg[regno];
4964 else
4965 {
4966 /* We can use this as a reload reg. */
4967 /* Mark the register as in use for this part of
4968 the insn. */
4969 mark_reload_reg_in_use (spill_regs[i],
4970 reload_opnum[r],
4971 reload_when_needed[r],
4972 reload_mode[r]);
4973 reload_reg_rtx[r] = reg_last_reload_reg[regno];
4974 reload_inherited[r] = 1;
4975 reload_inheritance_insn[r]
4976 = reg_reloaded_insn[i];
4977 reload_spill_index[r] = i;
4978 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
4979 spill_regs[i]);
4980 }
32131a9c
RK
4981 }
4982 }
4983 }
4984 }
4985
4986 /* Here's another way to see if the value is already lying around. */
4987 if (inheritance
4988 && reload_in[r] != 0
4989 && ! reload_inherited[r]
4990 && reload_out[r] == 0
4991 && (CONSTANT_P (reload_in[r])
4992 || GET_CODE (reload_in[r]) == PLUS
4993 || GET_CODE (reload_in[r]) == REG
4994 || GET_CODE (reload_in[r]) == MEM)
4995 && (reload_nregs[r] == max_group_size
4996 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
4997 {
4998 register rtx equiv
4999 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
fb3821f7 5000 -1, NULL_PTR, 0, reload_mode[r]);
32131a9c
RK
5001 int regno;
5002
5003 if (equiv != 0)
5004 {
5005 if (GET_CODE (equiv) == REG)
5006 regno = REGNO (equiv);
5007 else if (GET_CODE (equiv) == SUBREG)
5008 {
5009 regno = REGNO (SUBREG_REG (equiv));
5010 if (regno < FIRST_PSEUDO_REGISTER)
5011 regno += SUBREG_WORD (equiv);
5012 }
5013 else
5014 abort ();
5015 }
5016
5017 /* If we found a spill reg, reject it unless it is free
5018 and of the desired class. */
5019 if (equiv != 0
5020 && ((spill_reg_order[regno] >= 0
546b63fb 5021 && ! reload_reg_free_before_p (regno, reload_opnum[r],
32131a9c
RK
5022 reload_when_needed[r]))
5023 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5024 regno)))
5025 equiv = 0;
5026
5027 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5028 equiv = 0;
5029
5030 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5031 equiv = 0;
5032
5033 /* We found a register that contains the value we need.
5034 If this register is the same as an `earlyclobber' operand
5035 of the current insn, just mark it as a place to reload from
5036 since we can't use it as the reload register itself. */
5037
5038 if (equiv != 0)
5039 for (i = 0; i < n_earlyclobbers; i++)
bfa30b22
RK
5040 if (reg_overlap_mentioned_for_reload_p (equiv,
5041 reload_earlyclobbers[i]))
32131a9c
RK
5042 {
5043 reload_override_in[r] = equiv;
5044 equiv = 0;
5045 break;
5046 }
5047
5048 /* JRV: If the equiv register we have found is explicitly
5049 clobbered in the current insn, mark but don't use, as above. */
5050
5051 if (equiv != 0 && regno_clobbered_p (regno, insn))
5052 {
5053 reload_override_in[r] = equiv;
5054 equiv = 0;
5055 }
5056
5057 /* If we found an equivalent reg, say no code need be generated
5058 to load it, and use it as our reload reg. */
3ec2ea3e 5059 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
32131a9c
RK
5060 {
5061 reload_reg_rtx[r] = equiv;
5062 reload_inherited[r] = 1;
5063 /* If it is a spill reg,
5064 mark the spill reg as in use for this insn. */
5065 i = spill_reg_order[regno];
5066 if (i >= 0)
be7ae2a4
RK
5067 {
5068 mark_reload_reg_in_use (regno, reload_opnum[r],
5069 reload_when_needed[r],
5070 reload_mode[r]);
5071 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno);
5072 }
32131a9c
RK
5073 }
5074 }
5075
5076 /* If we found a register to use already, or if this is an optional
5077 reload, we are done. */
5078 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5079 continue;
5080
5081#if 0 /* No longer needed for correct operation. Might or might not
5082 give better code on the average. Want to experiment? */
5083
5084 /* See if there is a later reload that has a class different from our
5085 class that intersects our class or that requires less register
5086 than our reload. If so, we must allocate a register to this
5087 reload now, since that reload might inherit a previous reload
5088 and take the only available register in our class. Don't do this
5089 for optional reloads since they will force all previous reloads
5090 to be allocated. Also don't do this for reloads that have been
5091 turned off. */
5092
5093 for (i = j + 1; i < n_reloads; i++)
5094 {
5095 int s = reload_order[i];
5096
d45cf215
RS
5097 if ((reload_in[s] == 0 && reload_out[s] == 0
5098 && ! reload_secondary_p[s])
32131a9c
RK
5099 || reload_optional[s])
5100 continue;
5101
5102 if ((reload_reg_class[s] != reload_reg_class[r]
5103 && reg_classes_intersect_p (reload_reg_class[r],
5104 reload_reg_class[s]))
5105 || reload_nregs[s] < reload_nregs[r])
5106 break;
5107 }
5108
5109 if (i == n_reloads)
5110 continue;
5111
5112 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5113#endif
5114 }
5115
5116 /* Now allocate reload registers for anything non-optional that
5117 didn't get one yet. */
5118 for (j = 0; j < n_reloads; j++)
5119 {
5120 register int r = reload_order[j];
5121
5122 /* Ignore reloads that got marked inoperative. */
5123 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5124 continue;
5125
5126 /* Skip reloads that already have a register allocated or are
5127 optional. */
5128 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5129 continue;
5130
5131 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5132 break;
5133 }
5134
5135 /* If that loop got all the way, we have won. */
5136 if (j == n_reloads)
5137 break;
5138
5139 fail:
5140 /* Loop around and try without any inheritance. */
5141 /* First undo everything done by the failed attempt
5142 to allocate with inheritance. */
5143 bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
5144 bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
5145 bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
5146 sizeof reload_inheritance_insn);
5147 bcopy (save_reload_override_in, reload_override_in,
5148 sizeof reload_override_in);
5149 bcopy (save_reload_spill_index, reload_spill_index,
5150 sizeof reload_spill_index);
5151 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5152 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
32131a9c
RK
5153 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5154 save_reload_reg_used_in_op_addr);
546b63fb
RK
5155 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5156 save_reload_reg_used_in_insn);
5157 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5158 save_reload_reg_used_in_other_addr);
5159
5160 for (i = 0; i < reload_n_operands; i++)
5161 {
5162 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5163 save_reload_reg_used_in_input[i]);
5164 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5165 save_reload_reg_used_in_output[i]);
5166 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5167 save_reload_reg_used_in_input_addr[i]);
5168 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5169 save_reload_reg_used_in_output_addr[i]);
5170 }
32131a9c
RK
5171 }
5172
5173 /* If we thought we could inherit a reload, because it seemed that
5174 nothing else wanted the same reload register earlier in the insn,
5175 verify that assumption, now that all reloads have been assigned. */
5176
5177 for (j = 0; j < n_reloads; j++)
5178 {
5179 register int r = reload_order[j];
5180
5181 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5182 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
546b63fb 5183 reload_opnum[r],
32131a9c
RK
5184 reload_when_needed[r]))
5185 reload_inherited[r] = 0;
5186
5187 /* If we found a better place to reload from,
5188 validate it in the same fashion, if it is a reload reg. */
5189 if (reload_override_in[r]
5190 && (GET_CODE (reload_override_in[r]) == REG
5191 || GET_CODE (reload_override_in[r]) == SUBREG))
5192 {
5193 int regno = true_regnum (reload_override_in[r]);
5194 if (spill_reg_order[regno] >= 0
546b63fb
RK
5195 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5196 reload_when_needed[r]))
32131a9c
RK
5197 reload_override_in[r] = 0;
5198 }
5199 }
5200
5201 /* Now that reload_override_in is known valid,
5202 actually override reload_in. */
5203 for (j = 0; j < n_reloads; j++)
5204 if (reload_override_in[j])
5205 reload_in[j] = reload_override_in[j];
5206
5207 /* If this reload won't be done because it has been cancelled or is
5208 optional and not inherited, clear reload_reg_rtx so other
5209 routines (such as subst_reloads) don't get confused. */
5210 for (j = 0; j < n_reloads; j++)
be7ae2a4
RK
5211 if (reload_reg_rtx[j] != 0
5212 && ((reload_optional[j] && ! reload_inherited[j])
5213 || (reload_in[j] == 0 && reload_out[j] == 0
5214 && ! reload_secondary_p[j])))
5215 {
5216 int regno = true_regnum (reload_reg_rtx[j]);
5217
5218 if (spill_reg_order[regno] >= 0)
5219 clear_reload_reg_in_use (regno, reload_opnum[j],
5220 reload_when_needed[j], reload_mode[j]);
5221 reload_reg_rtx[j] = 0;
5222 }
32131a9c
RK
5223
5224 /* Record which pseudos and which spill regs have output reloads. */
5225 for (j = 0; j < n_reloads; j++)
5226 {
5227 register int r = reload_order[j];
5228
5229 i = reload_spill_index[r];
5230
5231 /* I is nonneg if this reload used one of the spill regs.
5232 If reload_reg_rtx[r] is 0, this is an optional reload
5233 that we opted to ignore. */
5234 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5235 && reload_reg_rtx[r] != 0)
5236 {
5237 register int nregno = REGNO (reload_out[r]);
372e033b
RS
5238 int nr = 1;
5239
5240 if (nregno < FIRST_PSEUDO_REGISTER)
5241 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
32131a9c
RK
5242
5243 while (--nr >= 0)
372e033b
RS
5244 reg_has_output_reload[nregno + nr] = 1;
5245
5246 if (i >= 0)
32131a9c 5247 {
372e033b
RS
5248 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5249 while (--nr >= 0)
32131a9c
RK
5250 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5251 }
5252
5253 if (reload_when_needed[r] != RELOAD_OTHER
546b63fb
RK
5254 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5255 && reload_when_needed[r] != RELOAD_FOR_INSN)
32131a9c
RK
5256 abort ();
5257 }
5258 }
5259}
5260\f
546b63fb
RK
5261/* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5262 reloads of the same item for fear that we might not have enough reload
5263 registers. However, normally they will get the same reload register
5264 and hence actually need not be loaded twice.
5265
5266 Here we check for the most common case of this phenomenon: when we have
5267 a number of reloads for the same object, each of which were allocated
5268 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5269 reload, and is not modified in the insn itself. If we find such,
5270 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5271 This will not increase the number of spill registers needed and will
5272 prevent redundant code. */
5273
5274#ifdef SMALL_REGISTER_CLASSES
5275
5276static void
5277merge_assigned_reloads (insn)
5278 rtx insn;
5279{
5280 int i, j;
5281
5282 /* Scan all the reloads looking for ones that only load values and
5283 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5284 assigned and not modified by INSN. */
5285
5286 for (i = 0; i < n_reloads; i++)
5287 {
5288 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5289 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5290 || reg_set_p (reload_reg_rtx[i], insn))
5291 continue;
5292
5293 /* Look at all other reloads. Ensure that the only use of this
5294 reload_reg_rtx is in a reload that just loads the same value
5295 as we do. Note that any secondary reloads must be of the identical
5296 class since the values, modes, and result registers are the
5297 same, so we need not do anything with any secondary reloads. */
5298
5299 for (j = 0; j < n_reloads; j++)
5300 {
5301 if (i == j || reload_reg_rtx[j] == 0
5302 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5303 reload_reg_rtx[i]))
5304 continue;
5305
5306 /* If the reload regs aren't exactly the same (e.g, different modes)
5307 or if the values are different, we can't merge anything with this
5308 reload register. */
5309
5310 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5311 || reload_out[j] != 0 || reload_in[j] == 0
5312 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5313 break;
5314 }
5315
5316 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5317 we, in fact, found any matching reloads. */
5318
5319 if (j == n_reloads)
5320 {
5321 for (j = 0; j < n_reloads; j++)
5322 if (i != j && reload_reg_rtx[j] != 0
5323 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5324 {
5325 reload_when_needed[i] = RELOAD_OTHER;
5326 reload_in[j] = 0;
5327 transfer_replacements (i, j);
5328 }
5329
5330 /* If this is now RELOAD_OTHER, look for any reloads that load
5331 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5332 if they were for inputs, RELOAD_OTHER for outputs. Note that
5333 this test is equivalent to looking for reloads for this operand
5334 number. */
5335
5336 if (reload_when_needed[i] == RELOAD_OTHER)
5337 for (j = 0; j < n_reloads; j++)
5338 if (reload_in[j] != 0
5339 && reload_when_needed[i] != RELOAD_OTHER
5340 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5341 reload_in[i]))
5342 reload_when_needed[j]
5343 = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5344 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5345 }
5346 }
5347}
5348#endif /* SMALL_RELOAD_CLASSES */
5349\f
32131a9c
RK
5350/* Output insns to reload values in and out of the chosen reload regs. */
5351
5352static void
5353emit_reload_insns (insn)
5354 rtx insn;
5355{
5356 register int j;
546b63fb
RK
5357 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5358 rtx other_input_address_reload_insns = 0;
5359 rtx other_input_reload_insns = 0;
5360 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5361 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5362 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5363 rtx operand_reload_insns = 0;
32131a9c 5364 rtx following_insn = NEXT_INSN (insn);
a8efe40d 5365 rtx before_insn = insn;
32131a9c
RK
5366 int special;
5367 /* Values to be put in spill_reg_store are put here first. */
5368 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5369
546b63fb
RK
5370 for (j = 0; j < reload_n_operands; j++)
5371 input_reload_insns[j] = input_address_reload_insns[j]
5372 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5373
d45cf215 5374 /* If this is a CALL_INSN preceded by USE insns, any reload insns
a8efe40d
RK
5375 must go in front of the first USE insn, not in front of INSN. */
5376
5377 if (GET_CODE (insn) == CALL_INSN && GET_CODE (PREV_INSN (insn)) == INSN
5378 && GET_CODE (PATTERN (PREV_INSN (insn))) == USE)
5379 while (GET_CODE (PREV_INSN (before_insn)) == INSN
5380 && GET_CODE (PATTERN (PREV_INSN (before_insn))) == USE)
546b63fb
RK
5381 before_insn = PREV_INSN (before_insn);
5382
a34a369b 5383 /* If INSN is followed by any CLOBBER insns made by find_reloads,
546b63fb
RK
5384 put our reloads after them since they may otherwise be
5385 misinterpreted. */
5386
a34a369b
DE
5387 while (GET_CODE (following_insn) == INSN
5388 && GET_MODE (following_insn) == DImode
5389 && GET_CODE (PATTERN (following_insn)) == CLOBBER
5390 && NEXT_INSN (following_insn) != 0)
546b63fb 5391 following_insn = NEXT_INSN (following_insn);
a8efe40d 5392
32131a9c
RK
5393 /* Now output the instructions to copy the data into and out of the
5394 reload registers. Do these in the order that the reloads were reported,
5395 since reloads of base and index registers precede reloads of operands
5396 and the operands may need the base and index registers reloaded. */
5397
5398 for (j = 0; j < n_reloads; j++)
5399 {
5400 register rtx old;
5401 rtx oldequiv_reg = 0;
32131a9c
RK
5402 rtx store_insn = 0;
5403
5404 old = reload_in[j];
5405 if (old != 0 && ! reload_inherited[j]
5406 && ! rtx_equal_p (reload_reg_rtx[j], old)
5407 && reload_reg_rtx[j] != 0)
5408 {
5409 register rtx reloadreg = reload_reg_rtx[j];
5410 rtx oldequiv = 0;
5411 enum machine_mode mode;
546b63fb 5412 rtx *where;
32131a9c
RK
5413
5414 /* Determine the mode to reload in.
5415 This is very tricky because we have three to choose from.
5416 There is the mode the insn operand wants (reload_inmode[J]).
5417 There is the mode of the reload register RELOADREG.
5418 There is the intrinsic mode of the operand, which we could find
5419 by stripping some SUBREGs.
5420 It turns out that RELOADREG's mode is irrelevant:
5421 we can change that arbitrarily.
5422
5423 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5424 then the reload reg may not support QImode moves, so use SImode.
5425 If foo is in memory due to spilling a pseudo reg, this is safe,
5426 because the QImode value is in the least significant part of a
5427 slot big enough for a SImode. If foo is some other sort of
5428 memory reference, then it is impossible to reload this case,
5429 so previous passes had better make sure this never happens.
5430
5431 Then consider a one-word union which has SImode and one of its
5432 members is a float, being fetched as (SUBREG:SF union:SI).
5433 We must fetch that as SFmode because we could be loading into
5434 a float-only register. In this case OLD's mode is correct.
5435
5436 Consider an immediate integer: it has VOIDmode. Here we need
5437 to get a mode from something else.
5438
5439 In some cases, there is a fourth mode, the operand's
5440 containing mode. If the insn specifies a containing mode for
5441 this operand, it overrides all others.
5442
5443 I am not sure whether the algorithm here is always right,
5444 but it does the right things in those cases. */
5445
5446 mode = GET_MODE (old);
5447 if (mode == VOIDmode)
5448 mode = reload_inmode[j];
32131a9c
RK
5449
5450#ifdef SECONDARY_INPUT_RELOAD_CLASS
5451 /* If we need a secondary register for this operation, see if
5452 the value is already in a register in that class. Don't
5453 do this if the secondary register will be used as a scratch
5454 register. */
5455
5456 if (reload_secondary_reload[j] >= 0
58b1581b
RS
5457 && reload_secondary_icode[j] == CODE_FOR_nothing
5458 && optimize)
32131a9c
RK
5459 oldequiv
5460 = find_equiv_reg (old, insn,
5461 reload_reg_class[reload_secondary_reload[j]],
fb3821f7 5462 -1, NULL_PTR, 0, mode);
32131a9c
RK
5463#endif
5464
5465 /* If reloading from memory, see if there is a register
5466 that already holds the same value. If so, reload from there.
5467 We can pass 0 as the reload_reg_p argument because
5468 any other reload has either already been emitted,
5469 in which case find_equiv_reg will see the reload-insn,
5470 or has yet to be emitted, in which case it doesn't matter
5471 because we will use this equiv reg right away. */
5472
58b1581b 5473 if (oldequiv == 0 && optimize
32131a9c
RK
5474 && (GET_CODE (old) == MEM
5475 || (GET_CODE (old) == REG
5476 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5477 && reg_renumber[REGNO (old)] < 0)))
546b63fb 5478 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
fb3821f7 5479 -1, NULL_PTR, 0, mode);
32131a9c
RK
5480
5481 if (oldequiv)
5482 {
5483 int regno = true_regnum (oldequiv);
5484
5485 /* If OLDEQUIV is a spill register, don't use it for this
5486 if any other reload needs it at an earlier stage of this insn
a8fdc208 5487 or at this stage. */
32131a9c 5488 if (spill_reg_order[regno] >= 0
546b63fb
RK
5489 && (! reload_reg_free_p (regno, reload_opnum[j],
5490 reload_when_needed[j])
5491 || ! reload_reg_free_before_p (regno, reload_opnum[j],
32131a9c
RK
5492 reload_when_needed[j])))
5493 oldequiv = 0;
5494
5495 /* If OLDEQUIV is not a spill register,
5496 don't use it if any other reload wants it. */
5497 if (spill_reg_order[regno] < 0)
5498 {
5499 int k;
5500 for (k = 0; k < n_reloads; k++)
5501 if (reload_reg_rtx[k] != 0 && k != j
bfa30b22
RK
5502 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5503 oldequiv))
32131a9c
RK
5504 {
5505 oldequiv = 0;
5506 break;
5507 }
5508 }
546b63fb
RK
5509
5510 /* If it is no cheaper to copy from OLDEQUIV into the
5511 reload register than it would be to move from memory,
5512 don't use it. Likewise, if we need a secondary register
5513 or memory. */
5514
5515 if (oldequiv != 0
5516 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5517 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5518 reload_reg_class[j])
5519 >= MEMORY_MOVE_COST (mode)))
5520#ifdef SECONDARY_INPUT_RELOAD_CLASS
5521 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5522 mode, oldequiv)
5523 != NO_REGS)
5524#endif
5525#ifdef SECONDARY_MEMORY_NEEDED
5526 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5527 REGNO_REG_CLASS (regno),
5528 mode)
5529#endif
5530 ))
5531 oldequiv = 0;
32131a9c
RK
5532 }
5533
5534 if (oldequiv == 0)
5535 oldequiv = old;
5536 else if (GET_CODE (oldequiv) == REG)
5537 oldequiv_reg = oldequiv;
5538 else if (GET_CODE (oldequiv) == SUBREG)
5539 oldequiv_reg = SUBREG_REG (oldequiv);
5540
5541 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
3abe6f90
RK
5542 then load RELOADREG from OLDEQUIV. Note that we cannot use
5543 gen_lowpart_common since it can do the wrong thing when
5544 RELOADREG has a multi-word mode. Note that RELOADREG
5545 must always be a REG here. */
32131a9c
RK
5546
5547 if (GET_MODE (reloadreg) != mode)
3abe6f90 5548 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
32131a9c
RK
5549 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5550 oldequiv = SUBREG_REG (oldequiv);
5551 if (GET_MODE (oldequiv) != VOIDmode
5552 && mode != GET_MODE (oldequiv))
5553 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5554
546b63fb 5555 /* Switch to the right place to emit the reload insns. */
32131a9c
RK
5556 switch (reload_when_needed[j])
5557 {
32131a9c 5558 case RELOAD_OTHER:
546b63fb
RK
5559 where = &other_input_reload_insns;
5560 break;
5561 case RELOAD_FOR_INPUT:
5562 where = &input_reload_insns[reload_opnum[j]];
32131a9c 5563 break;
546b63fb
RK
5564 case RELOAD_FOR_INPUT_ADDRESS:
5565 where = &input_address_reload_insns[reload_opnum[j]];
32131a9c 5566 break;
546b63fb
RK
5567 case RELOAD_FOR_OUTPUT_ADDRESS:
5568 where = &output_address_reload_insns[reload_opnum[j]];
32131a9c
RK
5569 break;
5570 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
5571 where = &operand_reload_insns;
5572 break;
5573 case RELOAD_FOR_OTHER_ADDRESS:
5574 where = &other_input_address_reload_insns;
5575 break;
5576 default:
5577 abort ();
32131a9c
RK
5578 }
5579
546b63fb 5580 push_to_sequence (*where);
32131a9c
RK
5581 special = 0;
5582
5583 /* Auto-increment addresses must be reloaded in a special way. */
5584 if (GET_CODE (oldequiv) == POST_INC
5585 || GET_CODE (oldequiv) == POST_DEC
5586 || GET_CODE (oldequiv) == PRE_INC
5587 || GET_CODE (oldequiv) == PRE_DEC)
5588 {
5589 /* We are not going to bother supporting the case where a
5590 incremented register can't be copied directly from
5591 OLDEQUIV since this seems highly unlikely. */
5592 if (reload_secondary_reload[j] >= 0)
5593 abort ();
5594 /* Prevent normal processing of this reload. */
5595 special = 1;
5596 /* Output a special code sequence for this case. */
546b63fb 5597 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
32131a9c
RK
5598 }
5599
5600 /* If we are reloading a pseudo-register that was set by the previous
5601 insn, see if we can get rid of that pseudo-register entirely
5602 by redirecting the previous insn into our reload register. */
5603
5604 else if (optimize && GET_CODE (old) == REG
5605 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5606 && dead_or_set_p (insn, old)
5607 /* This is unsafe if some other reload
5608 uses the same reg first. */
546b63fb
RK
5609 && reload_reg_free_before_p (REGNO (reloadreg),
5610 reload_opnum[j],
5611 reload_when_needed[j]))
32131a9c
RK
5612 {
5613 rtx temp = PREV_INSN (insn);
5614 while (temp && GET_CODE (temp) == NOTE)
5615 temp = PREV_INSN (temp);
5616 if (temp
5617 && GET_CODE (temp) == INSN
5618 && GET_CODE (PATTERN (temp)) == SET
5619 && SET_DEST (PATTERN (temp)) == old
5620 /* Make sure we can access insn_operand_constraint. */
5621 && asm_noperands (PATTERN (temp)) < 0
5622 /* This is unsafe if prev insn rejects our reload reg. */
5623 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5624 reloadreg)
5625 /* This is unsafe if operand occurs more than once in current
5626 insn. Perhaps some occurrences aren't reloaded. */
5627 && count_occurrences (PATTERN (insn), old) == 1
5628 /* Don't risk splitting a matching pair of operands. */
5629 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5630 {
5631 /* Store into the reload register instead of the pseudo. */
5632 SET_DEST (PATTERN (temp)) = reloadreg;
5633 /* If these are the only uses of the pseudo reg,
5634 pretend for GDB it lives in the reload reg we used. */
5635 if (reg_n_deaths[REGNO (old)] == 1
5636 && reg_n_sets[REGNO (old)] == 1)
5637 {
5638 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5639 alter_reg (REGNO (old), -1);
5640 }
5641 special = 1;
5642 }
5643 }
5644
546b63fb
RK
5645 /* We can't do that, so output an insn to load RELOADREG. */
5646
32131a9c
RK
5647 if (! special)
5648 {
5649#ifdef SECONDARY_INPUT_RELOAD_CLASS
5650 rtx second_reload_reg = 0;
5651 enum insn_code icode;
5652
5653 /* If we have a secondary reload, pick up the secondary register
d445b551
RK
5654 and icode, if any. If OLDEQUIV and OLD are different or
5655 if this is an in-out reload, recompute whether or not we
5656 still need a secondary register and what the icode should
5657 be. If we still need a secondary register and the class or
5658 icode is different, go back to reloading from OLD if using
5659 OLDEQUIV means that we got the wrong type of register. We
5660 cannot have different class or icode due to an in-out reload
5661 because we don't make such reloads when both the input and
5662 output need secondary reload registers. */
32131a9c
RK
5663
5664 if (reload_secondary_reload[j] >= 0)
5665 {
5666 int secondary_reload = reload_secondary_reload[j];
1554c2c6
RK
5667 rtx real_oldequiv = oldequiv;
5668 rtx real_old = old;
5669
5670 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5671 and similarly for OLD.
5672 See comments in find_secondary_reload in reload.c. */
5673 if (GET_CODE (oldequiv) == REG
5674 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5675 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5676 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5677
5678 if (GET_CODE (old) == REG
5679 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5680 && reg_equiv_mem[REGNO (old)] != 0)
5681 real_old = reg_equiv_mem[REGNO (old)];
5682
32131a9c
RK
5683 second_reload_reg = reload_reg_rtx[secondary_reload];
5684 icode = reload_secondary_icode[j];
5685
d445b551
RK
5686 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5687 || (reload_in[j] != 0 && reload_out[j] != 0))
32131a9c
RK
5688 {
5689 enum reg_class new_class
5690 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
1554c2c6 5691 mode, real_oldequiv);
32131a9c
RK
5692
5693 if (new_class == NO_REGS)
5694 second_reload_reg = 0;
5695 else
5696 {
5697 enum insn_code new_icode;
5698 enum machine_mode new_mode;
5699
5700 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5701 REGNO (second_reload_reg)))
1554c2c6 5702 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
5703 else
5704 {
5705 new_icode = reload_in_optab[(int) mode];
5706 if (new_icode != CODE_FOR_nothing
5707 && ((insn_operand_predicate[(int) new_icode][0]
a8fdc208 5708 && ! ((*insn_operand_predicate[(int) new_icode][0])
32131a9c 5709 (reloadreg, mode)))
a8fdc208
RS
5710 || (insn_operand_predicate[(int) new_icode][1]
5711 && ! ((*insn_operand_predicate[(int) new_icode][1])
1554c2c6 5712 (real_oldequiv, mode)))))
32131a9c
RK
5713 new_icode = CODE_FOR_nothing;
5714
5715 if (new_icode == CODE_FOR_nothing)
5716 new_mode = mode;
5717 else
196ddf8a 5718 new_mode = insn_operand_mode[(int) new_icode][2];
32131a9c
RK
5719
5720 if (GET_MODE (second_reload_reg) != new_mode)
5721 {
5722 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5723 new_mode))
1554c2c6 5724 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
5725 else
5726 second_reload_reg
3aaa90c7
MM
5727 = gen_rtx (REG, new_mode,
5728 REGNO (second_reload_reg));
32131a9c
RK
5729 }
5730 }
5731 }
5732 }
5733
5734 /* If we still need a secondary reload register, check
5735 to see if it is being used as a scratch or intermediate
1554c2c6
RK
5736 register and generate code appropriately. If we need
5737 a scratch register, use REAL_OLDEQUIV since the form of
5738 the insn may depend on the actual address if it is
5739 a MEM. */
32131a9c
RK
5740
5741 if (second_reload_reg)
5742 {
5743 if (icode != CODE_FOR_nothing)
5744 {
546b63fb
RK
5745 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
5746 second_reload_reg));
32131a9c
RK
5747 special = 1;
5748 }
5749 else
5750 {
5751 /* See if we need a scratch register to load the
5752 intermediate register (a tertiary reload). */
5753 enum insn_code tertiary_icode
5754 = reload_secondary_icode[secondary_reload];
5755
5756 if (tertiary_icode != CODE_FOR_nothing)
5757 {
5758 rtx third_reload_reg
5759 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5760
546b63fb
RK
5761 emit_insn ((GEN_FCN (tertiary_icode)
5762 (second_reload_reg, real_oldequiv,
5763 third_reload_reg)));
32131a9c
RK
5764 }
5765 else
546b63fb
RK
5766 gen_input_reload (second_reload_reg, oldequiv,
5767 reload_opnum[j],
5768 reload_when_needed[j]);
5769
5770 oldequiv = second_reload_reg;
32131a9c
RK
5771 }
5772 }
5773 }
5774#endif
5775
5776 if (! special)
546b63fb
RK
5777 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
5778 reload_when_needed[j]);
32131a9c
RK
5779
5780#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5781 /* We may have to make a REG_DEAD note for the secondary reload
5782 register in the insns we just made. Find the last insn that
5783 mentioned the register. */
5784 if (! special && second_reload_reg
5785 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5786 {
5787 rtx prev;
5788
546b63fb 5789 for (prev = get_last_insn (); prev;
32131a9c
RK
5790 prev = PREV_INSN (prev))
5791 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
bfa30b22
RK
5792 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5793 PATTERN (prev)))
32131a9c
RK
5794 {
5795 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5796 second_reload_reg,
5797 REG_NOTES (prev));
5798 break;
5799 }
5800 }
5801#endif
5802 }
5803
546b63fb
RK
5804 /* End this sequence. */
5805 *where = get_insns ();
5806 end_sequence ();
32131a9c
RK
5807 }
5808
5809 /* Add a note saying the input reload reg
5810 dies in this insn, if anyone cares. */
5811#ifdef PRESERVE_DEATH_INFO_REGNO_P
5812 if (old != 0
5813 && reload_reg_rtx[j] != old
5814 && reload_reg_rtx[j] != 0
5815 && reload_out[j] == 0
5816 && ! reload_inherited[j]
5817 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
5818 {
5819 register rtx reloadreg = reload_reg_rtx[j];
5820
a8fdc208 5821#if 0
32131a9c
RK
5822 /* We can't abort here because we need to support this for sched.c.
5823 It's not terrible to miss a REG_DEAD note, but we should try
5824 to figure out how to do this correctly. */
5825 /* The code below is incorrect for address-only reloads. */
5826 if (reload_when_needed[j] != RELOAD_OTHER
5827 && reload_when_needed[j] != RELOAD_FOR_INPUT)
5828 abort ();
5829#endif
5830
5831 /* Add a death note to this insn, for an input reload. */
5832
5833 if ((reload_when_needed[j] == RELOAD_OTHER
5834 || reload_when_needed[j] == RELOAD_FOR_INPUT)
5835 && ! dead_or_set_p (insn, reloadreg))
5836 REG_NOTES (insn)
5837 = gen_rtx (EXPR_LIST, REG_DEAD,
5838 reloadreg, REG_NOTES (insn));
5839 }
5840
5841 /* When we inherit a reload, the last marked death of the reload reg
5842 may no longer really be a death. */
5843 if (reload_reg_rtx[j] != 0
5844 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
5845 && reload_inherited[j])
5846 {
5847 /* Handle inheriting an output reload.
5848 Remove the death note from the output reload insn. */
5849 if (reload_spill_index[j] >= 0
5850 && GET_CODE (reload_in[j]) == REG
5851 && spill_reg_store[reload_spill_index[j]] != 0
5852 && find_regno_note (spill_reg_store[reload_spill_index[j]],
5853 REG_DEAD, REGNO (reload_reg_rtx[j])))
5854 remove_death (REGNO (reload_reg_rtx[j]),
5855 spill_reg_store[reload_spill_index[j]]);
5856 /* Likewise for input reloads that were inherited. */
5857 else if (reload_spill_index[j] >= 0
5858 && GET_CODE (reload_in[j]) == REG
5859 && spill_reg_store[reload_spill_index[j]] == 0
5860 && reload_inheritance_insn[j] != 0
a8fdc208 5861 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
32131a9c
RK
5862 REGNO (reload_reg_rtx[j])))
5863 remove_death (REGNO (reload_reg_rtx[j]),
5864 reload_inheritance_insn[j]);
5865 else
5866 {
5867 rtx prev;
5868
5869 /* We got this register from find_equiv_reg.
5870 Search back for its last death note and get rid of it.
5871 But don't search back too far.
5872 Don't go past a place where this reg is set,
5873 since a death note before that remains valid. */
5874 for (prev = PREV_INSN (insn);
5875 prev && GET_CODE (prev) != CODE_LABEL;
5876 prev = PREV_INSN (prev))
5877 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5878 && dead_or_set_p (prev, reload_reg_rtx[j]))
5879 {
5880 if (find_regno_note (prev, REG_DEAD,
5881 REGNO (reload_reg_rtx[j])))
5882 remove_death (REGNO (reload_reg_rtx[j]), prev);
5883 break;
5884 }
5885 }
5886 }
5887
5888 /* We might have used find_equiv_reg above to choose an alternate
5889 place from which to reload. If so, and it died, we need to remove
5890 that death and move it to one of the insns we just made. */
5891
5892 if (oldequiv_reg != 0
5893 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
5894 {
5895 rtx prev, prev1;
5896
5897 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
5898 prev = PREV_INSN (prev))
5899 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5900 && dead_or_set_p (prev, oldequiv_reg))
5901 {
5902 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
5903 {
5904 for (prev1 = this_reload_insn;
5905 prev1; prev1 = PREV_INSN (prev1))
5906 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
bfa30b22
RK
5907 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
5908 PATTERN (prev1)))
32131a9c
RK
5909 {
5910 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
5911 oldequiv_reg,
5912 REG_NOTES (prev1));
5913 break;
5914 }
5915 remove_death (REGNO (oldequiv_reg), prev);
5916 }
5917 break;
5918 }
5919 }
5920#endif
5921
5922 /* If we are reloading a register that was recently stored in with an
5923 output-reload, see if we can prove there was
5924 actually no need to store the old value in it. */
5925
5926 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
546b63fb 5927 && reload_in[j] != 0
32131a9c
RK
5928 && GET_CODE (reload_in[j]) == REG
5929#if 0
5930 /* There doesn't seem to be any reason to restrict this to pseudos
5931 and doing so loses in the case where we are copying from a
5932 register of the wrong class. */
5933 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
5934#endif
5935 && spill_reg_store[reload_spill_index[j]] != 0
546b63fb
RK
5936 /* This is unsafe if some other reload uses the same reg first. */
5937 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
5938 reload_opnum[j], reload_when_needed[j])
32131a9c
RK
5939 && dead_or_set_p (insn, reload_in[j])
5940 /* This is unsafe if operand occurs more than once in current
5941 insn. Perhaps some occurrences weren't reloaded. */
5942 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5943 delete_output_reload (insn, j,
5944 spill_reg_store[reload_spill_index[j]]);
5945
5946 /* Input-reloading is done. Now do output-reloading,
5947 storing the value from the reload-register after the main insn
5948 if reload_out[j] is nonzero.
5949
5950 ??? At some point we need to support handling output reloads of
5951 JUMP_INSNs or insns that set cc0. */
5952 old = reload_out[j];
5953 if (old != 0
5954 && reload_reg_rtx[j] != old
5955 && reload_reg_rtx[j] != 0)
5956 {
5957 register rtx reloadreg = reload_reg_rtx[j];
5958 register rtx second_reloadreg = 0;
32131a9c
RK
5959 rtx note, p;
5960 enum machine_mode mode;
5961 int special = 0;
5962
5963 /* An output operand that dies right away does need a reload,
5964 but need not be copied from it. Show the new location in the
5965 REG_UNUSED note. */
5966 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
5967 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
5968 {
5969 XEXP (note, 0) = reload_reg_rtx[j];
5970 continue;
5971 }
5972 else if (GET_CODE (old) == SCRATCH)
5973 /* If we aren't optimizing, there won't be a REG_UNUSED note,
5974 but we don't want to make an output reload. */
5975 continue;
5976
5977#if 0
5978 /* Strip off of OLD any size-increasing SUBREGs such as
5979 (SUBREG:SI foo:QI 0). */
5980
5981 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
5982 && (GET_MODE_SIZE (GET_MODE (old))
5983 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
5984 old = SUBREG_REG (old);
5985#endif
5986
5987 /* If is a JUMP_INSN, we can't support output reloads yet. */
5988 if (GET_CODE (insn) == JUMP_INSN)
5989 abort ();
5990
546b63fb
RK
5991 push_to_sequence (output_reload_insns[reload_opnum[j]]);
5992
32131a9c
RK
5993 /* Determine the mode to reload in.
5994 See comments above (for input reloading). */
5995
5996 mode = GET_MODE (old);
5997 if (mode == VOIDmode)
79a365a7
RS
5998 {
5999 /* VOIDmode should never happen for an output. */
6000 if (asm_noperands (PATTERN (insn)) < 0)
6001 /* It's the compiler's fault. */
6002 abort ();
6003 error_for_asm (insn, "output operand is constant in `asm'");
6004 /* Prevent crash--use something we know is valid. */
6005 mode = word_mode;
6006 old = gen_rtx (REG, mode, REGNO (reloadreg));
6007 }
32131a9c 6008
32131a9c 6009 if (GET_MODE (reloadreg) != mode)
3abe6f90 6010 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
32131a9c
RK
6011
6012#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6013
6014 /* If we need two reload regs, set RELOADREG to the intermediate
6015 one, since it will be stored into OUT. We might need a secondary
6016 register only for an input reload, so check again here. */
6017
1554c2c6 6018 if (reload_secondary_reload[j] >= 0)
32131a9c 6019 {
1554c2c6 6020 rtx real_old = old;
32131a9c 6021
1554c2c6
RK
6022 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6023 && reg_equiv_mem[REGNO (old)] != 0)
6024 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 6025
1554c2c6
RK
6026 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6027 mode, real_old)
6028 != NO_REGS))
6029 {
6030 second_reloadreg = reloadreg;
6031 reloadreg = reload_reg_rtx[reload_secondary_reload[j]];
32131a9c 6032
1554c2c6
RK
6033 /* See if RELOADREG is to be used as a scratch register
6034 or as an intermediate register. */
6035 if (reload_secondary_icode[j] != CODE_FOR_nothing)
32131a9c 6036 {
546b63fb
RK
6037 emit_insn ((GEN_FCN (reload_secondary_icode[j])
6038 (real_old, second_reloadreg, reloadreg)));
1554c2c6 6039 special = 1;
32131a9c
RK
6040 }
6041 else
1554c2c6
RK
6042 {
6043 /* See if we need both a scratch and intermediate reload
6044 register. */
6045 int secondary_reload = reload_secondary_reload[j];
6046 enum insn_code tertiary_icode
6047 = reload_secondary_icode[secondary_reload];
6048 rtx pat;
32131a9c 6049
1554c2c6
RK
6050 if (GET_MODE (reloadreg) != mode)
6051 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6052
6053 if (tertiary_icode != CODE_FOR_nothing)
6054 {
6055 rtx third_reloadreg
6056 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
6057 pat = (GEN_FCN (tertiary_icode)
6058 (reloadreg, second_reloadreg, third_reloadreg));
6059 }
9ad5f9f6
JW
6060#ifdef SECONDARY_MEMORY_NEEDED
6061 /* If we need a memory location to do the move, do it that way. */
6062 else if (GET_CODE (reloadreg) == REG
6063 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
6064 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
6065 REGNO_REG_CLASS (REGNO (second_reloadreg)),
6066 GET_MODE (second_reloadreg)))
6067 {
6068 /* Get the memory to use and rewrite both registers
6069 to its mode. */
546b63fb
RK
6070 rtx loc
6071 = get_secondary_mem (reloadreg,
6072 GET_MODE (second_reloadreg),
6073 reload_opnum[j],
6074 reload_when_needed[j]);
9ad5f9f6
JW
6075 rtx tmp_reloadreg;
6076
6077 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6078 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6079 REGNO (second_reloadreg));
6080
6081 if (GET_MODE (loc) != GET_MODE (reloadreg))
6082 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6083 REGNO (reloadreg));
6084 else
6085 tmp_reloadreg = reloadreg;
6086
546b63fb 6087 emit_move_insn (loc, second_reloadreg);
9ad5f9f6
JW
6088 pat = gen_move_insn (tmp_reloadreg, loc);
6089 }
6090#endif
1554c2c6
RK
6091 else
6092 pat = gen_move_insn (reloadreg, second_reloadreg);
6093
546b63fb 6094 emit_insn (pat);
1554c2c6 6095 }
32131a9c
RK
6096 }
6097 }
6098#endif
6099
6100 /* Output the last reload insn. */
6101 if (! special)
0dadecf6
RK
6102 {
6103#ifdef SECONDARY_MEMORY_NEEDED
6104 /* If we need a memory location to do the move, do it that way. */
6105 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6106 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6107 REGNO_REG_CLASS (REGNO (reloadreg)),
6108 GET_MODE (reloadreg)))
6109 {
6110 /* Get the memory to use and rewrite both registers to
6111 its mode. */
546b63fb
RK
6112 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6113 reload_opnum[j],
6114 reload_when_needed[j]);
0dadecf6
RK
6115
6116 if (GET_MODE (loc) != GET_MODE (reloadreg))
6117 reloadreg = gen_rtx (REG, GET_MODE (loc),
6118 REGNO (reloadreg));
6119
6120 if (GET_MODE (loc) != GET_MODE (old))
6121 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6122
546b63fb
RK
6123 emit_insn (gen_move_insn (loc, reloadreg));
6124 emit_insn (gen_move_insn (old, loc));
0dadecf6
RK
6125 }
6126 else
6127#endif
546b63fb 6128 emit_insn (gen_move_insn (old, reloadreg));
0dadecf6 6129 }
32131a9c
RK
6130
6131#ifdef PRESERVE_DEATH_INFO_REGNO_P
6132 /* If final will look at death notes for this reg,
6133 put one on the last output-reload insn to use it. Similarly
6134 for any secondary register. */
6135 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
546b63fb 6136 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6137 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
6138 && reg_overlap_mentioned_for_reload_p (reloadreg,
6139 PATTERN (p)))
32131a9c
RK
6140 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6141 reloadreg, REG_NOTES (p));
6142
6143#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6144 if (! special
6145 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
546b63fb 6146 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6147 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
6148 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6149 PATTERN (p)))
32131a9c
RK
6150 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6151 second_reloadreg, REG_NOTES (p));
6152#endif
6153#endif
6154 /* Look at all insns we emitted, just to be safe. */
546b63fb 6155 for (p = get_insns (); p; p = NEXT_INSN (p))
32131a9c
RK
6156 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6157 {
6158 /* If this output reload doesn't come from a spill reg,
6159 clear any memory of reloaded copies of the pseudo reg.
6160 If this output reload comes from a spill reg,
6161 reg_has_output_reload will make this do nothing. */
6162 note_stores (PATTERN (p), forget_old_reloads_1);
6163
6164 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6165 store_insn = p;
6166 }
6167
546b63fb
RK
6168 output_reload_insns[reload_opnum[j]] = get_insns ();
6169 end_sequence ();
6170
32131a9c
RK
6171 }
6172
6173 if (reload_spill_index[j] >= 0)
6174 new_spill_reg_store[reload_spill_index[j]] = store_insn;
6175 }
6176
546b63fb
RK
6177 /* Now write all the insns we made for reloads in the order expected by
6178 the allocation functions. Prior to the insn being reloaded, we write
6179 the following reloads:
6180
6181 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6182
6183 RELOAD_OTHER reloads.
6184
6185 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6186 the RELOAD_FOR_INPUT reload for the operand.
6187
6188 RELOAD_FOR_OPERAND_ADDRESS reloads.
6189
6190 After the insn being reloaded, we write the following:
6191
6192 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6193 the RELOAD_FOR_OUTPUT reload for that operand. */
6194
6195 emit_insns_before (other_input_address_reload_insns, before_insn);
6196 emit_insns_before (other_input_reload_insns, before_insn);
6197
6198 for (j = 0; j < reload_n_operands; j++)
6199 {
6200 emit_insns_before (input_address_reload_insns[j], before_insn);
6201 emit_insns_before (input_reload_insns[j], before_insn);
6202 }
6203
6204 emit_insns_before (operand_reload_insns, before_insn);
6205
6206 for (j = 0; j < reload_n_operands; j++)
6207 {
6208 emit_insns_before (output_address_reload_insns[j], following_insn);
6209 emit_insns_before (output_reload_insns[j], following_insn);
6210 }
6211
32131a9c
RK
6212 /* Move death notes from INSN
6213 to output-operand-address and output reload insns. */
6214#ifdef PRESERVE_DEATH_INFO_REGNO_P
6215 {
6216 rtx insn1;
6217 /* Loop over those insns, last ones first. */
6218 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6219 insn1 = PREV_INSN (insn1))
6220 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6221 {
6222 rtx source = SET_SRC (PATTERN (insn1));
6223 rtx dest = SET_DEST (PATTERN (insn1));
6224
6225 /* The note we will examine next. */
6226 rtx reg_notes = REG_NOTES (insn);
6227 /* The place that pointed to this note. */
6228 rtx *prev_reg_note = &REG_NOTES (insn);
6229
6230 /* If the note is for something used in the source of this
6231 reload insn, or in the output address, move the note. */
6232 while (reg_notes)
6233 {
6234 rtx next_reg_notes = XEXP (reg_notes, 1);
6235 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6236 && GET_CODE (XEXP (reg_notes, 0)) == REG
6237 && ((GET_CODE (dest) != REG
bfa30b22
RK
6238 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6239 dest))
6240 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6241 source)))
32131a9c
RK
6242 {
6243 *prev_reg_note = next_reg_notes;
6244 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6245 REG_NOTES (insn1) = reg_notes;
6246 }
6247 else
6248 prev_reg_note = &XEXP (reg_notes, 1);
6249
6250 reg_notes = next_reg_notes;
6251 }
6252 }
6253 }
6254#endif
6255
6256 /* For all the spill regs newly reloaded in this instruction,
6257 record what they were reloaded from, so subsequent instructions
d445b551
RK
6258 can inherit the reloads.
6259
6260 Update spill_reg_store for the reloads of this insn.
e9e79d69 6261 Copy the elements that were updated in the loop above. */
32131a9c
RK
6262
6263 for (j = 0; j < n_reloads; j++)
6264 {
6265 register int r = reload_order[j];
6266 register int i = reload_spill_index[r];
6267
6268 /* I is nonneg if this reload used one of the spill regs.
6269 If reload_reg_rtx[r] is 0, this is an optional reload
546b63fb
RK
6270 that we opted to ignore.
6271
6272 Also ignore reloads that don't reach the end of the insn,
6273 since we will eventually see the one that does. */
d445b551 6274
546b63fb
RK
6275 if (i >= 0 && reload_reg_rtx[r] != 0
6276 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6277 reload_when_needed[r]))
32131a9c
RK
6278 {
6279 /* First, clear out memory of what used to be in this spill reg.
6280 If consecutive registers are used, clear them all. */
6281 int nr
6282 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6283 int k;
6284
6285 for (k = 0; k < nr; k++)
6286 {
6287 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6288 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6289 }
6290
6291 /* Maybe the spill reg contains a copy of reload_out. */
6292 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6293 {
6294 register int nregno = REGNO (reload_out[r]);
d08ea79f
RK
6295 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6296 : HARD_REGNO_NREGS (nregno,
6297 GET_MODE (reload_reg_rtx[r])));
d445b551
RK
6298
6299 spill_reg_store[i] = new_spill_reg_store[i];
32131a9c 6300 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 6301
d08ea79f
RK
6302 /* If NREGNO is a hard register, it may occupy more than
6303 one register. If it does, say what is in the
6304 rest of the registers assuming that both registers
6305 agree on how many words the object takes. If not,
6306 invalidate the subsequent registers. */
6307
6308 if (nregno < FIRST_PSEUDO_REGISTER)
6309 for (k = 1; k < nnr; k++)
6310 reg_last_reload_reg[nregno + k]
6311 = (nr == nnr ? gen_rtx (REG, word_mode,
6312 REGNO (reload_reg_rtx[r]) + k)
6313 : 0);
6314
6315 /* Now do the inverse operation. */
32131a9c
RK
6316 for (k = 0; k < nr; k++)
6317 {
6318 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
d08ea79f
RK
6319 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6320 : nregno + k);
32131a9c
RK
6321 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6322 }
6323 }
d445b551 6324
2c9ce2ef
RK
6325 /* Maybe the spill reg contains a copy of reload_in. Only do
6326 something if there will not be an output reload for
6327 the register being reloaded. */
32131a9c
RK
6328 else if (reload_out[r] == 0
6329 && reload_in[r] != 0
2c9ce2ef
RK
6330 && ((GET_CODE (reload_in[r]) == REG
6331 && ! reg_has_output_reload[REGNO (reload_in[r])]
6332 || (GET_CODE (reload_in_reg[r]) == REG
6333 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
32131a9c
RK
6334 {
6335 register int nregno;
d08ea79f
RK
6336 int nnr;
6337
32131a9c
RK
6338 if (GET_CODE (reload_in[r]) == REG)
6339 nregno = REGNO (reload_in[r]);
6340 else
6341 nregno = REGNO (reload_in_reg[r]);
6342
d08ea79f
RK
6343 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6344 : HARD_REGNO_NREGS (nregno,
6345 GET_MODE (reload_reg_rtx[r])));
6346
546b63fb 6347 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 6348
d08ea79f
RK
6349 if (nregno < FIRST_PSEUDO_REGISTER)
6350 for (k = 1; k < nnr; k++)
6351 reg_last_reload_reg[nregno + k]
6352 = (nr == nnr ? gen_rtx (REG, word_mode,
6353 REGNO (reload_reg_rtx[r]) + k)
6354 : 0);
6355
546b63fb
RK
6356 /* Unless we inherited this reload, show we haven't
6357 recently done a store. */
6358 if (! reload_inherited[r])
6359 spill_reg_store[i] = 0;
d445b551 6360
546b63fb
RK
6361 for (k = 0; k < nr; k++)
6362 {
6363 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
d08ea79f
RK
6364 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6365 : nregno + k);
546b63fb
RK
6366 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6367 = insn;
32131a9c
RK
6368 }
6369 }
6370 }
6371
6372 /* The following if-statement was #if 0'd in 1.34 (or before...).
6373 It's reenabled in 1.35 because supposedly nothing else
6374 deals with this problem. */
6375
6376 /* If a register gets output-reloaded from a non-spill register,
6377 that invalidates any previous reloaded copy of it.
6378 But forget_old_reloads_1 won't get to see it, because
6379 it thinks only about the original insn. So invalidate it here. */
6380 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6381 {
6382 register int nregno = REGNO (reload_out[r]);
6383 reg_last_reload_reg[nregno] = 0;
6384 }
6385 }
6386}
6387\f
546b63fb
RK
6388/* Emit code to perform an input reload of IN to RELOADREG. IN is from
6389 operand OPNUM with reload type TYPE.
6390
3c3eeea6 6391 Returns first insn emitted. */
32131a9c
RK
6392
6393rtx
546b63fb 6394gen_input_reload (reloadreg, in, opnum, type)
32131a9c
RK
6395 rtx reloadreg;
6396 rtx in;
546b63fb
RK
6397 int opnum;
6398 enum reload_type type;
32131a9c 6399{
546b63fb 6400 rtx last = get_last_insn ();
32131a9c 6401
a8fdc208 6402 /* How to do this reload can get quite tricky. Normally, we are being
32131a9c
RK
6403 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6404 register that didn't get a hard register. In that case we can just
6405 call emit_move_insn.
6406
3002e160
JW
6407 We can also be asked to reload a PLUS that adds either two registers, or
6408 a register and a constant or MEM, or a MEM and a constant. This can
6409 occur during frame pointer elimination and while reloading addresses.
6410 This case is handled by trying to emit a single insn
32131a9c
RK
6411 to perform the add. If it is not valid, we use a two insn sequence.
6412
6413 Finally, we could be called to handle an 'o' constraint by putting
6414 an address into a register. In that case, we first try to do this
6415 with a named pattern of "reload_load_address". If no such pattern
6416 exists, we just emit a SET insn and hope for the best (it will normally
6417 be valid on machines that use 'o').
6418
6419 This entire process is made complex because reload will never
6420 process the insns we generate here and so we must ensure that
6421 they will fit their constraints and also by the fact that parts of
6422 IN might be being reloaded separately and replaced with spill registers.
6423 Because of this, we are, in some sense, just guessing the right approach
6424 here. The one listed above seems to work.
6425
6426 ??? At some point, this whole thing needs to be rethought. */
6427
6428 if (GET_CODE (in) == PLUS
3002e160
JW
6429 && ((GET_CODE (XEXP (in, 0)) == REG
6430 && (GET_CODE (XEXP (in, 1)) == REG
6431 || CONSTANT_P (XEXP (in, 1))
6432 || GET_CODE (XEXP (in, 1)) == MEM))
6433 || (GET_CODE (XEXP (in, 0)) == MEM
6434 && CONSTANT_P (XEXP (in, 1)))))
32131a9c
RK
6435 {
6436 /* We need to compute the sum of what is either a register and a
3002e160
JW
6437 constant, a register and memory, a hard register and a pseudo
6438 register, or memory and a constant and put it into the reload
6439 register. The best possible way of doing this is if the machine
6440 has a three-operand ADD insn that accepts the required operands.
32131a9c
RK
6441
6442 The simplest approach is to try to generate such an insn and see if it
6443 is recognized and matches its constraints. If so, it can be used.
6444
6445 It might be better not to actually emit the insn unless it is valid,
0009eff2 6446 but we need to pass the insn as an operand to `recog' and
b36d7dd7 6447 `insn_extract' and it is simpler to emit and then delete the insn if
0009eff2 6448 not valid than to dummy things up. */
a8fdc208 6449
af929c62 6450 rtx op0, op1, tem, insn;
32131a9c 6451 int code;
a8fdc208 6452
af929c62
RK
6453 op0 = find_replacement (&XEXP (in, 0));
6454 op1 = find_replacement (&XEXP (in, 1));
6455
32131a9c
RK
6456 /* Since constraint checking is strict, commutativity won't be
6457 checked, so we need to do that here to avoid spurious failure
6458 if the add instruction is two-address and the second operand
6459 of the add is the same as the reload reg, which is frequently
6460 the case. If the insn would be A = B + A, rearrange it so
6461 it will be A = A + B as constrain_operands expects. */
a8fdc208 6462
32131a9c
RK
6463 if (GET_CODE (XEXP (in, 1)) == REG
6464 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
af929c62
RK
6465 tem = op0, op0 = op1, op1 = tem;
6466
6467 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6468 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
32131a9c 6469
546b63fb 6470 insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
32131a9c
RK
6471 code = recog_memoized (insn);
6472
6473 if (code >= 0)
6474 {
6475 insn_extract (insn);
6476 /* We want constrain operands to treat this insn strictly in
6477 its validity determination, i.e., the way it would after reload
6478 has completed. */
6479 if (constrain_operands (code, 1))
6480 return insn;
6481 }
6482
546b63fb 6483 delete_insns_since (last);
32131a9c
RK
6484
6485 /* If that failed, we must use a conservative two-insn sequence.
6486 use move to copy constant, MEM, or pseudo register to the reload
af929c62
RK
6487 register since "move" will be able to handle an arbitrary operand,
6488 unlike add which can't, in general. Then add the registers.
32131a9c
RK
6489
6490 If there is another way to do this for a specific machine, a
6491 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6492 we emit below. */
6493
af929c62
RK
6494 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6495 || (GET_CODE (op1) == REG
6496 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6497 tem = op0, op0 = op1, op1 = tem;
32131a9c 6498
546b63fb 6499 emit_insn (gen_move_insn (reloadreg, op0));
39b56c2a
RK
6500
6501 /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
6502 This fixes a problem on the 32K where the stack pointer cannot
6503 be used as an operand of an add insn. */
6504
6505 if (rtx_equal_p (op0, op1))
6506 op1 = reloadreg;
6507
546b63fb 6508 emit_insn (gen_add2_insn (reloadreg, op1));
32131a9c
RK
6509 }
6510
0dadecf6
RK
6511#ifdef SECONDARY_MEMORY_NEEDED
6512 /* If we need a memory location to do the move, do it that way. */
6513 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6514 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6515 REGNO_REG_CLASS (REGNO (reloadreg)),
6516 GET_MODE (reloadreg)))
6517 {
6518 /* Get the memory to use and rewrite both registers to its mode. */
546b63fb 6519 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);
0dadecf6
RK
6520
6521 if (GET_MODE (loc) != GET_MODE (reloadreg))
6522 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
6523
6524 if (GET_MODE (loc) != GET_MODE (in))
6525 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6526
546b63fb
RK
6527 emit_insn (gen_move_insn (loc, in));
6528 emit_insn (gen_move_insn (reloadreg, loc));
0dadecf6
RK
6529 }
6530#endif
6531
32131a9c
RK
6532 /* If IN is a simple operand, use gen_move_insn. */
6533 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
546b63fb 6534 emit_insn (gen_move_insn (reloadreg, in));
32131a9c
RK
6535
6536#ifdef HAVE_reload_load_address
6537 else if (HAVE_reload_load_address)
546b63fb 6538 emit_insn (gen_reload_load_address (reloadreg, in));
32131a9c
RK
6539#endif
6540
6541 /* Otherwise, just write (set REGLOADREG IN) and hope for the best. */
6542 else
546b63fb 6543 emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
32131a9c
RK
6544
6545 /* Return the first insn emitted.
546b63fb 6546 We can not just return get_last_insn, because there may have
32131a9c
RK
6547 been multiple instructions emitted. Also note that gen_move_insn may
6548 emit more than one insn itself, so we can not assume that there is one
6549 insn emitted per emit_insn_before call. */
6550
546b63fb 6551 return last ? NEXT_INSN (last) : get_insns ();
32131a9c
RK
6552}
6553\f
6554/* Delete a previously made output-reload
6555 whose result we now believe is not needed.
6556 First we double-check.
6557
6558 INSN is the insn now being processed.
6559 OUTPUT_RELOAD_INSN is the insn of the output reload.
6560 J is the reload-number for this insn. */
6561
6562static void
6563delete_output_reload (insn, j, output_reload_insn)
6564 rtx insn;
6565 int j;
6566 rtx output_reload_insn;
6567{
6568 register rtx i1;
6569
6570 /* Get the raw pseudo-register referred to. */
6571
6572 rtx reg = reload_in[j];
6573 while (GET_CODE (reg) == SUBREG)
6574 reg = SUBREG_REG (reg);
6575
6576 /* If the pseudo-reg we are reloading is no longer referenced
6577 anywhere between the store into it and here,
6578 and no jumps or labels intervene, then the value can get
6579 here through the reload reg alone.
6580 Otherwise, give up--return. */
6581 for (i1 = NEXT_INSN (output_reload_insn);
6582 i1 != insn; i1 = NEXT_INSN (i1))
6583 {
6584 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6585 return;
6586 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6587 && reg_mentioned_p (reg, PATTERN (i1)))
6588 return;
6589 }
6590
208dffa5
RS
6591 if (cannot_omit_stores[REGNO (reg)])
6592 return;
6593
32131a9c
RK
6594 /* If this insn will store in the pseudo again,
6595 the previous store can be removed. */
6596 if (reload_out[j] == reload_in[j])
6597 delete_insn (output_reload_insn);
6598
6599 /* See if the pseudo reg has been completely replaced
6600 with reload regs. If so, delete the store insn
6601 and forget we had a stack slot for the pseudo. */
6602 else if (reg_n_deaths[REGNO (reg)] == 1
6603 && reg_basic_block[REGNO (reg)] >= 0
6604 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6605 {
6606 rtx i2;
6607
6608 /* We know that it was used only between here
6609 and the beginning of the current basic block.
6610 (We also know that the last use before INSN was
6611 the output reload we are thinking of deleting, but never mind that.)
6612 Search that range; see if any ref remains. */
6613 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6614 {
d445b551
RK
6615 rtx set = single_set (i2);
6616
32131a9c
RK
6617 /* Uses which just store in the pseudo don't count,
6618 since if they are the only uses, they are dead. */
d445b551 6619 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
6620 continue;
6621 if (GET_CODE (i2) == CODE_LABEL
6622 || GET_CODE (i2) == JUMP_INSN)
6623 break;
6624 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6625 && reg_mentioned_p (reg, PATTERN (i2)))
6626 /* Some other ref remains;
6627 we can't do anything. */
6628 return;
6629 }
6630
6631 /* Delete the now-dead stores into this pseudo. */
6632 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6633 {
d445b551
RK
6634 rtx set = single_set (i2);
6635
6636 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
6637 delete_insn (i2);
6638 if (GET_CODE (i2) == CODE_LABEL
6639 || GET_CODE (i2) == JUMP_INSN)
6640 break;
6641 }
6642
6643 /* For the debugging info,
6644 say the pseudo lives in this reload reg. */
6645 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6646 alter_reg (REGNO (reg), -1);
6647 }
6648}
32131a9c 6649\f
a8fdc208 6650/* Output reload-insns to reload VALUE into RELOADREG.
858a47b1 6651 VALUE is an autoincrement or autodecrement RTX whose operand
32131a9c
RK
6652 is a register or memory location;
6653 so reloading involves incrementing that location.
6654
6655 INC_AMOUNT is the number to increment or decrement by (always positive).
546b63fb 6656 This cannot be deduced from VALUE. */
32131a9c 6657
546b63fb
RK
6658static void
6659inc_for_reload (reloadreg, value, inc_amount)
32131a9c
RK
6660 rtx reloadreg;
6661 rtx value;
6662 int inc_amount;
32131a9c
RK
6663{
6664 /* REG or MEM to be copied and incremented. */
6665 rtx incloc = XEXP (value, 0);
6666 /* Nonzero if increment after copying. */
6667 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
546b63fb 6668 rtx last;
0009eff2
RK
6669 rtx inc;
6670 rtx add_insn;
6671 int code;
32131a9c
RK
6672
6673 /* No hard register is equivalent to this register after
6674 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
6675 we could inc/dec that register as well (maybe even using it for
6676 the source), but I'm not sure it's worth worrying about. */
6677 if (GET_CODE (incloc) == REG)
6678 reg_last_reload_reg[REGNO (incloc)] = 0;
6679
6680 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6681 inc_amount = - inc_amount;
6682
fb3821f7 6683 inc = GEN_INT (inc_amount);
0009eff2
RK
6684
6685 /* If this is post-increment, first copy the location to the reload reg. */
6686 if (post)
546b63fb 6687 emit_insn (gen_move_insn (reloadreg, incloc));
0009eff2
RK
6688
6689 /* See if we can directly increment INCLOC. Use a method similar to that
6690 in gen_input_reload. */
6691
546b63fb
RK
6692 last = get_last_insn ();
6693 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
6694 gen_rtx (PLUS, GET_MODE (incloc),
6695 incloc, inc)));
0009eff2
RK
6696
6697 code = recog_memoized (add_insn);
6698 if (code >= 0)
32131a9c 6699 {
0009eff2
RK
6700 insn_extract (add_insn);
6701 if (constrain_operands (code, 1))
32131a9c 6702 {
0009eff2
RK
6703 /* If this is a pre-increment and we have incremented the value
6704 where it lives, copy the incremented value to RELOADREG to
6705 be used as an address. */
6706
6707 if (! post)
546b63fb
RK
6708 emit_insn (gen_move_insn (reloadreg, incloc));
6709
6710 return;
32131a9c
RK
6711 }
6712 }
0009eff2 6713
546b63fb 6714 delete_insns_since (last);
0009eff2
RK
6715
6716 /* If couldn't do the increment directly, must increment in RELOADREG.
6717 The way we do this depends on whether this is pre- or post-increment.
6718 For pre-increment, copy INCLOC to the reload register, increment it
6719 there, then save back. */
6720
6721 if (! post)
6722 {
546b63fb
RK
6723 emit_insn (gen_move_insn (reloadreg, incloc));
6724 emit_insn (gen_add2_insn (reloadreg, inc));
6725 emit_insn (gen_move_insn (incloc, reloadreg));
0009eff2 6726 }
32131a9c
RK
6727 else
6728 {
0009eff2
RK
6729 /* Postincrement.
6730 Because this might be a jump insn or a compare, and because RELOADREG
6731 may not be available after the insn in an input reload, we must do
6732 the incrementation before the insn being reloaded for.
6733
6734 We have already copied INCLOC to RELOADREG. Increment the copy in
6735 RELOADREG, save that back, then decrement RELOADREG so it has
6736 the original value. */
6737
546b63fb
RK
6738 emit_insn (gen_add2_insn (reloadreg, inc));
6739 emit_insn (gen_move_insn (incloc, reloadreg));
6740 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
32131a9c 6741 }
0009eff2 6742
546b63fb 6743 return;
32131a9c
RK
6744}
6745\f
6746/* Return 1 if we are certain that the constraint-string STRING allows
6747 the hard register REG. Return 0 if we can't be sure of this. */
6748
6749static int
6750constraint_accepts_reg_p (string, reg)
6751 char *string;
6752 rtx reg;
6753{
6754 int value = 0;
6755 int regno = true_regnum (reg);
6756 int c;
6757
6758 /* Initialize for first alternative. */
6759 value = 0;
6760 /* Check that each alternative contains `g' or `r'. */
6761 while (1)
6762 switch (c = *string++)
6763 {
6764 case 0:
6765 /* If an alternative lacks `g' or `r', we lose. */
6766 return value;
6767 case ',':
6768 /* If an alternative lacks `g' or `r', we lose. */
6769 if (value == 0)
6770 return 0;
6771 /* Initialize for next alternative. */
6772 value = 0;
6773 break;
6774 case 'g':
6775 case 'r':
6776 /* Any general reg wins for this alternative. */
6777 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
6778 value = 1;
6779 break;
6780 default:
6781 /* Any reg in specified class wins for this alternative. */
6782 {
0009eff2 6783 enum reg_class class = REG_CLASS_FROM_LETTER (c);
32131a9c 6784
0009eff2 6785 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
32131a9c
RK
6786 value = 1;
6787 }
6788 }
6789}
6790\f
d445b551
RK
6791/* Return the number of places FIND appears within X, but don't count
6792 an occurrence if some SET_DEST is FIND. */
32131a9c
RK
6793
6794static int
6795count_occurrences (x, find)
6796 register rtx x, find;
6797{
6798 register int i, j;
6799 register enum rtx_code code;
6800 register char *format_ptr;
6801 int count;
6802
6803 if (x == find)
6804 return 1;
6805 if (x == 0)
6806 return 0;
6807
6808 code = GET_CODE (x);
6809
6810 switch (code)
6811 {
6812 case REG:
6813 case QUEUED:
6814 case CONST_INT:
6815 case CONST_DOUBLE:
6816 case SYMBOL_REF:
6817 case CODE_LABEL:
6818 case PC:
6819 case CC0:
6820 return 0;
d445b551
RK
6821
6822 case SET:
6823 if (SET_DEST (x) == find)
6824 return count_occurrences (SET_SRC (x), find);
6825 break;
32131a9c
RK
6826 }
6827
6828 format_ptr = GET_RTX_FORMAT (code);
6829 count = 0;
6830
6831 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6832 {
6833 switch (*format_ptr++)
6834 {
6835 case 'e':
6836 count += count_occurrences (XEXP (x, i), find);
6837 break;
6838
6839 case 'E':
6840 if (XVEC (x, i) != NULL)
6841 {
6842 for (j = 0; j < XVECLEN (x, i); j++)
6843 count += count_occurrences (XVECEXP (x, i, j), find);
6844 }
6845 break;
6846 }
6847 }
6848 return count;
6849}
This page took 0.96974 seconds and 5 git commands to generate.