/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 1988, 1989, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */

#include <stdio.h>
#include "config.h"
#include "rtl.h"
#include "obstack.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "flags.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "reload.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"

/* This file contains the reload pass of the compiler, which is
   run after register allocation has been done.  It checks that
   each insn is valid (operands required to be in registers really
   are in registers of the proper class) and fixes up invalid ones
   by copying values temporarily into registers for the insns
   that need them.

   The results of register allocation are described by the vector
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
   can be used to find which hard reg, if any, a pseudo reg is in.

   The technique we always use is to free up a few hard regs that are
   called ``reload regs'', and for each place where a pseudo reg
   must be in a hard reg, copy it temporarily into one of the reload regs.

   All the pseudos that were formerly allocated to the hard regs that
   are now in use as reload regs must be ``spilled''.  This means
   that they go to other hard regs, or to stack slots if no other
   available hard regs can be found.  Spilling can invalidate more
   insns, creating additional need for reloads, so we must keep checking
   until the process stabilizes.

   For machines with different classes of registers, we must keep track
   of the register class needed for each reload, and make sure that
   we allocate enough reload registers of each class.

   The file reload.c contains the code that checks one insn for
   validity and reports the reloads that it needs.  This file
   is in charge of scanning the entire rtl code, accumulating the
   reload needs, spilling, assigning reload registers to use for
   fixing up each insn, and generating the new insns to copy values
   into the reload registers.  */
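
/* As a concrete illustration, suppose pseudo reg 105 did not get a hard
   reg and was assigned a stack slot, and consider an insn such as

	(set (reg:SI 105) (plus:SI (reg:SI 105) (const_int 1)))

   on a machine whose add instruction insists on register operands.
   Reload then emits insns that copy the stack slot into a reload reg,
   rewrites the insn to use that reload reg, and stores the reload reg
   back into the stack slot afterward; which hard reg serves as the
   reload reg is decided by choose_reload_regs below.  */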

#ifndef REGISTER_MOVE_COST
#define REGISTER_MOVE_COST(x, y) 2
#endif

#ifndef MEMORY_MOVE_COST
#define MEMORY_MOVE_COST(x) 4
#endif
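
/* These defaults rate a register-register move at 2 and a memory access
   at 4, so going through memory is treated as roughly twice as costly as
   a register copy; a target can override either value by defining the
   corresponding macro itself.  */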

/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine.)  */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;

/* Widest width in which each pseudo reg is referred to (via subreg).  */
static int *reg_max_ref_width;

/* Element N is the insn that initialized reg N from its equivalent
   constant or memory slot.  */
static rtx *reg_equiv_init;

/* During reload_as_needed, element N contains the last pseudo regno
   reloaded into the Nth reload register.  This vector is in parallel
   with spill_regs.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   the Nth reload register was last used.  This vector is in parallel
   with spill_regs, and its contents are significant only when
   reg_reloaded_contents is significant.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

/* This reg set indicates registers that may not be used for retrying global
   allocation.  The registers that may not be used include all spill registers
   and the frame pointer (if we are using one).  */
HARD_REG_SET forbidden_regs;

/* This reg set indicates registers that are not good for spill registers.
   They will not be used to complete groups of spill registers.  This includes
   all fixed registers, registers that may be eliminated, and, if
   SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.

   (spill_reg_order prevents these registers from being used to start a
   group.)  */
static HARD_REG_SET bad_spill_regs;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `spills' is the number of
   elements that are actually valid; new ones are added at the end.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* Describes order of preference for putting regs into spill_regs.
   Contains the numbers of all the hard regs, in order most preferred first.
   This order is different for each function.
   It is set up by order_regs_for_reload.
   Empty elements at the end contain -1.  */
static short potential_reload_regs[FIRST_PSEUDO_REGISTER];

/* 1 for a hard register that appears explicitly in the rtl
   (for example, function value registers, special registers
   used by insns, structure value pointer registers).  */
static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];

/* Indicates if a register was counted against the need for
   groups.  0 means it can count against max_nongroup instead.  */
static HARD_REG_SET counted_for_groups;

/* Indicates if a register was counted against the need for
   non-groups.  0 means it can become part of a new group.
   During choose_reload_regs, 1 here means don't use this reg
   as part of a group, even if it seems to be otherwise ok.  */
static HARD_REG_SET counted_for_nongroups;

/* Indexed by pseudo reg number N,
   says we may not delete stores into the real (memory) home of pseudo N.
   This is set if we already substituted a memory equivalent in some uses,
   which happens when we have to eliminate the fp from it.  */
static char *cannot_omit_stores;

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */

static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */

char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */

char double_reg_address_ok;

/* Record the stack slot for each spilled hard register.  */

static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */

static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Indexed by register class and basic block number, nonzero if there is
   any need for a spill register of that class in that basic block.
   The pointer is 0 if we did stupid allocation and don't know
   the structure of basic blocks.  */

char *basic_block_needs[N_REG_CLASSES];

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */

int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */

int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  */

enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */

struct obstack reload_obstack;
char *reload_firstobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* List of labels that must never be deleted.  */
extern rtx forced_labels;

/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

static struct elim_table
{
  int from;                     /* Register number to be eliminated.  */
  int to;                       /* Register number used as replacement.  */
  int initial_offset;           /* Initial difference between values.  */
  int can_eliminate;            /* Non-zero if this elimination can be done.  */
  int can_eliminate_previous;   /* Value of CAN_ELIMINATE in previous scan over
                                   insns made by reload.  */
  int offset;                   /* Current offset between the two regs.  */
  int max_offset;               /* Maximum offset between the two regs.  */
  int previous_offset;          /* Offset at end of previous insn.  */
  int ref_outside_mem;          /* "to" has been referenced outside a MEM.  */
  rtx from_rtx;                 /* REG rtx for the register to be eliminated.
                                   We cannot simply compare the number since
                                   we might then spuriously replace a hard
                                   register corresponding to a pseudo
                                   assigned to the reg to be eliminated.  */
  rtx to_rtx;                   /* REG rtx for the replacement.  */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
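
/* With the default table above, for example, there is a single entry which
   says that uses of the frame pointer may be rewritten in terms of the
   stack pointer; `offset' then tracks the current difference between the
   two registers at each point in the insn stream.  */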

/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If non-zero, we use a new copy of each
   replacement result in any insns encountered.  */
static int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the number of the label.
   The first table is an array of flags that records whether we have yet
   encountered a label and the second table is an array of arrays, one
   entry in the latter array for each elimination.  */

static char *offsets_known_at;
static int (*offsets_at)[NUM_ELIMINABLE_REGS];

/* Number of labels in the current function.  */

static int num_labels;

struct hard_reg_n_uses { int regno; int uses; };

static int possible_group_p            PROTO((int, int *));
static void count_possible_groups      PROTO((int *, enum machine_mode *,
                                              int *));
static int modes_equiv_for_class_p     PROTO((enum machine_mode,
                                              enum machine_mode,
                                              enum reg_class));
static void spill_failure              PROTO((rtx));
static int new_spill_reg               PROTO((int, int, int *, int *, int,
                                              FILE *));
static void delete_dead_insn           PROTO((rtx));
static void alter_reg                  PROTO((int, int));
static void mark_scratch_live          PROTO((rtx));
static void set_label_offsets          PROTO((rtx, rtx, int));
static int eliminate_regs_in_insn      PROTO((rtx, int));
static void mark_not_eliminable        PROTO((rtx, rtx));
static int spill_hard_reg              PROTO((int, int, FILE *, int));
static void scan_paradoxical_subregs   PROTO((rtx));
static int hard_reg_use_compare        PROTO((struct hard_reg_n_uses *,
                                              struct hard_reg_n_uses *));
static void order_regs_for_reload      PROTO((void));
static void reload_as_needed           PROTO((rtx, int));
static void forget_old_reloads_1       PROTO((rtx, rtx));
static int reload_reg_class_lower      PROTO((short *, short *));
static void mark_reload_reg_in_use     PROTO((int, int, enum reload_type,
                                              enum machine_mode));
static void clear_reload_reg_in_use    PROTO((int, int, enum reload_type,
                                              enum machine_mode));
static int reload_reg_free_p           PROTO((int, int, enum reload_type));
static int reload_reg_free_before_p    PROTO((int, int, enum reload_type));
static int reload_reg_reaches_end_p    PROTO((int, int, enum reload_type));
static int allocate_reload_reg         PROTO((int, rtx, int, int));
static void choose_reload_regs         PROTO((rtx, rtx));
static void merge_assigned_reloads     PROTO((rtx));
static void emit_reload_insns          PROTO((rtx));
static void delete_output_reload       PROTO((rtx, int, rtx));
static void inc_for_reload             PROTO((rtx, rtx, int));
static int constraint_accepts_reg_p    PROTO((char *, rtx));
static int count_occurrences           PROTO((rtx, rtx));

/* Initialize the reload pass once per compilation.  */

void
init_reload ()
{
  register int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  register rtx tem
    = gen_rtx (MEM, Pmode,
               gen_rtx (PLUS, Pmode,
                        gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
                        GEN_INT (4)));
  spill_indirect_levels = 0;

  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx (MEM, Pmode, tem);
    }

  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx (PLUS, Pmode,
                     gen_rtx (REG, Pmode, FRAME_POINTER_REGNUM),
                     gen_rtx (REG, Pmode, i));
      /* This way, we make sure that reg+reg is an offsettable address.  */
      tem = plus_constant (tem, 4);

      if (memory_address_p (QImode, tem))
        {
          double_reg_address_ok = 1;
          break;
        }
    }

  /* Initialize obstack for our rtl allocation.  */
  gcc_obstack_init (&reload_obstack);
  reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
}

/* Main entry point for the reload pass.

   FIRST is the first insn of the function being compiled.

   GLOBAL nonzero means we were called from global_alloc
   and should attempt to reallocate any pseudoregs that we
   displace from hard regs we will use for reloads.
   If GLOBAL is zero, we do not have enough information to do that,
   so any pseudo reg that is spilled must go to the stack.

   DUMPFILE is the global-reg debugging dump file stream, or 0.
   If it is nonzero, messages are written to it to describe
   which registers are seized as reload regs, which pseudo regs
   are spilled from them, and where the pseudo regs are reallocated to.

   Return value is nonzero if reload failed
   and we must not do any more for this function.  */

int
reload (first, global, dumpfile)
     rtx first;
     int global;
     FILE *dumpfile;
{
  register int class;
  register int i, j;
  register rtx insn;
  register struct elim_table *ep;

  int something_changed;
  int something_needs_reloads;
  int something_needs_elimination;
  int new_basic_block_needs;
  enum reg_class caller_save_spill_class = NO_REGS;
  int caller_save_group_size = 1;

  /* Nonzero means we couldn't get enough spill regs.  */
  int failure = 0;

  /* The basic block number currently being processed for INSN.  */
  int this_block;

  /* Make sure even insns with volatile mem refs are recognizable.  */
  init_recog ();

  /* Enable find_equiv_reg to distinguish insns made by reload.  */
  reload_first_uid = get_max_uid ();

  for (i = 0; i < N_REG_CLASSES; i++)
    basic_block_needs[i] = 0;

#ifdef SECONDARY_MEMORY_NEEDED
  /* Initialize the secondary memory table.  */
  clear_secondary_mem ();
#endif

  /* Remember which hard regs appear explicitly
     before we merge into `regs_ever_live' the ones in which
     pseudo regs have been allocated.  */
  bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);

  /* We don't have a stack slot for any spill reg yet.  */
  bzero (spill_stack_slot, sizeof spill_stack_slot);
  bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);

  /* Initialize the save area information for caller-save, in case some
     are needed.  */
  init_save_areas ();

  /* Compute which hard registers are now in use
     as homes for pseudo registers.
     This is done here rather than (eg) in global_alloc
     because this point is reached even if not optimizing.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    mark_home_live (i);

  for (i = 0; i < scratch_list_length; i++)
    if (scratch_list[i])
      mark_scratch_live (scratch_list[i]);

  /* Make sure that the last insn in the chain
     is not something that needs reloading.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Find all the pseudo registers that didn't get hard regs
     but do have known equivalent constants or memory slots.
     These include parameters (known equivalent to parameter slots)
     and cse'd or loop-moved constant memory addresses.

     Record constant equivalents in reg_equiv_constant
     so they will be substituted by find_reloads.
     Record memory equivalents in reg_mem_equiv so they can
     be substituted eventually by altering the REG-rtx's.  */

  reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_constant, max_regno * sizeof (rtx));
  reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
  reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_mem, max_regno * sizeof (rtx));
  reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_init, max_regno * sizeof (rtx));
  reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_address, max_regno * sizeof (rtx));
  reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
  bzero (reg_max_ref_width, max_regno * sizeof (int));
  cannot_omit_stores = (char *) alloca (max_regno);
  bzero (cannot_omit_stores, max_regno);

  /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
     Also find all paradoxical subregs
     and find largest such for each pseudo.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
        {
          rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
          if (note
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
                  || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
#endif
              )
            {
              rtx x = XEXP (note, 0);
              i = REGNO (SET_DEST (set));
              if (i > LAST_VIRTUAL_REGISTER)
                {
                  if (GET_CODE (x) == MEM)
                    reg_equiv_memory_loc[i] = x;
                  else if (CONSTANT_P (x))
                    {
                      if (LEGITIMATE_CONSTANT_P (x))
                        reg_equiv_constant[i] = x;
                      else
                        reg_equiv_memory_loc[i]
                          = force_const_mem (GET_MODE (SET_DEST (set)), x);
                    }
                  else
                    continue;

                  /* If this register is being made equivalent to a MEM
                     and the MEM is not SET_SRC, the equivalencing insn
                     is one with the MEM as a SET_DEST and it occurs later.
                     So don't mark this insn now.  */
                  if (GET_CODE (x) != MEM
                      || rtx_equal_p (SET_SRC (set), x))
                    reg_equiv_init[i] = insn;
                }
            }
        }

      /* If this insn is setting a MEM from a register equivalent to it,
         this is the equivalencing insn.  */
      else if (set && GET_CODE (SET_DEST (set)) == MEM
               && GET_CODE (SET_SRC (set)) == REG
               && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
               && rtx_equal_p (SET_DEST (set),
                               reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
        reg_equiv_init[REGNO (SET_SRC (set))] = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        scan_paradoxical_subregs (PATTERN (insn));
    }

  /* Does this function require a frame pointer?  */

  frame_pointer_needed = (! flag_omit_frame_pointer
#ifdef EXIT_IGNORE_STACK
                          /* ?? If EXIT_IGNORE_STACK is set, we will not save
                             and restore sp for alloca.  So we can't eliminate
                             the frame pointer in that case.  At some point,
                             we should improve this by emitting the
                             sp-adjusting insns for this case.  */
                          || (current_function_calls_alloca
                              && EXIT_IGNORE_STACK)
#endif
                          || FRAME_POINTER_REQUIRED);

  num_eliminable = 0;

  /* Initialize the table of registers to eliminate.  The way we do this
     depends on how the eliminable registers were defined.  */
#ifdef ELIMINABLE_REGS
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      ep->can_eliminate = ep->can_eliminate_previous
        = (CAN_ELIMINATE (ep->from, ep->to)
           && (ep->from != FRAME_POINTER_REGNUM || ! frame_pointer_needed));
    }
#else
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
    = ! frame_pointer_needed;
#endif

  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx will cause, e.g.,
     gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
      ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
    }

  num_labels = max_label_num () - get_first_label_num ();

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = (char *) alloca (num_labels);
  offsets_at
    = (int (*)[NUM_ELIMINABLE_REGS])
      alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));

  offsets_known_at -= get_first_label_num ();
  offsets_at -= get_first_label_num ();

  /* Alter each pseudo-reg rtx to contain its hard reg number.
     Assign stack slots to the pseudos that lack hard regs or equivalents.
     Do not touch virtual registers.  */

  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
    alter_reg (i, -1);

  /* Round size of stack frame to BIGGEST_ALIGNMENT.  This must be done here
     because the stack size may be a part of the offset computation for
     register elimination.  */
  assign_stack_local (BLKmode, 0, 0);

  /* If we have some registers we think can be eliminated, scan all insns to
     see if there is an insn that sets one of these registers to something
     other than itself plus a constant.  If so, the register cannot be
     eliminated.  Doing this scan here eliminates an extra pass through the
     main reload loop in the most common case where register elimination
     cannot be done.  */
  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
        || GET_CODE (insn) == CALL_INSN)
      note_stores (PATTERN (insn), mark_not_eliminable);

#ifndef REGISTER_CONSTRAINTS
  /* If all the pseudo regs have hard regs,
     except for those that are never referenced,
     we know that no reloads are needed.  */
  /* But that is not true if there are register constraints, since
     in that case some pseudos might be in the wrong kind of hard reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
      break;

  if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
    return 0;
#endif

  /* Compute the order of preference for hard registers to spill.
     Store them by decreasing preference in potential_reload_regs.  */

  order_regs_for_reload ();

  /* So far, no hard regs have been spilled.  */
  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    spill_reg_order[i] = -1;

  /* On most machines, we can't use any register explicitly used in the
     rtl as a spill register.  But on some, we have to.  Those will have
     taken care to keep the life of hard regs as short as possible.  */

#ifdef SMALL_REGISTER_CLASSES
  CLEAR_HARD_REG_SET (forbidden_regs);
#else
  COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
#endif

  /* Spill any hard regs that we know we can't eliminate.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if (! ep->can_eliminate)
      {
        spill_hard_reg (ep->from, global, dumpfile, 1);
        regs_ever_live[ep->from] = 1;
      }

  if (global)
    for (i = 0; i < N_REG_CLASSES; i++)
      {
        basic_block_needs[i] = (char *) alloca (n_basic_blocks);
        bzero (basic_block_needs[i], n_basic_blocks);
      }

  /* From now on, we need to emit any moves without making new pseudos.  */
  reload_in_progress = 1;

  /* This loop scans the entire function each go-round
     and repeats until one repetition spills no additional hard regs.  */

  /* This flag is set when a pseudo reg is spilled,
     to require another pass.  Note that getting an additional reload
     reg does not necessarily imply any pseudo reg was spilled;
     sometimes we find a reload reg that no pseudo reg was allocated in.  */
  something_changed = 1;
  /* This flag is set if there are any insns that require reloading.  */
  something_needs_reloads = 0;
  /* This flag is set if there are any insns that require register
     eliminations.  */
  something_needs_elimination = 0;
  while (something_changed)
    {
      rtx after_call = 0;

      /* For each class, number of reload regs needed in that class.
         This is the maximum over all insns of the needs in that class
         of the individual insn.  */
      int max_needs[N_REG_CLASSES];
      /* For each class, size of group of consecutive regs
         that is needed for the reloads of this class.  */
      int group_size[N_REG_CLASSES];
      /* For each class, max number of consecutive groups needed.
         (Each group contains group_size[CLASS] consecutive registers.)  */
      int max_groups[N_REG_CLASSES];
      /* For each class, max number needed of regs that don't belong
         to any of the groups.  */
      int max_nongroups[N_REG_CLASSES];
      /* For each class, the machine mode which requires consecutive
         groups of regs of that class.
         If two different modes ever require groups of one class,
         they must be the same size and equally restrictive for that class,
         otherwise we can't handle the complexity.  */
      enum machine_mode group_mode[N_REG_CLASSES];
      /* Record the insn where each maximum need is first found.  */
      rtx max_needs_insn[N_REG_CLASSES];
      rtx max_groups_insn[N_REG_CLASSES];
      rtx max_nongroups_insn[N_REG_CLASSES];
      rtx x;
      int starting_frame_size = get_frame_size ();
      static char *reg_class_names[] = REG_CLASS_NAMES;

      something_changed = 0;
      bzero (max_needs, sizeof max_needs);
      bzero (max_groups, sizeof max_groups);
      bzero (max_nongroups, sizeof max_nongroups);
      bzero (max_needs_insn, sizeof max_needs_insn);
      bzero (max_groups_insn, sizeof max_groups_insn);
      bzero (max_nongroups_insn, sizeof max_nongroups_insn);
      bzero (group_size, sizeof group_size);
      for (i = 0; i < N_REG_CLASSES; i++)
        group_mode[i] = VOIDmode;

      /* Keep track of which basic blocks are needing the reloads.  */
      this_block = 0;

      /* Remember whether any element of basic_block_needs
         changes from 0 to 1 in this pass.  */
      new_basic_block_needs = 0;

      /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
        {
          INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
          ep->previous_offset = ep->offset
            = ep->max_offset = ep->initial_offset;
        }
#else
#ifdef INITIAL_FRAME_POINTER_OFFSET
      INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
#else
      if (!FRAME_POINTER_REQUIRED)
        abort ();
      reg_eliminate[0].initial_offset = 0;
#endif
      reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
        = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

      num_not_at_initial_offset = 0;

      bzero (&offsets_known_at[get_first_label_num ()], num_labels);

      /* Set a known offset for each forced label to be at the initial offset
         of each elimination.  We do this because we assume that all
         computed jumps occur from a location where each elimination is
         at its initial offset.  */

      for (x = forced_labels; x; x = XEXP (x, 1))
        if (XEXP (x, 0))
          set_label_offsets (XEXP (x, 0), NULL_RTX, 1);

      /* For each pseudo register that has an equivalent location defined,
         try to eliminate any eliminable registers (such as the frame pointer)
         assuming initial offsets for the replacement register, which
         is the normal case.

         If the resulting location is directly addressable, substitute
         the MEM we just got directly for the old REG.

         If it is not addressable but is a constant or the sum of a hard reg
         and constant, it is probably not addressable because the constant is
         out of range, in that case record the address; we will generate
         hairy code to compute the address in a register each time it is
         needed.

         If the location is not addressable, but does not have one of the
         above forms, assign a stack slot.  We have to do this to avoid the
         potential of producing lots of reloads if, e.g., a location involves
         a pseudo that didn't get a hard register and has an equivalent memory
         location that also involves a pseudo that didn't get a hard register.

         Perhaps at some point we will improve reload_when_needed handling
         so this problem goes away.  But that's very hairy.  */

      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
        if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
          {
            rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);

            if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
                                         XEXP (x, 0)))
              reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
            else if (CONSTANT_P (XEXP (x, 0))
                     || (GET_CODE (XEXP (x, 0)) == PLUS
                         && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
                         && (REGNO (XEXP (XEXP (x, 0), 0))
                             < FIRST_PSEUDO_REGISTER)
                         && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
              reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
            else
              {
                /* Make a new stack slot.  Then indicate that something
                   changed so we go back and recompute offsets for
                   eliminable registers because the allocation of memory
                   below might change some offset.  reg_equiv_{mem,address}
                   will be set up for this pseudo on the next pass around
                   the loop.  */
                reg_equiv_memory_loc[i] = 0;
                reg_equiv_init[i] = 0;
                alter_reg (i, -1);
                something_changed = 1;
              }
          }

      /* If we allocated another pseudo to the stack, redo elimination
         bookkeeping.  */
      if (something_changed)
        continue;

      /* If caller-saves needs a group, initialize the group to include
         the size and mode required for caller-saves.  */

      if (caller_save_group_size > 1)
        {
          group_mode[(int) caller_save_spill_class] = Pmode;
          group_size[(int) caller_save_spill_class] = caller_save_group_size;
        }

      /* Compute the most additional registers needed by any instruction.
         Collect information separately for each class of regs.  */

      for (insn = first; insn; insn = NEXT_INSN (insn))
        {
          if (global && this_block + 1 < n_basic_blocks
              && insn == basic_block_head[this_block+1])
            ++this_block;

          /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
             might include REG_LABEL), we need to see what effects this
             has on the known offsets at labels.  */

          if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
              || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
                  && REG_NOTES (insn) != 0))
            set_label_offsets (insn, insn, 0);

          if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
            {
              /* Nonzero means don't use a reload reg that overlaps
                 the place where a function value can be returned.  */
              rtx avoid_return_reg = 0;

              rtx old_body = PATTERN (insn);
              int old_code = INSN_CODE (insn);
              rtx old_notes = REG_NOTES (insn);
              int did_elimination = 0;
              int max_total_input_groups = 0, max_total_output_groups = 0;

              /* To compute the number of reload registers of each class
                 needed for an insn, we must simulate what choose_reload_regs
                 can do.  We do this by splitting an insn into an "input" and
                 an "output" part.  RELOAD_OTHER reloads are used in both.
                 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
                 which must be live over the entire input section of reloads,
                 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
                 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
                 inputs.

                 The registers needed for output are RELOAD_OTHER and
                 RELOAD_FOR_OUTPUT, which are live for the entire output
                 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
                 reloads for each operand.

                 The total number of registers needed is the maximum of the
                 inputs and outputs.  */
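
              /* For instance, if an insn needs two RELOAD_FOR_INPUT
                 registers, one RELOAD_FOR_OUTPUT register and one
                 RELOAD_OTHER register of some class, the input part
                 needs 1 + 2 = 3 registers of that class, the output
                 part needs 1 + 1 = 2, and the insn as a whole needs
                 MAX (3, 2) = 3.  */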

              /* These just count RELOAD_OTHER.  */
              int insn_needs[N_REG_CLASSES];
              int insn_groups[N_REG_CLASSES];
              int insn_total_groups = 0;

              /* Count RELOAD_FOR_INPUT reloads.  */
              int insn_needs_for_inputs[N_REG_CLASSES];
              int insn_groups_for_inputs[N_REG_CLASSES];
              int insn_total_groups_for_inputs = 0;

              /* Count RELOAD_FOR_OUTPUT reloads.  */
              int insn_needs_for_outputs[N_REG_CLASSES];
              int insn_groups_for_outputs[N_REG_CLASSES];
              int insn_total_groups_for_outputs = 0;

              /* Count RELOAD_FOR_INSN reloads.  */
              int insn_needs_for_insn[N_REG_CLASSES];
              int insn_groups_for_insn[N_REG_CLASSES];
              int insn_total_groups_for_insn = 0;

              /* Count RELOAD_FOR_OTHER_ADDRESS reloads.  */
              int insn_needs_for_other_addr[N_REG_CLASSES];
              int insn_groups_for_other_addr[N_REG_CLASSES];
              int insn_total_groups_for_other_addr = 0;

              /* Count RELOAD_FOR_INPUT_ADDRESS reloads.  */
              int insn_needs_for_in_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
              int insn_groups_for_in_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
              int insn_total_groups_for_in_addr[MAX_RECOG_OPERANDS];

              /* Count RELOAD_FOR_OUTPUT_ADDRESS reloads.  */
              int insn_needs_for_out_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
              int insn_groups_for_out_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
              int insn_total_groups_for_out_addr[MAX_RECOG_OPERANDS];

              /* Count RELOAD_FOR_OPERAND_ADDRESS reloads.  */
              int insn_needs_for_op_addr[N_REG_CLASSES];
              int insn_groups_for_op_addr[N_REG_CLASSES];
              int insn_total_groups_for_op_addr = 0;

#if 0  /* This wouldn't work nowadays, since optimize_bit_field
          looks for non-strict memory addresses.  */
              /* Optimization: a bit-field instruction whose field
                 happens to be a byte or halfword in memory
                 can be changed to a move instruction.  */

              if (GET_CODE (PATTERN (insn)) == SET)
                {
                  rtx dest = SET_DEST (PATTERN (insn));
                  rtx src = SET_SRC (PATTERN (insn));

                  if (GET_CODE (dest) == ZERO_EXTRACT
                      || GET_CODE (dest) == SIGN_EXTRACT)
                    optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
                  if (GET_CODE (src) == ZERO_EXTRACT
                      || GET_CODE (src) == SIGN_EXTRACT)
                    optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
                }
#endif

              /* If needed, eliminate any eliminable registers.  */
              if (num_eliminable)
                did_elimination = eliminate_regs_in_insn (insn, 0);

#ifdef SMALL_REGISTER_CLASSES
              /* Set avoid_return_reg if this is an insn
                 that might use the value of a function call.  */
              if (GET_CODE (insn) == CALL_INSN)
                {
                  if (GET_CODE (PATTERN (insn)) == SET)
                    after_call = SET_DEST (PATTERN (insn));
                  else if (GET_CODE (PATTERN (insn)) == PARALLEL
                           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                    after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
                  else
                    after_call = 0;
                }
              else if (after_call != 0
                       && !(GET_CODE (PATTERN (insn)) == SET
                            && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
                {
                  if (reg_mentioned_p (after_call, PATTERN (insn)))
                    avoid_return_reg = after_call;
                  after_call = 0;
                }
#endif /* SMALL_REGISTER_CLASSES */

              /* Analyze the instruction.  */
              find_reloads (insn, 0, spill_indirect_levels, global,
                            spill_reg_order);

              /* Remember for later shortcuts which insns had any reloads or
                 register eliminations.

                 One might think that it would be worthwhile to mark insns
                 that need register replacements but not reloads, but this is
                 not safe because find_reloads may do some manipulation of
                 the insn (such as swapping commutative operands), which would
                 be lost when we restore the old pattern after register
                 replacement.  So the actions of find_reloads must be redone in
                 subsequent passes or in reload_as_needed.

                 However, it is safe to mark insns that need reloads
                 but not register replacement.  */

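              /* The mode of the insn is reused here as a scratch flag:
                 QImode marks insns that needed register elimination,
                 HImode marks insns that needed reloads, an existing
                 DImode is preserved, and VOIDmode marks insns that
                 needed neither.  */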
              PUT_MODE (insn, (did_elimination ? QImode
                               : n_reloads ? HImode
                               : GET_MODE (insn) == DImode ? DImode
                               : VOIDmode));

              /* Discard any register replacements done.  */
              if (did_elimination)
                {
                  obstack_free (&reload_obstack, reload_firstobj);
                  PATTERN (insn) = old_body;
                  INSN_CODE (insn) = old_code;
                  REG_NOTES (insn) = old_notes;
                  something_needs_elimination = 1;
                }

              /* If this insn has no reloads, we need not do anything except
                 in the case of a CALL_INSN when we have caller-saves and
                 caller-save needs reloads.  */

              if (n_reloads == 0
                  && ! (GET_CODE (insn) == CALL_INSN
                        && caller_save_spill_class != NO_REGS))
                continue;

              something_needs_reloads = 1;

              for (i = 0; i < N_REG_CLASSES; i++)
                {
                  insn_needs[i] = 0, insn_groups[i] = 0;
                  insn_needs_for_inputs[i] = 0, insn_groups_for_inputs[i] = 0;
                  insn_needs_for_outputs[i] = 0, insn_groups_for_outputs[i] = 0;
                  insn_needs_for_insn[i] = 0, insn_groups_for_insn[i] = 0;
                  insn_needs_for_op_addr[i] = 0, insn_groups_for_op_addr[i] = 0;
                  insn_needs_for_other_addr[i] = 0;
                  insn_groups_for_other_addr[i] = 0;
                }

              for (i = 0; i < reload_n_operands; i++)
                {
                  insn_total_groups_for_in_addr[i] = 0;
                  insn_total_groups_for_out_addr[i] = 0;

                  for (j = 0; j < N_REG_CLASSES; j++)
                    {
                      insn_needs_for_in_addr[i][j] = 0;
                      insn_needs_for_out_addr[i][j] = 0;
                      insn_groups_for_in_addr[i][j] = 0;
                      insn_groups_for_out_addr[i][j] = 0;
                    }
                }

              /* Count each reload once in every class
                 containing the reload's own class.  */

              for (i = 0; i < n_reloads; i++)
                {
                  register enum reg_class *p;
                  enum reg_class class = reload_reg_class[i];
                  int size;
                  enum machine_mode mode;
                  int *this_groups;
                  int *this_needs;
                  int *this_total_groups;

                  /* Don't count the dummy reloads, for which one of the
                     regs mentioned in the insn can be used for reloading.
                     Don't count optional reloads.
                     Don't count reloads that got combined with others.  */
                  if (reload_reg_rtx[i] != 0
                      || reload_optional[i] != 0
                      || (reload_out[i] == 0 && reload_in[i] == 0
                          && ! reload_secondary_p[i]))
                    continue;

                  /* Show that a reload register of this class is needed
                     in this basic block.  We do not use insn_needs and
                     insn_groups because they are overly conservative for
                     this purpose.  */
                  if (global && ! basic_block_needs[(int) class][this_block])
                    {
                      basic_block_needs[(int) class][this_block] = 1;
                      new_basic_block_needs = 1;
                    }

                  /* Decide which time-of-use to count this reload for.  */
                  switch (reload_when_needed[i])
                    {
                    case RELOAD_OTHER:
                      this_needs = insn_needs;
                      this_groups = insn_groups;
                      this_total_groups = &insn_total_groups;
                      break;

                    case RELOAD_FOR_INPUT:
                      this_needs = insn_needs_for_inputs;
                      this_groups = insn_groups_for_inputs;
                      this_total_groups = &insn_total_groups_for_inputs;
                      break;

                    case RELOAD_FOR_OUTPUT:
                      this_needs = insn_needs_for_outputs;
                      this_groups = insn_groups_for_outputs;
                      this_total_groups = &insn_total_groups_for_outputs;
                      break;

                    case RELOAD_FOR_INSN:
                      this_needs = insn_needs_for_insn;
                      this_groups = insn_groups_for_insn;
                      this_total_groups = &insn_total_groups_for_insn;
                      break;

                    case RELOAD_FOR_OTHER_ADDRESS:
                      this_needs = insn_needs_for_other_addr;
                      this_groups = insn_groups_for_other_addr;
                      this_total_groups = &insn_total_groups_for_other_addr;
                      break;

                    case RELOAD_FOR_INPUT_ADDRESS:
                      this_needs = insn_needs_for_in_addr[reload_opnum[i]];
                      this_groups = insn_groups_for_in_addr[reload_opnum[i]];
                      this_total_groups
                        = &insn_total_groups_for_in_addr[reload_opnum[i]];
                      break;

                    case RELOAD_FOR_OUTPUT_ADDRESS:
                      this_needs = insn_needs_for_out_addr[reload_opnum[i]];
                      this_groups = insn_groups_for_out_addr[reload_opnum[i]];
                      this_total_groups
                        = &insn_total_groups_for_out_addr[reload_opnum[i]];
                      break;

                    case RELOAD_FOR_OPERAND_ADDRESS:
                      this_needs = insn_needs_for_op_addr;
                      this_groups = insn_groups_for_op_addr;
                      this_total_groups = &insn_total_groups_for_op_addr;
                      break;
                    }

                  mode = reload_inmode[i];
                  if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
                    mode = reload_outmode[i];
                  size = CLASS_MAX_NREGS (class, mode);
                  if (size > 1)
                    {
                      enum machine_mode other_mode, allocate_mode;

                      /* Count number of groups needed separately from
                         number of individual regs needed.  */
                      this_groups[(int) class]++;
                      p = reg_class_superclasses[(int) class];
                      while (*p != LIM_REG_CLASSES)
                        this_groups[(int) *p++]++;
                      (*this_total_groups)++;

                      /* Record size and mode of a group of this class.  */
                      /* If more than one size group is needed,
                         make all groups the largest needed size.  */
                      if (group_size[(int) class] < size)
                        {
                          other_mode = group_mode[(int) class];
                          allocate_mode = mode;

                          group_size[(int) class] = size;
                          group_mode[(int) class] = mode;
                        }
                      else
                        {
                          other_mode = mode;
                          allocate_mode = group_mode[(int) class];
                        }

                      /* Crash if two dissimilar machine modes both need
                         groups of consecutive regs of the same class.  */

                      if (other_mode != VOIDmode
                          && other_mode != allocate_mode
                          && ! modes_equiv_for_class_p (allocate_mode,
                                                        other_mode,
                                                        class))
                        abort ();
                    }
                  else if (size == 1)
                    {
                      this_needs[(int) class] += 1;
                      p = reg_class_superclasses[(int) class];
                      while (*p != LIM_REG_CLASSES)
                        this_needs[(int) *p++] += 1;
                    }
                  else
                    abort ();
                }

              /* All reloads have been counted for this insn;
                 now merge the various times of use.
                 This sets insn_needs, etc., to the maximum total number
                 of registers needed at any point in this insn.  */

              for (i = 0; i < N_REG_CLASSES; i++)
                {
                  int in_max, out_max;

                  for (in_max = 0, out_max = 0, j = 0;
                       j < reload_n_operands; j++)
                    {
                      in_max = MAX (in_max, insn_needs_for_in_addr[j][i]);
                      out_max = MAX (out_max, insn_needs_for_out_addr[j][i]);
                    }

                  /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
                     and operand addresses but not things used to reload them.
                     Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads don't
                     conflict with things needed to reload inputs or
                     outputs.  */

                  in_max = MAX (in_max, insn_needs_for_op_addr[i]);
                  out_max = MAX (out_max, insn_needs_for_insn[i]);

                  insn_needs_for_inputs[i]
                    = MAX (insn_needs_for_inputs[i]
                           + insn_needs_for_op_addr[i]
                           + insn_needs_for_insn[i],
                           in_max + insn_needs_for_inputs[i]);

                  insn_needs_for_outputs[i] += out_max;
                  insn_needs[i] += MAX (MAX (insn_needs_for_inputs[i],
                                             insn_needs_for_outputs[i]),
                                        insn_needs_for_other_addr[i]);

                  for (in_max = 0, out_max = 0, j = 0;
                       j < reload_n_operands; j++)
                    {
                      in_max = MAX (in_max, insn_groups_for_in_addr[j][i]);
                      out_max = MAX (out_max, insn_groups_for_out_addr[j][i]);
                    }

                  in_max = MAX (in_max, insn_groups_for_op_addr[i]);
                  out_max = MAX (out_max, insn_groups_for_insn[i]);

                  insn_groups_for_inputs[i]
                    = MAX (insn_groups_for_inputs[i]
                           + insn_groups_for_op_addr[i]
                           + insn_groups_for_insn[i],
                           in_max + insn_groups_for_inputs[i]);

                  insn_groups_for_outputs[i] += out_max;
                  insn_groups[i] += MAX (MAX (insn_groups_for_inputs[i],
                                              insn_groups_for_outputs[i]),
                                         insn_groups_for_other_addr[i]);
                }

              for (i = 0; i < reload_n_operands; i++)
                {
                  max_total_input_groups
                    = MAX (max_total_input_groups,
                           insn_total_groups_for_in_addr[i]);
                  max_total_output_groups
                    = MAX (max_total_output_groups,
                           insn_total_groups_for_out_addr[i]);
                }

              max_total_input_groups = MAX (max_total_input_groups,
                                            insn_total_groups_for_op_addr);
              max_total_output_groups = MAX (max_total_output_groups,
                                             insn_total_groups_for_insn);

              insn_total_groups_for_inputs
                = MAX (max_total_input_groups + insn_total_groups_for_op_addr
                       + insn_total_groups_for_insn,
                       max_total_input_groups + insn_total_groups_for_inputs);

              insn_total_groups_for_outputs += max_total_output_groups;

              insn_total_groups += MAX (MAX (insn_total_groups_for_outputs,
                                             insn_total_groups_for_inputs),
                                        insn_total_groups_for_other_addr);

              /* If this is a CALL_INSN and caller-saves will need
                 a spill register, act as if the spill register is
                 needed for this insn.  However, the spill register
                 can be used by any reload of this insn, so we only
                 need do something if no need for that class has
                 been recorded.

                 The assumption that every CALL_INSN will trigger a
                 caller-save is highly conservative; however, the number
                 of cases where caller-saves will need a spill register but
                 a block containing a CALL_INSN won't need a spill register
                 of that class should be quite rare.

                 If a group is needed, the size and mode of the group will
                 have been set up at the beginning of this loop.  */

              if (GET_CODE (insn) == CALL_INSN
                  && caller_save_spill_class != NO_REGS)
                {
                  int *caller_save_needs
                    = (caller_save_group_size > 1 ? insn_groups : insn_needs);

                  if (caller_save_needs[(int) caller_save_spill_class] == 0)
                    {
                      register enum reg_class *p
                        = reg_class_superclasses[(int) caller_save_spill_class];

                      caller_save_needs[(int) caller_save_spill_class]++;

                      while (*p != LIM_REG_CLASSES)
                        caller_save_needs[(int) *p++] += 1;
                    }

                  if (caller_save_group_size > 1)
                    insn_total_groups = MAX (insn_total_groups, 1);

                  /* Show that this basic block will need a register of
                     this class.  */

                  if (global
                      && ! (basic_block_needs[(int) caller_save_spill_class]
                            [this_block]))
                    {
                      basic_block_needs[(int) caller_save_spill_class]
                        [this_block] = 1;
                      new_basic_block_needs = 1;
                    }
                }

#ifdef SMALL_REGISTER_CLASSES
              /* If this insn stores the value of a function call,
                 and that value is in a register that has been spilled,
                 and if the insn needs a reload in a class
                 that might use that register as the reload register,
                 then add an extra need in that class.
                 This makes sure we have a register available that does
                 not overlap the return value.  */
              if (avoid_return_reg)
                {
                  int regno = REGNO (avoid_return_reg);
                  int nregs
                    = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
                  int r;
                  int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];

                  /* First compute the "basic needs", which counts a
                     need only in the smallest class in which it
                     is required.  */

                  bcopy (insn_needs, basic_needs, sizeof basic_needs);
                  bcopy (insn_groups, basic_groups, sizeof basic_groups);

                  for (i = 0; i < N_REG_CLASSES; i++)
                    {
                      enum reg_class *p;

                      if (basic_needs[i] >= 0)
                        for (p = reg_class_superclasses[i];
                             *p != LIM_REG_CLASSES; p++)
                          basic_needs[(int) *p] -= basic_needs[i];

                      if (basic_groups[i] >= 0)
                        for (p = reg_class_superclasses[i];
                             *p != LIM_REG_CLASSES; p++)
                          basic_groups[(int) *p] -= basic_groups[i];
                    }

                  /* Now count extra regs if there might be a conflict with
                     the return value register.

                     ??? This is not quite correct because we don't properly
                     handle the case of groups, but if we end up doing
                     something wrong, it either will end up not mattering or
                     we will abort elsewhere.  */

                  for (r = regno; r < regno + nregs; r++)
                    if (spill_reg_order[r] >= 0)
                      for (i = 0; i < N_REG_CLASSES; i++)
                        if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
                          {
                            if (basic_needs[i] > 0 || basic_groups[i] > 0)
                              {
                                enum reg_class *p;

                                insn_needs[i]++;
                                p = reg_class_superclasses[i];
                                while (*p != LIM_REG_CLASSES)
                                  insn_needs[(int) *p++]++;
                              }
                          }
                }
#endif /* SMALL_REGISTER_CLASSES */

              /* For each class, collect maximum need of any insn.  */

              for (i = 0; i < N_REG_CLASSES; i++)
                {
                  if (max_needs[i] < insn_needs[i])
                    {
                      max_needs[i] = insn_needs[i];
                      max_needs_insn[i] = insn;
                    }
                  if (max_groups[i] < insn_groups[i])
                    {
                      max_groups[i] = insn_groups[i];
                      max_groups_insn[i] = insn;
                    }
                  if (insn_total_groups > 0)
                    if (max_nongroups[i] < insn_needs[i])
                      {
                        max_nongroups[i] = insn_needs[i];
                        max_nongroups_insn[i] = insn;
                      }
                }
            }
          /* Note that there is a continue statement above.  */
        }

      /* If we allocated any new memory locations, make another pass
         since it might have changed elimination offsets.  */
      if (starting_frame_size != get_frame_size ())
        something_changed = 1;

      if (dumpfile)
        for (i = 0; i < N_REG_CLASSES; i++)
          {
            if (max_needs[i] > 0)
              fprintf (dumpfile,
                       ";; Need %d reg%s of class %s (for insn %d).\n",
                       max_needs[i], max_needs[i] == 1 ? "" : "s",
                       reg_class_names[i], INSN_UID (max_needs_insn[i]));
            if (max_nongroups[i] > 0)
              fprintf (dumpfile,
                       ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
                       max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
                       reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
            if (max_groups[i] > 0)
              fprintf (dumpfile,
                       ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
                       max_groups[i], max_groups[i] == 1 ? "" : "s",
                       mode_name[(int) group_mode[i]],
                       reg_class_names[i], INSN_UID (max_groups_insn[i]));
          }

      /* If we have caller-saves, set up the save areas and see if caller-save
         will need a spill register.  */

      if (caller_save_needed
          && ! setup_save_areas (&something_changed)
          && caller_save_spill_class == NO_REGS)
        {
          /* The class we will need depends on whether the machine
             supports the sum of two registers for an address; see
             find_address_reloads for details.  */

          caller_save_spill_class
            = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
          caller_save_group_size
            = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
          something_changed = 1;
        }

1489 /* See if anything that happened changes which eliminations are valid.
1490 For example, on the Sparc, whether or not the frame pointer can
1491 be eliminated can depend on what registers have been used. We need
1492 not check some conditions again (such as flag_omit_frame_pointer)
1493 since they can't have changed. */
1494
1495 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1496 if ((ep->from == FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1497#ifdef ELIMINABLE_REGS
1498 || ! CAN_ELIMINATE (ep->from, ep->to)
1499#endif
1500 )
1501 ep->can_eliminate = 0;
1502
32131a9c
RK
1503 /* Look for the case where we have discovered that we can't replace
1504 register A with register B and that means that we will now be
1505 trying to replace register A with register C. This means we can
1506 no longer replace register C with register B and we need to disable
1507 such an elimination, if it exists. This occurs often with A == ap,
1508 B == sp, and C == fp. */
a8fdc208 1509
32131a9c
RK
1510 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1511 {
1512 struct elim_table *op;
1513 register int new_to = -1;
1514
1515 if (! ep->can_eliminate && ep->can_eliminate_previous)
1516 {
1517 /* Find the current elimination for ep->from, if there is a
1518 new one. */
1519 for (op = reg_eliminate;
1520 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1521 if (op->from == ep->from && op->can_eliminate)
1522 {
1523 new_to = op->to;
1524 break;
1525 }
1526
1527 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1528 disable it. */
1529 for (op = reg_eliminate;
1530 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1531 if (op->from == new_to && op->to == ep->to)
1532 op->can_eliminate = 0;
1533 }
1534 }
1535
1536 /* See if any registers that we thought we could eliminate the previous
1537 time are no longer eliminable. If so, something has changed and we
1538 must spill the register. Also, recompute the number of eliminable
1539 registers and see if the frame pointer is needed; it is if there is
1540 no elimination of the frame pointer that we can perform. */
1541
1542 frame_pointer_needed = 1;
1543 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1544 {
1545 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM)
1546 frame_pointer_needed = 0;
1547
1548 if (! ep->can_eliminate && ep->can_eliminate_previous)
1549 {
1550 ep->can_eliminate_previous = 0;
1551 spill_hard_reg (ep->from, global, dumpfile, 1);
1552 regs_ever_live[ep->from] = 1;
1553 something_changed = 1;
1554 num_eliminable--;
1555 }
1556 }
1557
1558 /* If all needs are met, we win. */
1559
1560 for (i = 0; i < N_REG_CLASSES; i++)
1561 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1562 break;
1563 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1564 break;
1565
546b63fb
RK
1566 /* Not all needs are met; must spill some hard regs. */
1567
1568 /* Put all registers spilled so far back in potential_reload_regs, but
1569 put them at the front, since we've already spilled most of the
1570	 pseudos in them (we might have left some pseudos unspilled if they
1571	 were in a block that didn't need any spill registers of a conflicting
1572	 class).  We used to try to mark off the need for those registers,
1573 but doing so properly is very complex and reallocating them is the
1574 simpler approach. First, "pack" potential_reload_regs by pushing
1575 any nonnegative entries towards the end. That will leave room
1576 for the registers we already spilled.
1577
1578 Also, undo the marking of the spill registers from the last time
1579	 around in FORBIDDEN_REGS since we will probably be allocating
1580 them again below.
1581
1582 ??? It is theoretically possible that we might end up not using one
1583 of our previously-spilled registers in this allocation, even though
1584 they are at the head of the list. It's not clear what to do about
1585 this, but it was no better before, when we marked off the needs met
1586 by the previously-spilled registers. With the current code, globals
1587 can be allocated into these registers, but locals cannot. */
1588
1589 if (n_spills)
1590 {
1591 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1592 if (potential_reload_regs[i] != -1)
1593 potential_reload_regs[j--] = potential_reload_regs[i];
32131a9c 1594
546b63fb
RK
1595 for (i = 0; i < n_spills; i++)
1596 {
1597 potential_reload_regs[i] = spill_regs[i];
1598 spill_reg_order[spill_regs[i]] = -1;
1599 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1600 }
32131a9c 1601
546b63fb
RK
1602 n_spills = 0;
1603 }
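	  /* At this point the previously chosen spill registers occupy the
	     front of potential_reload_regs, the remaining candidates have been
	     packed toward the end, and n_spills is zero again, so the
	     allocation below starts over from scratch.  */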
32131a9c
RK
1604
1605      /* Now find more reload regs to satisfy the remaining need.
1606 Do it by ascending class number, since otherwise a reg
1607 might be spilled for a big class and might fail to count
1608 for a smaller class even though it belongs to that class.
1609
1610 Count spilled regs in `spills', and add entries to
1611 `spill_regs' and `spill_reg_order'.
1612
1613 ??? Note there is a problem here.
1614 When there is a need for a group in a high-numbered class,
1615 and also need for non-group regs that come from a lower class,
1616 the non-group regs are chosen first. If there aren't many regs,
1617 they might leave no room for a group.
1618
1619 This was happening on the 386. To fix it, we added the code
1620 that calls possible_group_p, so that the lower class won't
1621 break up the last possible group.
1622
1623 Really fixing the problem would require changes above
1624 in counting the regs already spilled, and in choose_reload_regs.
1625 It might be hard to avoid introducing bugs there. */
1626
546b63fb
RK
1627 CLEAR_HARD_REG_SET (counted_for_groups);
1628 CLEAR_HARD_REG_SET (counted_for_nongroups);
1629
32131a9c
RK
1630 for (class = 0; class < N_REG_CLASSES; class++)
1631 {
1632 /* First get the groups of registers.
1633 If we got single registers first, we might fragment
1634 possible groups. */
1635 while (max_groups[class] > 0)
1636 {
1637 /* If any single spilled regs happen to form groups,
1638 count them now. Maybe we don't really need
1639 to spill another group. */
1640 count_possible_groups (group_size, group_mode, max_groups);
1641
93193ab5
RK
1642 if (max_groups[class] <= 0)
1643 break;
1644
32131a9c
RK
1645 /* Groups of size 2 (the only groups used on most machines)
1646 are treated specially. */
1647 if (group_size[class] == 2)
1648 {
1649 /* First, look for a register that will complete a group. */
1650 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1651 {
32131a9c 1652 int other;
546b63fb
RK
1653
1654 j = potential_reload_regs[i];
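		      /* Accept J only if its neighbor (J-1 or J+1) is already
			 a spill register, both registers belong to CLASS, the
			 pair is valid in the group mode, and the neighbor has
			 not already been claimed for a group or counted
			 against the non-group need.  */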
32131a9c
RK
1655 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1656 &&
1657 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1658 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1659 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1660 && HARD_REGNO_MODE_OK (other, group_mode[class])
1661 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1662 other)
1663 /* We don't want one part of another group.
1664 We could get "two groups" that overlap! */
1665 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1666 ||
1667 (j < FIRST_PSEUDO_REGISTER - 1
1668 && (other = j + 1, spill_reg_order[other] >= 0)
1669 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1670 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1671 && HARD_REGNO_MODE_OK (j, group_mode[class])
1672 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1673 other)
1674 && ! TEST_HARD_REG_BIT (counted_for_groups,
1675 other))))
1676 {
1677 register enum reg_class *p;
1678
1679 /* We have found one that will complete a group,
1680 so count off one group as provided. */
1681 max_groups[class]--;
1682 p = reg_class_superclasses[class];
1683 while (*p != LIM_REG_CLASSES)
1684 max_groups[(int) *p++]--;
1685
1686 /* Indicate both these regs are part of a group. */
1687 SET_HARD_REG_BIT (counted_for_groups, j);
1688 SET_HARD_REG_BIT (counted_for_groups, other);
1689 break;
1690 }
1691 }
1692 /* We can't complete a group, so start one. */
1693 if (i == FIRST_PSEUDO_REGISTER)
1694 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1695 {
57697575 1696 int k;
546b63fb 1697 j = potential_reload_regs[i];
57697575
RS
1698 /* Verify that J+1 is a potential reload reg. */
1699 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1700 if (potential_reload_regs[k] == j + 1)
1701 break;
32131a9c 1702 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
57697575 1703 && k < FIRST_PSEUDO_REGISTER
32131a9c
RK
1704 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1705 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1706 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1707 && HARD_REGNO_MODE_OK (j, group_mode[class])
1708 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
ee9f63c6
RS
1709 j + 1)
1710 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
32131a9c
RK
1711 break;
1712 }
1713
1714 /* I should be the index in potential_reload_regs
1715 of the new reload reg we have found. */
1716
5352b11a
RS
1717 if (i >= FIRST_PSEUDO_REGISTER)
1718 {
1719 /* There are no groups left to spill. */
1720 spill_failure (max_groups_insn[class]);
1721 failure = 1;
1722 goto failed;
1723 }
1724 else
1725 something_changed
fb3821f7 1726 |= new_spill_reg (i, class, max_needs, NULL_PTR,
5352b11a 1727 global, dumpfile);
32131a9c
RK
1728 }
1729 else
1730 {
1731 /* For groups of more than 2 registers,
1732 look for a sufficient sequence of unspilled registers,
1733 and spill them all at once. */
1734 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1735 {
32131a9c 1736 int k;
546b63fb
RK
1737
1738 j = potential_reload_regs[i];
9d1a4667
RS
1739 if (j >= 0
1740 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
32131a9c
RK
1741 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1742 {
1743 /* Check each reg in the sequence. */
1744 for (k = 0; k < group_size[class]; k++)
1745 if (! (spill_reg_order[j + k] < 0
1746 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1747 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1748 break;
1749 /* We got a full sequence, so spill them all. */
1750 if (k == group_size[class])
1751 {
1752 register enum reg_class *p;
1753 for (k = 0; k < group_size[class]; k++)
1754 {
1755 int idx;
1756 SET_HARD_REG_BIT (counted_for_groups, j + k);
1757 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1758 if (potential_reload_regs[idx] == j + k)
1759 break;
9d1a4667
RS
1760 something_changed
1761 |= new_spill_reg (idx, class,
1762 max_needs, NULL_PTR,
1763 global, dumpfile);
32131a9c
RK
1764 }
1765
1766 /* We have found one that will complete a group,
1767 so count off one group as provided. */
1768 max_groups[class]--;
1769 p = reg_class_superclasses[class];
1770 while (*p != LIM_REG_CLASSES)
1771 max_groups[(int) *p++]--;
1772
1773 break;
1774 }
1775 }
1776 }
fa52261e 1777 /* We couldn't find any registers for this reload.
9d1a4667
RS
1778 Avoid going into an infinite loop. */
1779 if (i >= FIRST_PSEUDO_REGISTER)
1780 {
1781 /* There are no groups left. */
1782 spill_failure (max_groups_insn[class]);
1783 failure = 1;
1784 goto failed;
1785 }
32131a9c
RK
1786 }
1787 }
1788
1789 /* Now similarly satisfy all need for single registers. */
1790
1791 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1792 {
9a6cde3a
RS
1793#ifdef SMALL_REGISTER_CLASSES
1794 /* This should be right for all machines, but only the 386
1795 is known to need it, so this conditional plays safe.
1796 ??? For 2.5, try making this unconditional. */
1797 /* If we spilled enough regs, but they weren't counted
1798 against the non-group need, see if we can count them now.
1799 If so, we can avoid some actual spilling. */
1800 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1801 for (i = 0; i < n_spills; i++)
1802 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1803 spill_regs[i])
1804 && !TEST_HARD_REG_BIT (counted_for_groups,
1805 spill_regs[i])
1806 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1807 spill_regs[i])
1808 && max_nongroups[class] > 0)
1809 {
1810 register enum reg_class *p;
1811
1812 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1813 max_nongroups[class]--;
1814 p = reg_class_superclasses[class];
1815 while (*p != LIM_REG_CLASSES)
1816 max_nongroups[(int) *p++]--;
1817 }
1818 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1819 break;
1820#endif
1821
32131a9c
RK
1822 /* Consider the potential reload regs that aren't
1823 yet in use as reload regs, in order of preference.
1824 Find the most preferred one that's in this class. */
1825
1826 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1827 if (potential_reload_regs[i] >= 0
1828 && TEST_HARD_REG_BIT (reg_class_contents[class],
1829 potential_reload_regs[i])
1830 /* If this reg will not be available for groups,
1831 pick one that does not foreclose possible groups.
1832 This is a kludge, and not very general,
1833 but it should be sufficient to make the 386 work,
1834 and the problem should not occur on machines with
1835 more registers. */
1836 && (max_nongroups[class] == 0
1837 || possible_group_p (potential_reload_regs[i], max_groups)))
1838 break;
1839
e404a39a
RK
1840 /* If we couldn't get a register, try to get one even if we
1841 might foreclose possible groups. This may cause problems
1842 later, but that's better than aborting now, since it is
1843 possible that we will, in fact, be able to form the needed
1844 group even with this allocation. */
1845
1846 if (i >= FIRST_PSEUDO_REGISTER
1847 && (asm_noperands (max_needs[class] > 0
1848 ? max_needs_insn[class]
1849 : max_nongroups_insn[class])
1850 < 0))
1851 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1852 if (potential_reload_regs[i] >= 0
1853 && TEST_HARD_REG_BIT (reg_class_contents[class],
1854 potential_reload_regs[i]))
1855 break;
1856
32131a9c
RK
1857 /* I should be the index in potential_reload_regs
1858 of the new reload reg we have found. */
1859
5352b11a
RS
1860 if (i >= FIRST_PSEUDO_REGISTER)
1861 {
1862 /* There are no possible registers left to spill. */
1863 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1864 : max_nongroups_insn[class]);
1865 failure = 1;
1866 goto failed;
1867 }
1868 else
1869 something_changed
1870 |= new_spill_reg (i, class, max_needs, max_nongroups,
1871 global, dumpfile);
32131a9c
RK
1872 }
1873 }
1874 }
1875
1876 /* If global-alloc was run, notify it of any register eliminations we have
1877 done. */
1878 if (global)
1879 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1880 if (ep->can_eliminate)
1881 mark_elimination (ep->from, ep->to);
1882
32131a9c 1883 /* Insert code to save and restore call-clobbered hard regs
a8efe40d
RK
1884     around calls.  Tell it what mode to use so that we will process
1885 those insns in reload_as_needed if we have to. */
32131a9c
RK
1886
1887 if (caller_save_needed)
a8efe40d
RK
1888 save_call_clobbered_regs (num_eliminable ? QImode
1889 : caller_save_spill_class != NO_REGS ? HImode
1890 : VOIDmode);
32131a9c
RK
1891
1892 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1893 If that insn didn't set the register (i.e., it copied the register to
1894 memory), just delete that insn instead of the equivalencing insn plus
1895 anything now dead. If we call delete_dead_insn on that insn, we may
1896     delete the insn that actually sets the register if the register dies
1897 there and that is incorrect. */
1898
1899 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1900 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1901 && GET_CODE (reg_equiv_init[i]) != NOTE)
1902 {
1903 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1904 delete_dead_insn (reg_equiv_init[i]);
1905 else
1906 {
1907 PUT_CODE (reg_equiv_init[i], NOTE);
1908 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1909 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1910 }
1911 }
1912
1913 /* Use the reload registers where necessary
1914 by generating move instructions to move the must-be-register
1915 values into or out of the reload registers. */
1916
a8efe40d
RK
1917 if (something_needs_reloads || something_needs_elimination
1918 || (caller_save_needed && num_eliminable)
1919 || caller_save_spill_class != NO_REGS)
32131a9c
RK
1920 reload_as_needed (first, global);
1921
2a1f8b6b 1922 /* If we were able to eliminate the frame pointer, show that it is no
546b63fb	1923	     longer live at the start of any basic block.  If it is live by
2a1f8b6b
RK
1924 virtue of being in a pseudo, that pseudo will be marked live
1925 and hence the frame pointer will be known to be live via that
1926 pseudo. */
1927
1928 if (! frame_pointer_needed)
1929 for (i = 0; i < n_basic_blocks; i++)
1930 basic_block_live_at_start[i][FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1931 &= ~ ((REGSET_ELT_TYPE) 1 << (FRAME_POINTER_REGNUM % REGSET_ELT_BITS));
1932
5352b11a
RS
1933 /* Come here (with failure set nonzero) if we can't get enough spill regs
1934 and we decide not to abort about it. */
1935 failed:
1936
a3ec87a8
RS
1937 reload_in_progress = 0;
1938
32131a9c
RK
1939 /* Now eliminate all pseudo regs by modifying them into
1940 their equivalent memory references.
1941 The REG-rtx's for the pseudos are modified in place,
1942 so all insns that used to refer to them now refer to memory.
1943
1944 For a reg that has a reg_equiv_address, all those insns
1945 were changed by reloading so that no insns refer to it any longer;
1946 but the DECL_RTL of a variable decl may refer to it,
1947 and if so this causes the debugging info to mention the variable. */
1948
1949 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1950 {
1951 rtx addr = 0;
ab1fd483 1952 int in_struct = 0;
32131a9c 1953 if (reg_equiv_mem[i])
ab1fd483
RS
1954 {
1955 addr = XEXP (reg_equiv_mem[i], 0);
1956 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1957 }
32131a9c
RK
1958 if (reg_equiv_address[i])
1959 addr = reg_equiv_address[i];
1960 if (addr)
1961 {
1962 if (reg_renumber[i] < 0)
1963 {
1964 rtx reg = regno_reg_rtx[i];
1965 XEXP (reg, 0) = addr;
1966 REG_USERVAR_P (reg) = 0;
ab1fd483 1967 MEM_IN_STRUCT_P (reg) = in_struct;
32131a9c
RK
1968 PUT_CODE (reg, MEM);
1969 }
1970 else if (reg_equiv_mem[i])
1971 XEXP (reg_equiv_mem[i], 0) = addr;
1972 }
1973 }
1974
1975#ifdef PRESERVE_DEATH_INFO_REGNO_P
1976 /* Make a pass over all the insns and remove death notes for things that
1977 are no longer registers or no longer die in the insn (e.g., an input
1978 and output pseudo being tied). */
1979
1980 for (insn = first; insn; insn = NEXT_INSN (insn))
1981 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1982 {
1983 rtx note, next;
1984
1985 for (note = REG_NOTES (insn); note; note = next)
1986 {
1987 next = XEXP (note, 1);
1988 if (REG_NOTE_KIND (note) == REG_DEAD
1989 && (GET_CODE (XEXP (note, 0)) != REG
1990 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1991 remove_note (insn, note);
1992 }
1993 }
1994#endif
1995
1996 /* Indicate that we no longer have known memory locations or constants. */
1997 reg_equiv_constant = 0;
1998 reg_equiv_memory_loc = 0;
5352b11a 1999
c307c237
RK
2000 free (scratch_list);
2001 scratch_list = 0;
2002 free (scratch_block);
2003 scratch_block = 0;
2004
5352b11a 2005 return failure;
32131a9c
RK
2006}
2007\f
2008/* Nonzero if, after spilling reg REGNO for non-groups,
2009 it will still be possible to find a group if we still need one. */
2010
2011static int
2012possible_group_p (regno, max_groups)
2013 int regno;
2014 int *max_groups;
2015{
2016 int i;
2017 int class = (int) NO_REGS;
2018
2019 for (i = 0; i < (int) N_REG_CLASSES; i++)
2020 if (max_groups[i] > 0)
2021 {
2022 class = i;
2023 break;
2024 }
2025
2026 if (class == (int) NO_REGS)
2027 return 1;
2028
2029 /* Consider each pair of consecutive registers. */
2030 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2031 {
2032 /* Ignore pairs that include reg REGNO. */
2033 if (i == regno || i + 1 == regno)
2034 continue;
2035
2036 /* Ignore pairs that are outside the class that needs the group.
2037 ??? Here we fail to handle the case where two different classes
2038 independently need groups. But this never happens with our
2039 current machine descriptions. */
2040 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2041 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2042 continue;
2043
2044 /* A pair of consecutive regs we can still spill does the trick. */
2045 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2046 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2047 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2048 return 1;
2049
2050 /* A pair of one already spilled and one we can spill does it
2051 provided the one already spilled is not otherwise reserved. */
2052 if (spill_reg_order[i] < 0
2053 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2054 && spill_reg_order[i + 1] >= 0
2055 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2056 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2057 return 1;
2058 if (spill_reg_order[i + 1] < 0
2059 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2060 && spill_reg_order[i] >= 0
2061 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2062 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2063 return 1;
2064 }
2065
2066 return 0;
2067}
2068\f
2069/* Count any groups that can be formed from the registers recently spilled.
2070 This is done class by class, in order of ascending class number. */
2071
2072static void
2073count_possible_groups (group_size, group_mode, max_groups)
546b63fb 2074 int *group_size;
32131a9c 2075 enum machine_mode *group_mode;
546b63fb 2076 int *max_groups;
32131a9c
RK
2077{
2078 int i;
2079 /* Now find all consecutive groups of spilled registers
2080 and mark each group off against the need for such groups.
2081 But don't count them against ordinary need, yet. */
2082
2083 for (i = 0; i < N_REG_CLASSES; i++)
2084 if (group_size[i] > 1)
2085 {
93193ab5 2086 HARD_REG_SET new;
32131a9c
RK
2087 int j;
2088
93193ab5
RK
2089 CLEAR_HARD_REG_SET (new);
2090
32131a9c
RK
2091 /* Make a mask of all the regs that are spill regs in class I. */
2092 for (j = 0; j < n_spills; j++)
2093 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
2094 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
2095 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
2096 spill_regs[j]))
93193ab5
RK
2097 SET_HARD_REG_BIT (new, spill_regs[j]);
2098
32131a9c
RK
2099 /* Find each consecutive group of them. */
2100 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
93193ab5
RK
2101 if (TEST_HARD_REG_BIT (new, j)
2102 && j + group_size[i] <= FIRST_PSEUDO_REGISTER
32131a9c
RK
2103 /* Next line in case group-mode for this class
2104 demands an even-odd pair. */
2105 && HARD_REGNO_MODE_OK (j, group_mode[i]))
2106 {
2107 int k;
2108 for (k = 1; k < group_size[i]; k++)
93193ab5 2109 if (! TEST_HARD_REG_BIT (new, j + k))
32131a9c
RK
2110 break;
2111 if (k == group_size[i])
2112 {
2113 /* We found a group. Mark it off against this class's
2114 need for groups, and against each superclass too. */
2115 register enum reg_class *p;
2116 max_groups[i]--;
2117 p = reg_class_superclasses[i];
2118 while (*p != LIM_REG_CLASSES)
2119 max_groups[(int) *p++]--;
a8fdc208 2120 /* Don't count these registers again. */
32131a9c
RK
2121 for (k = 0; k < group_size[i]; k++)
2122 SET_HARD_REG_BIT (counted_for_groups, j + k);
2123 }
fa52261e
RS
2124 /* Skip to the last reg in this group. When j is incremented
2125 above, it will then point to the first reg of the next
2126 possible group. */
2127 j += k - 1;
32131a9c
RK
2128 }
2129 }
2130
2131}
2132\f
2133/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2134 another mode that needs to be reloaded for the same register class CLASS.
2135 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2136 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2137
2138 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2139 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2140 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2141 causes unnecessary failures on machines requiring alignment of register
2142 groups when the two modes are different sizes, because the larger mode has
2143 more strict alignment rules than the smaller mode. */
2144
2145static int
2146modes_equiv_for_class_p (allocate_mode, other_mode, class)
2147 enum machine_mode allocate_mode, other_mode;
2148 enum reg_class class;
2149{
2150 register int regno;
2151 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2152 {
2153 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2154 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2155 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2156 return 0;
2157 }
2158 return 1;
2159}
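/* A sketch of the intent: with ALLOCATE_MODE == DImode and OTHER_MODE ==
   SImode, the test fails only if some register in CLASS can hold DImode but
   not SImode.  On most targets the larger mode is the more restrictive one,
   so the two modes are usually equivalent for the class.  */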
2160
5352b11a
RS
2161/* Handle the failure to find a register to spill.
2162 INSN should be one of the insns which needed this particular spill reg. */
2163
2164static void
2165spill_failure (insn)
2166 rtx insn;
2167{
2168 if (asm_noperands (PATTERN (insn)) >= 0)
2169 error_for_asm (insn, "`asm' needs too many reloads");
2170 else
2171 abort ();
2172}
2173
32131a9c
RK
2174/* Add a new register to the tables of available spill-registers
2175 (as well as spilling all pseudos allocated to the register).
2176 I is the index of this register in potential_reload_regs.
2177 CLASS is the regclass whose need is being satisfied.
2178 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2179 so that this register can count off against them.
2180 MAX_NONGROUPS is 0 if this register is part of a group.
2181 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2182
2183static int
2184new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2185 int i;
2186 int class;
2187 int *max_needs;
2188 int *max_nongroups;
2189 int global;
2190 FILE *dumpfile;
2191{
2192 register enum reg_class *p;
2193 int val;
2194 int regno = potential_reload_regs[i];
2195
2196 if (i >= FIRST_PSEUDO_REGISTER)
2197 abort (); /* Caller failed to find any register. */
2198
2199 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2200 fatal ("fixed or forbidden register was spilled.\n\
2201This may be due to a compiler bug or to impossible asm statements.");
2202
2203 /* Make reg REGNO an additional reload reg. */
2204
2205 potential_reload_regs[i] = -1;
2206 spill_regs[n_spills] = regno;
2207 spill_reg_order[regno] = n_spills;
2208 if (dumpfile)
2209 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2210
2211 /* Clear off the needs we just satisfied. */
2212
2213 max_needs[class]--;
2214 p = reg_class_superclasses[class];
2215 while (*p != LIM_REG_CLASSES)
2216 max_needs[(int) *p++]--;
2217
2218 if (max_nongroups && max_nongroups[class] > 0)
2219 {
2220 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2221 max_nongroups[class]--;
2222 p = reg_class_superclasses[class];
2223 while (*p != LIM_REG_CLASSES)
2224 max_nongroups[(int) *p++]--;
2225 }
2226
2227 /* Spill every pseudo reg that was allocated to this reg
2228 or to something that overlaps this reg. */
2229
2230 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2231
2232 /* If there are some registers still to eliminate and this register
2233 wasn't ever used before, additional stack space may have to be
2234 allocated to store this register. Thus, we may have changed the offset
2235 between the stack and frame pointers, so mark that something has changed.
2236 (If new pseudos were spilled, thus requiring more space, VAL would have
2237 been set non-zero by the call to spill_hard_reg above since additional
2238     reloads may be needed in that case.)
2239
2240 One might think that we need only set VAL to 1 if this is a call-used
2241 register. However, the set of registers that must be saved by the
2242 prologue is not identical to the call-used set. For example, the
2243 register used by the call insn for the return PC is a call-used register,
2244 but must be saved by the prologue. */
2245 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2246 val = 1;
2247
2248 regs_ever_live[spill_regs[n_spills]] = 1;
2249 n_spills++;
2250
2251 return val;
2252}
2253\f
2254/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2255 data that is dead in INSN. */
2256
2257static void
2258delete_dead_insn (insn)
2259 rtx insn;
2260{
2261 rtx prev = prev_real_insn (insn);
2262 rtx prev_dest;
2263
2264 /* If the previous insn sets a register that dies in our insn, delete it
2265 too. */
2266 if (prev && GET_CODE (PATTERN (prev)) == SET
2267 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2268 && reg_mentioned_p (prev_dest, PATTERN (insn))
2269 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2270 delete_dead_insn (prev);
2271
2272 PUT_CODE (insn, NOTE);
2273 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2274 NOTE_SOURCE_FILE (insn) = 0;
2275}
2276
2277/* Modify the home of pseudo-reg I.
2278 The new home is present in reg_renumber[I].
2279
2280 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2281 or it may be -1, meaning there is none or it is not relevant.
2282 This is used so that all pseudos spilled from a given hard reg
2283 can share one stack slot. */
2284
2285static void
2286alter_reg (i, from_reg)
2287 register int i;
2288 int from_reg;
2289{
2290 /* When outputting an inline function, this can happen
2291 for a reg that isn't actually used. */
2292 if (regno_reg_rtx[i] == 0)
2293 return;
2294
2295 /* If the reg got changed to a MEM at rtl-generation time,
2296 ignore it. */
2297 if (GET_CODE (regno_reg_rtx[i]) != REG)
2298 return;
2299
2300 /* Modify the reg-rtx to contain the new hard reg
2301 number or else to contain its pseudo reg number. */
2302 REGNO (regno_reg_rtx[i])
2303 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2304
2305 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2306 allocate a stack slot for it. */
2307
2308 if (reg_renumber[i] < 0
2309 && reg_n_refs[i] > 0
2310 && reg_equiv_constant[i] == 0
2311 && reg_equiv_memory_loc[i] == 0)
2312 {
2313 register rtx x;
2314 int inherent_size = PSEUDO_REGNO_BYTES (i);
2315 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2316 int adjust = 0;
2317
2318 /* Each pseudo reg has an inherent size which comes from its own mode,
2319 and a total size which provides room for paradoxical subregs
2320 which refer to the pseudo reg in wider modes.
2321
2322 We can use a slot already allocated if it provides both
2323 enough inherent space and enough total space.
2324 Otherwise, we allocate a new slot, making sure that it has no less
2325	 inherent space, and no less total space, than the previous slot.  */
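      /* For example, an SImode pseudo (inherent size 4) that is also
	 referenced through a paradoxical DImode subreg would have a
	 reg_max_ref_width of 8, so any slot reused for it must supply
	 4 bytes of inherent space and 8 bytes of total space.  */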
2326 if (from_reg == -1)
2327 {
2328 /* No known place to spill from => no slot to reuse. */
2329 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2330#if BYTES_BIG_ENDIAN
2331 /* Cancel the big-endian correction done in assign_stack_local.
2332 Get the address of the beginning of the slot.
2333 This is so we can do a big-endian correction unconditionally
2334 below. */
2335 adjust = inherent_size - total_size;
2336#endif
2337 }
2338 /* Reuse a stack slot if possible. */
2339 else if (spill_stack_slot[from_reg] != 0
2340 && spill_stack_slot_width[from_reg] >= total_size
2341 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2342 >= inherent_size))
2343 x = spill_stack_slot[from_reg];
2344 /* Allocate a bigger slot. */
2345 else
2346 {
2347 /* Compute maximum size needed, both for inherent size
2348 and for total size. */
2349 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2350 if (spill_stack_slot[from_reg])
2351 {
2352 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2353 > inherent_size)
2354 mode = GET_MODE (spill_stack_slot[from_reg]);
2355 if (spill_stack_slot_width[from_reg] > total_size)
2356 total_size = spill_stack_slot_width[from_reg];
2357 }
2358 /* Make a slot with that size. */
2359 x = assign_stack_local (mode, total_size, -1);
2360#if BYTES_BIG_ENDIAN
2361 /* Cancel the big-endian correction done in assign_stack_local.
2362 Get the address of the beginning of the slot.
2363 This is so we can do a big-endian correction unconditionally
2364 below. */
2365 adjust = GET_MODE_SIZE (mode) - total_size;
2366#endif
2367 spill_stack_slot[from_reg] = x;
2368 spill_stack_slot_width[from_reg] = total_size;
2369 }
2370
2371#if BYTES_BIG_ENDIAN
2372 /* On a big endian machine, the "address" of the slot
2373 is the address of the low part that fits its inherent mode. */
2374 if (inherent_size < total_size)
2375 adjust += (total_size - inherent_size);
2376#endif /* BYTES_BIG_ENDIAN */
2377
2378 /* If we have any adjustment to make, or if the stack slot is the
2379 wrong mode, make a new stack slot. */
2380 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2381 {
2382 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2383 plus_constant (XEXP (x, 0), adjust));
2384 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2385 }
2386
2387 /* Save the stack slot for later. */
2388 reg_equiv_memory_loc[i] = x;
2389 }
2390}
2391
2392/* Mark the slots in regs_ever_live for the hard regs
2393 used by pseudo-reg number REGNO. */
2394
2395void
2396mark_home_live (regno)
2397 int regno;
2398{
2399 register int i, lim;
2400 i = reg_renumber[regno];
2401 if (i < 0)
2402 return;
2403 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2404 while (i < lim)
2405 regs_ever_live[i++] = 1;
2406}
c307c237
RK
2407
2408/* Mark the registers used in SCRATCH as being live. */
2409
2410static void
2411mark_scratch_live (scratch)
2412 rtx scratch;
2413{
2414 register int i;
2415 int regno = REGNO (scratch);
2416 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2417
2418 for (i = regno; i < lim; i++)
2419 regs_ever_live[i] = 1;
2420}
32131a9c
RK
2421\f
2422/* This function handles the tracking of elimination offsets around branches.
2423
2424 X is a piece of RTL being scanned.
2425
2426 INSN is the insn that it came from, if any.
2427
2428 INITIAL_P is non-zero if we are to set the offset to be the initial
2429 offset and zero if we are setting the offset of the label to be the
2430 current offset. */
2431
2432static void
2433set_label_offsets (x, insn, initial_p)
2434 rtx x;
2435 rtx insn;
2436 int initial_p;
2437{
2438 enum rtx_code code = GET_CODE (x);
2439 rtx tem;
2440 int i;
2441 struct elim_table *p;
2442
2443 switch (code)
2444 {
2445 case LABEL_REF:
8be386d9
RS
2446 if (LABEL_REF_NONLOCAL_P (x))
2447 return;
2448
32131a9c
RK
2449 x = XEXP (x, 0);
2450
2451 /* ... fall through ... */
2452
2453 case CODE_LABEL:
2454 /* If we know nothing about this label, set the desired offsets. Note
2455 that this sets the offset at a label to be the offset before a label
2456 if we don't know anything about the label. This is not correct for
2457 the label after a BARRIER, but is the best guess we can make. If
2458 we guessed wrong, we will suppress an elimination that might have
2459 been possible had we been able to guess correctly. */
2460
2461 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2462 {
2463 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2464 offsets_at[CODE_LABEL_NUMBER (x)][i]
2465 = (initial_p ? reg_eliminate[i].initial_offset
2466 : reg_eliminate[i].offset);
2467 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2468 }
2469
2470 /* Otherwise, if this is the definition of a label and it is
d45cf215 2471 preceded by a BARRIER, set our offsets to the known offset of
32131a9c
RK
2472 that label. */
2473
2474 else if (x == insn
2475 && (tem = prev_nonnote_insn (insn)) != 0
2476 && GET_CODE (tem) == BARRIER)
2a4b5f3b
RK
2477 {
2478 num_not_at_initial_offset = 0;
2479 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2480 {
2481 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2482 = offsets_at[CODE_LABEL_NUMBER (x)][i];
1d0d98f3
RK
2483 if (reg_eliminate[i].can_eliminate
2484 && (reg_eliminate[i].offset
2485 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
2486 num_not_at_initial_offset++;
2487 }
2488 }
32131a9c
RK
2489
2490 else
2491 /* If neither of the above cases is true, compare each offset
2492 with those previously recorded and suppress any eliminations
2493 where the offsets disagree. */
a8fdc208 2494
32131a9c
RK
2495 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2496 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2497 != (initial_p ? reg_eliminate[i].initial_offset
2498 : reg_eliminate[i].offset))
2499 reg_eliminate[i].can_eliminate = 0;
2500
2501 return;
2502
2503 case JUMP_INSN:
2504 set_label_offsets (PATTERN (insn), insn, initial_p);
2505
2506 /* ... fall through ... */
2507
2508 case INSN:
2509 case CALL_INSN:
2510 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2511 and hence must have all eliminations at their initial offsets. */
2512 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2513 if (REG_NOTE_KIND (tem) == REG_LABEL)
2514 set_label_offsets (XEXP (tem, 0), insn, 1);
2515 return;
2516
2517 case ADDR_VEC:
2518 case ADDR_DIFF_VEC:
2519 /* Each of the labels in the address vector must be at their initial
2520	 offsets.  We want the first field for ADDR_VEC and the second
2521 field for ADDR_DIFF_VEC. */
2522
2523 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2524 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2525 insn, initial_p);
2526 return;
2527
2528 case SET:
2529 /* We only care about setting PC. If the source is not RETURN,
2530 IF_THEN_ELSE, or a label, disable any eliminations not at
2531 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2532 isn't one of those possibilities. For branches to a label,
2533 call ourselves recursively.
2534
2535 Note that this can disable elimination unnecessarily when we have
2536 a non-local goto since it will look like a non-constant jump to
2537 someplace in the current function. This isn't a significant
2538 problem since such jumps will normally be when all elimination
2539 pairs are back to their initial offsets. */
2540
2541 if (SET_DEST (x) != pc_rtx)
2542 return;
2543
2544 switch (GET_CODE (SET_SRC (x)))
2545 {
2546 case PC:
2547 case RETURN:
2548 return;
2549
2550 case LABEL_REF:
2551 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2552 return;
2553
2554 case IF_THEN_ELSE:
2555 tem = XEXP (SET_SRC (x), 1);
2556 if (GET_CODE (tem) == LABEL_REF)
2557 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2558 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2559 break;
2560
2561 tem = XEXP (SET_SRC (x), 2);
2562 if (GET_CODE (tem) == LABEL_REF)
2563 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2564 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2565 break;
2566 return;
2567 }
2568
2569 /* If we reach here, all eliminations must be at their initial
2570 offset because we are doing a jump to a variable address. */
2571 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2572 if (p->offset != p->initial_offset)
2573 p->can_eliminate = 0;
2574 }
2575}
2576\f
2577/* Used for communication between the next two functions to properly share
2578 the vector for an ASM_OPERANDS. */
2579
2580static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2581
a8fdc208 2582/* Scan X and replace any eliminable registers (such as fp) with a
32131a9c
RK
2583 replacement (such as sp), plus an offset.
2584
2585 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2586 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2587 MEM, we are allowed to replace a sum of a register and the constant zero
2588 with the register, which we cannot do outside a MEM. In addition, we need
2589 to record the fact that a register is referenced outside a MEM.
2590
2591 If INSN is nonzero, it is the insn containing X. If we replace a REG
2592 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2593   in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
   CLOBBER of the pseudo after INSN so find_equiv_regs will know
2594 that the REG is being modified.
2595
2596 If we see a modification to a register we know about, take the
2597 appropriate action (see case SET, below).
2598
2599   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2600 replacements done assuming all offsets are at their initial values. If
2601 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2602 encounter, return the actual location so that find_reloads will do
2603 the proper thing. */
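/* As an illustration of the above: if the frame pointer is being eliminated
   in favor of the stack pointer with an offset of 16, a bare reference to the
   frame pointer becomes (plus (reg sp) (const_int 16)), while inside a MEM
   the sum (plus (reg fp) (const_int -16)) collapses to just (reg sp).  */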
2604
2605rtx
2606eliminate_regs (x, mem_mode, insn)
2607 rtx x;
2608 enum machine_mode mem_mode;
2609 rtx insn;
2610{
2611 enum rtx_code code = GET_CODE (x);
2612 struct elim_table *ep;
2613 int regno;
2614 rtx new;
2615 int i, j;
2616 char *fmt;
2617 int copied = 0;
2618
2619 switch (code)
2620 {
2621 case CONST_INT:
2622 case CONST_DOUBLE:
2623 case CONST:
2624 case SYMBOL_REF:
2625 case CODE_LABEL:
2626 case PC:
2627 case CC0:
2628 case ASM_INPUT:
2629 case ADDR_VEC:
2630 case ADDR_DIFF_VEC:
2631 case RETURN:
2632 return x;
2633
2634 case REG:
2635 regno = REGNO (x);
2636
2637 /* First handle the case where we encounter a bare register that
2638 is eliminable. Replace it with a PLUS. */
2639 if (regno < FIRST_PSEUDO_REGISTER)
2640 {
2641 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2642 ep++)
2643 if (ep->from_rtx == x && ep->can_eliminate)
2644 {
2645 if (! mem_mode)
2646 ep->ref_outside_mem = 1;
2647 return plus_constant (ep->to_rtx, ep->previous_offset);
2648 }
2649
2650 }
2651 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2652 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2653 {
2654 /* In this case, find_reloads would attempt to either use an
2655 incorrect address (if something is not at its initial offset)
2656	     or substitute a replaced address into an insn (which loses
2657 if the offset is changed by some later action). So we simply
2658 return the replaced stack slot (assuming it is changed by
2659 elimination) and ignore the fact that this is actually a
2660 reference to the pseudo. Ensure we make a copy of the
2661 address in case it is shared. */
fb3821f7
CH
2662 new = eliminate_regs (reg_equiv_memory_loc[regno],
2663 mem_mode, NULL_RTX);
32131a9c 2664 if (new != reg_equiv_memory_loc[regno])
208dffa5
RS
2665 {
2666 cannot_omit_stores[regno] = 1;
2667 return copy_rtx (new);
2668 }
32131a9c
RK
2669 }
2670 return x;
2671
2672 case PLUS:
2673 /* If this is the sum of an eliminable register and a constant, rework
2674 the sum. */
2675 if (GET_CODE (XEXP (x, 0)) == REG
2676 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2677 && CONSTANT_P (XEXP (x, 1)))
2678 {
2679 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2680 ep++)
2681 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2682 {
2683 if (! mem_mode)
2684 ep->ref_outside_mem = 1;
2685
2686 /* The only time we want to replace a PLUS with a REG (this
2687 occurs when the constant operand of the PLUS is the negative
2688 of the offset) is when we are inside a MEM. We won't want
2689 to do so at other times because that would change the
2690 structure of the insn in a way that reload can't handle.
2691 We special-case the commonest situation in
2692 eliminate_regs_in_insn, so just replace a PLUS with a
2693 PLUS here, unless inside a MEM. */
a23b64d5 2694 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
32131a9c
RK
2695 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2696 return ep->to_rtx;
2697 else
2698 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2699 plus_constant (XEXP (x, 1),
2700 ep->previous_offset));
2701 }
2702
2703 /* If the register is not eliminable, we are done since the other
2704 operand is a constant. */
2705 return x;
2706 }
2707
2708 /* If this is part of an address, we want to bring any constant to the
2709 outermost PLUS. We will do this by doing register replacement in
2710 our operands and seeing if a constant shows up in one of them.
2711
2712 We assume here this is part of an address (or a "load address" insn)
2713 since an eliminable register is not likely to appear in any other
2714 context.
2715
2716 If we have (plus (eliminable) (reg)), we want to produce
2717	 (plus (plus (replacement) (reg)) (const)).  If this was part of a
2718 normal add insn, (plus (replacement) (reg)) will be pushed as a
2719 reload. This is the desired action. */
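      /* Concretely, with fp eliminated to sp plus 8, the address
	 (plus (reg fp) (reg R)) becomes, via form_sum,
	 (plus (plus (reg sp) (reg R)) (const_int 8)), keeping the
	 constant at the outermost PLUS as described above.  */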
2720
2721 {
fb3821f7
CH
2722 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2723 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX);
32131a9c
RK
2724
2725 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2726 {
2727 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2728 didn't get a hard register but has a reg_equiv_constant,
32131a9c
RK
2729 we must replace the constant here since it may no longer
2730 be in the position of any operand. */
2731 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2732 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2733 && reg_renumber[REGNO (new1)] < 0
2734 && reg_equiv_constant != 0
2735 && reg_equiv_constant[REGNO (new1)] != 0)
2736 new1 = reg_equiv_constant[REGNO (new1)];
2737 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2738 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2739 && reg_renumber[REGNO (new0)] < 0
2740 && reg_equiv_constant[REGNO (new0)] != 0)
2741 new0 = reg_equiv_constant[REGNO (new0)];
2742
2743 new = form_sum (new0, new1);
2744
2745 /* As above, if we are not inside a MEM we do not want to
2746 turn a PLUS into something else. We might try to do so here
2747 for an addition of 0 if we aren't optimizing. */
2748 if (! mem_mode && GET_CODE (new) != PLUS)
2749 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2750 else
2751 return new;
2752 }
2753 }
2754 return x;
2755
2756 case EXPR_LIST:
2757 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2758 if (XEXP (x, 0))
2759 {
fb3821f7 2760 new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
32131a9c
RK
2761 if (new != XEXP (x, 0))
2762 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2763 }
2764
2765 /* ... fall through ... */
2766
2767 case INSN_LIST:
2768 /* Now do eliminations in the rest of the chain. If this was
2769 an EXPR_LIST, this might result in allocating more memory than is
2770 strictly needed, but it simplifies the code. */
2771 if (XEXP (x, 1))
2772 {
fb3821f7 2773 new = eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX);
32131a9c
RK
2774 if (new != XEXP (x, 1))
2775 return gen_rtx (INSN_LIST, GET_MODE (x), XEXP (x, 0), new);
2776 }
2777 return x;
2778
2779 case CALL:
2780 case COMPARE:
2781 case MINUS:
2782 case MULT:
2783 case DIV: case UDIV:
2784 case MOD: case UMOD:
2785 case AND: case IOR: case XOR:
2786 case LSHIFT: case ASHIFT: case ROTATE:
2787 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2788 case NE: case EQ:
2789 case GE: case GT: case GEU: case GTU:
2790 case LE: case LT: case LEU: case LTU:
2791 {
fb3821f7
CH
2792 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2793 rtx new1
2794 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX) : 0;
32131a9c
RK
2795
2796 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2797 return gen_rtx (code, GET_MODE (x), new0, new1);
2798 }
2799 return x;
2800
2801 case PRE_INC:
2802 case POST_INC:
2803 case PRE_DEC:
2804 case POST_DEC:
2805 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2806 if (ep->to_rtx == XEXP (x, 0))
2807 {
4c05b187
RK
2808 int size = GET_MODE_SIZE (mem_mode);
2809
2810 /* If more bytes than MEM_MODE are pushed, account for them. */
2811#ifdef PUSH_ROUNDING
2812 if (ep->to_rtx == stack_pointer_rtx)
2813 size = PUSH_ROUNDING (size);
2814#endif
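	  /* A decrement of the replacement register (a push, when it is the
	     stack pointer) widens the gap between the eliminated register and
	     its replacement, so the offset grows by SIZE; an increment
	     shrinks it.  */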
32131a9c 2815 if (code == PRE_DEC || code == POST_DEC)
4c05b187 2816 ep->offset += size;
32131a9c 2817 else
4c05b187 2818 ep->offset -= size;
32131a9c
RK
2819 }
2820
2821 /* Fall through to generic unary operation case. */
2822 case USE:
2823 case STRICT_LOW_PART:
2824 case NEG: case NOT:
2825 case SIGN_EXTEND: case ZERO_EXTEND:
2826 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2827 case FLOAT: case FIX:
2828 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2829 case ABS:
2830 case SQRT:
2831 case FFS:
fb3821f7 2832 new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
32131a9c
RK
2833 if (new != XEXP (x, 0))
2834 return gen_rtx (code, GET_MODE (x), new);
2835 return x;
2836
2837 case SUBREG:
2838 /* Similar to above processing, but preserve SUBREG_WORD.
2839 Convert (subreg (mem)) to (mem) if not paradoxical.
2840 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2841 pseudo didn't get a hard reg, we must replace this with the
2842 eliminated version of the memory location because push_reloads
2843 may do the replacement in certain circumstances. */
2844 if (GET_CODE (SUBREG_REG (x)) == REG
2845 && (GET_MODE_SIZE (GET_MODE (x))
2846 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2847 && reg_equiv_memory_loc != 0
2848 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2849 {
2850 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
fb3821f7 2851 mem_mode, NULL_RTX);
32131a9c
RK
2852
2853 /* If we didn't change anything, we must retain the pseudo. */
2854 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2855 new = XEXP (x, 0);
2856 else
2857 /* Otherwise, ensure NEW isn't shared in case we have to reload
2858 it. */
2859 new = copy_rtx (new);
2860 }
2861 else
fb3821f7 2862 new = eliminate_regs (SUBREG_REG (x), mem_mode, NULL_RTX);
32131a9c
RK
2863
2864 if (new != XEXP (x, 0))
2865 {
2866 if (GET_CODE (new) == MEM
2867 && (GET_MODE_SIZE (GET_MODE (x))
a3b75c07
RS
2868 <= GET_MODE_SIZE (GET_MODE (new)))
2869#if defined(BYTE_LOADS_ZERO_EXTEND) || defined(BYTE_LOADS_SIGN_EXTEND)
2870 /* On these machines we will be reloading what is
2871 inside the SUBREG if it originally was a pseudo and
2872 the inner and outer modes are both a word or
2873 smaller. So leave the SUBREG then. */
2874 && ! (GET_CODE (SUBREG_REG (x)) == REG
2875 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2876 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
2877#endif
2878 )
32131a9c
RK
2879 {
2880 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2881 enum machine_mode mode = GET_MODE (x);
2882
2883#if BYTES_BIG_ENDIAN
2884 offset += (MIN (UNITS_PER_WORD,
2885 GET_MODE_SIZE (GET_MODE (new)))
2886 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2887#endif
2888
2889 PUT_MODE (new, mode);
2890 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2891 return new;
2892 }
2893 else
2894 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2895 }
2896
2897 return x;
2898
2899 case CLOBBER:
2900 /* If clobbering a register that is the replacement register for an
d45cf215 2901 elimination we still think can be performed, note that it cannot
32131a9c
RK
2902 be performed. Otherwise, we need not be concerned about it. */
2903 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2904 if (ep->to_rtx == XEXP (x, 0))
2905 ep->can_eliminate = 0;
2906
2045084c
JVA
2907 new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2908 if (new != XEXP (x, 0))
2909 return gen_rtx (code, GET_MODE (x), new);
32131a9c
RK
2910 return x;
2911
2912 case ASM_OPERANDS:
2913 {
2914 rtx *temp_vec;
2915 /* Properly handle sharing input and constraint vectors. */
2916 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2917 {
2918 /* When we come to a new vector not seen before,
2919 scan all its elements; keep the old vector if none
2920 of them changes; otherwise, make a copy. */
2921 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2922 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2923 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2924 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
fb3821f7 2925 mem_mode, NULL_RTX);
32131a9c
RK
2926
2927 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2928 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2929 break;
2930
2931 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2932 new_asm_operands_vec = old_asm_operands_vec;
2933 else
2934 new_asm_operands_vec
2935 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2936 }
2937
2938 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2939 if (new_asm_operands_vec == old_asm_operands_vec)
2940 return x;
2941
2942 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2943 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2944 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2945 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2946 ASM_OPERANDS_SOURCE_FILE (x),
2947 ASM_OPERANDS_SOURCE_LINE (x));
2948 new->volatil = x->volatil;
2949 return new;
2950 }
2951
2952 case SET:
2953 /* Check for setting a register that we know about. */
2954 if (GET_CODE (SET_DEST (x)) == REG)
2955 {
2956 /* See if this is setting the replacement register for an
a8fdc208 2957 elimination.
32131a9c
RK
2958
2959 If DEST is the frame pointer, we do nothing because we assume that
2960 all assignments to the frame pointer are for non-local gotos and
2961 are being done at a time when they are valid and do not disturb
2962 anything else. Some machines want to eliminate a fake argument
2963 pointer with either the frame or stack pointer. Assignments to
2964 the frame pointer must not prevent this elimination. */
2965
2966 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2967 ep++)
2968 if (ep->to_rtx == SET_DEST (x)
2969 && SET_DEST (x) != frame_pointer_rtx)
2970 {
6dc42e49 2971 /* If it is being incremented, adjust the offset. Otherwise,
32131a9c
RK
2972 this elimination can't be done. */
2973 rtx src = SET_SRC (x);
2974
2975 if (GET_CODE (src) == PLUS
2976 && XEXP (src, 0) == SET_DEST (x)
2977 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2978 ep->offset -= INTVAL (XEXP (src, 1));
2979 else
2980 ep->can_eliminate = 0;
2981 }
2982
2983 /* Now check to see we are assigning to a register that can be
2984 eliminated. If so, it must be as part of a PARALLEL, since we
2985 will not have been called if this is a single SET. So indicate
2986 that we can no longer eliminate this reg. */
2987 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2988 ep++)
2989 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
2990 ep->can_eliminate = 0;
2991 }
2992
2993 /* Now avoid the loop below in this common case. */
2994 {
fb3821f7
CH
2995 rtx new0 = eliminate_regs (SET_DEST (x), 0, NULL_RTX);
2996 rtx new1 = eliminate_regs (SET_SRC (x), 0, NULL_RTX);
32131a9c
RK
2997
2998 /* If SET_DEST changed from a REG to a MEM and INSN is non-zero,
2999 write a CLOBBER insn. */
3000 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3001 && insn != 0)
3002 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3003
3004 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3005 return gen_rtx (SET, VOIDmode, new0, new1);
3006 }
3007
3008 return x;
3009
3010 case MEM:
3011 /* Our only special processing is to pass the mode of the MEM to our
3012 recursive call and copy the flags. While we are here, handle this
3013 case more efficiently. */
fb3821f7 3014 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), NULL_RTX);
32131a9c
RK
3015 if (new != XEXP (x, 0))
3016 {
3017 new = gen_rtx (MEM, GET_MODE (x), new);
3018 new->volatil = x->volatil;
3019 new->unchanging = x->unchanging;
3020 new->in_struct = x->in_struct;
3021 return new;
3022 }
3023 else
3024 return x;
3025 }
3026
3027 /* Process each of our operands recursively. If any have changed, make a
3028 copy of the rtx. */
3029 fmt = GET_RTX_FORMAT (code);
3030 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3031 {
3032 if (*fmt == 'e')
3033 {
fb3821f7 3034 new = eliminate_regs (XEXP (x, i), mem_mode, NULL_RTX);
32131a9c
RK
3035 if (new != XEXP (x, i) && ! copied)
3036 {
3037 rtx new_x = rtx_alloc (code);
3038 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3039 + (sizeof (new_x->fld[0])
3040 * GET_RTX_LENGTH (code))));
3041 x = new_x;
3042 copied = 1;
3043 }
3044 XEXP (x, i) = new;
3045 }
3046 else if (*fmt == 'E')
3047 {
3048 int copied_vec = 0;
3049 for (j = 0; j < XVECLEN (x, i); j++)
3050 {
3051 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3052 if (new != XVECEXP (x, i, j) && ! copied_vec)
3053 {
3054 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3055 &XVECEXP (x, i, 0));
3056 if (! copied)
3057 {
3058 rtx new_x = rtx_alloc (code);
3059 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3060 + (sizeof (new_x->fld[0])
3061 * GET_RTX_LENGTH (code))));
3062 x = new_x;
3063 copied = 1;
3064 }
3065 XVEC (x, i) = new_v;
3066 copied_vec = 1;
3067 }
3068 XVECEXP (x, i, j) = new;
3069 }
3070 }
3071 }
3072
3073 return x;
3074}
3075\f
3076/* Scan INSN and eliminate all eliminable registers in it.
3077
3078 If REPLACE is nonzero, do the replacement destructively. Also
 3079	   delete the insn as dead if it is setting an eliminable register.
3080
3081 If REPLACE is zero, do all our allocations in reload_obstack.
3082
3083 If no eliminations were done and this insn doesn't require any elimination
3084 processing (these are not identical conditions: it might be updating sp,
3085 but not referencing fp; this needs to be seen during reload_as_needed so
3086 that the offset between fp and sp can be taken into consideration), zero
3087 is returned. Otherwise, 1 is returned. */
3088
3089static int
3090eliminate_regs_in_insn (insn, replace)
3091 rtx insn;
3092 int replace;
3093{
3094 rtx old_body = PATTERN (insn);
3095 rtx new_body;
3096 int val = 0;
3097 struct elim_table *ep;
3098
3099 if (! replace)
3100 push_obstacks (&reload_obstack, &reload_obstack);
3101
3102 if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
3103 && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
3104 {
3105 /* Check for setting an eliminable register. */
3106 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3107 if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
3108 {
3109 /* In this case this insn isn't serving a useful purpose. We
3110 will delete it in reload_as_needed once we know that this
3111 elimination is, in fact, being done.
3112
 3113	       If REPLACE isn't set, we can't delete this insn, but needn't
3114 process it since it won't be used unless something changes. */
3115 if (replace)
3116 delete_dead_insn (insn);
3117 val = 1;
3118 goto done;
3119 }
3120
3121 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3122 in the insn is the negative of the offset in FROM. Substitute
3123 (set (reg) (reg to)) for the insn and change its code.
3124
 3125	   We have to do this here, rather than in eliminate_regs, so that we can
3126 change the insn code. */
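 	/* An illustrative example (not part of the original source): if the
 	   fp-to-sp elimination is currently at offset 8, then
 	     (set (reg r) (plus (reg fp) (const_int -8)))
 	   computes exactly the value sp will hold, so the insn is rewritten as
 	     (set (reg r) (reg sp))
 	   and re-recognized as a plain register move.  */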
3127
3128 if (GET_CODE (SET_SRC (old_body)) == PLUS
3129 && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
3130 && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
3131 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3132 ep++)
3133 if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
922d9d40 3134 && ep->can_eliminate)
32131a9c 3135 {
922d9d40
RK
3136 /* We must stop at the first elimination that will be used.
3137 If this one would replace the PLUS with a REG, do it
3138 now. Otherwise, quit the loop and let eliminate_regs
3139 do its normal replacement. */
3140 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
3141 {
3142 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3143 SET_DEST (old_body), ep->to_rtx);
3144 INSN_CODE (insn) = -1;
3145 val = 1;
3146 goto done;
3147 }
3148
3149 break;
32131a9c
RK
3150 }
3151 }
3152
3153 old_asm_operands_vec = 0;
3154
3155 /* Replace the body of this insn with a substituted form. If we changed
3156 something, return non-zero. If this is the final call for this
3157 insn (REPLACE is non-zero), do the elimination in REG_NOTES as well.
3158
3159 If we are replacing a body that was a (set X (plus Y Z)), try to
3160 re-recognize the insn. We do this in case we had a simple addition
3161 but now can do this as a load-address. This saves an insn in this
3162 common case. */
3163
fb3821f7 3164 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
32131a9c
RK
3165 if (new_body != old_body)
3166 {
7c791b13
RK
3167 /* If we aren't replacing things permanently and we changed something,
3168 make another copy to ensure that all the RTL is new. Otherwise
 3169	 things can go wrong if find_reloads swaps commutative operands
3170 and one is inside RTL that has been copied while the other is not. */
3171
4d411872
RS
3172 /* Don't copy an asm_operands because (1) there's no need and (2)
3173 copy_rtx can't do it properly when there are multiple outputs. */
b84f9d9c 3174 if (! replace && asm_noperands (old_body) < 0)
7c791b13
RK
3175 new_body = copy_rtx (new_body);
3176
4a5d0fb5 3177 /* If we had a move insn but now we don't, rerecognize it. */
0ba846c7
RS
3178 if ((GET_CODE (old_body) == SET && GET_CODE (SET_SRC (old_body)) == REG
3179 && (GET_CODE (new_body) != SET
3180 || GET_CODE (SET_SRC (new_body)) != REG))
3181 /* If this was an add insn before, rerecognize. */
3182 ||
3183 (GET_CODE (old_body) == SET
3184 && GET_CODE (SET_SRC (old_body)) == PLUS))
4a5d0fb5
RS
3185 {
3186 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
0ba846c7
RS
3187 /* If recognition fails, store the new body anyway.
3188 It's normal to have recognition failures here
3189 due to bizarre memory addresses; reloading will fix them. */
3190 PATTERN (insn) = new_body;
4a5d0fb5 3191 }
0ba846c7 3192 else
32131a9c
RK
3193 PATTERN (insn) = new_body;
3194
3195 if (replace && REG_NOTES (insn))
fb3821f7 3196 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, NULL_RTX);
32131a9c
RK
3197 val = 1;
3198 }
a8fdc208 3199
32131a9c
RK
3200 /* Loop through all elimination pairs. See if any have changed and
3201 recalculate the number not at initial offset.
3202
a8efe40d
RK
3203 Compute the maximum offset (minimum offset if the stack does not
3204 grow downward) for each elimination pair.
3205
32131a9c
RK
 3206     We also detect a case where register elimination cannot be done,
3207 namely, if a register would be both changed and referenced outside a MEM
3208 in the resulting insn since such an insn is often undefined and, even if
3209 not, we cannot know what meaning will be given to it. Note that it is
3210 valid to have a register used in an address in an insn that changes it
3211 (presumably with a pre- or post-increment or decrement).
3212
3213 If anything changes, return nonzero. */
3214
3215 num_not_at_initial_offset = 0;
3216 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3217 {
3218 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3219 ep->can_eliminate = 0;
3220
3221 ep->ref_outside_mem = 0;
3222
3223 if (ep->previous_offset != ep->offset)
3224 val = 1;
3225
3226 ep->previous_offset = ep->offset;
3227 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3228 num_not_at_initial_offset++;
a8efe40d
RK
3229
3230#ifdef STACK_GROWS_DOWNWARD
3231 ep->max_offset = MAX (ep->max_offset, ep->offset);
3232#else
3233 ep->max_offset = MIN (ep->max_offset, ep->offset);
3234#endif
32131a9c
RK
3235 }
3236
3237 done:
3238 if (! replace)
3239 pop_obstacks ();
3240
3241 return val;
3242}
3243
3244/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3245 replacement we currently believe is valid, mark it as not eliminable if X
3246 modifies DEST in any way other than by adding a constant integer to it.
3247
3248 If DEST is the frame pointer, we do nothing because we assume that
3249 all assignments to the frame pointer are nonlocal gotos and are being done
3250 at a time when they are valid and do not disturb anything else.
3251 Some machines want to eliminate a fake argument pointer with either the
3252 frame or stack pointer. Assignments to the frame pointer must not prevent
3253 this elimination.
3254
3255 Called via note_stores from reload before starting its passes to scan
3256 the insns of the function. */
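/* An illustrative example (not part of the original source): if sp is the
   TO register of an elimination, an insn such as
     (set (reg sp) (reg r1))
   changes it unpredictably and so disables that elimination, whereas
     (set (reg sp) (plus (reg sp) (const_int 32)))
   changes it only by a known constant and leaves the elimination valid.  */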
3257
3258static void
3259mark_not_eliminable (dest, x)
3260 rtx dest;
3261 rtx x;
3262{
3263 register int i;
3264
3265 /* A SUBREG of a hard register here is just changing its mode. We should
3266 not see a SUBREG of an eliminable hard register, but check just in
3267 case. */
3268 if (GET_CODE (dest) == SUBREG)
3269 dest = SUBREG_REG (dest);
3270
3271 if (dest == frame_pointer_rtx)
3272 return;
3273
3274 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3275 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3276 && (GET_CODE (x) != SET
3277 || GET_CODE (SET_SRC (x)) != PLUS
3278 || XEXP (SET_SRC (x), 0) != dest
3279 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3280 {
3281 reg_eliminate[i].can_eliminate_previous
3282 = reg_eliminate[i].can_eliminate = 0;
3283 num_eliminable--;
3284 }
3285}
3286\f
3287/* Kick all pseudos out of hard register REGNO.
3288 If GLOBAL is nonzero, try to find someplace else to put them.
3289 If DUMPFILE is nonzero, log actions taken on that file.
3290
3291 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
 3292   because we found we can't eliminate some register.  In that case, no pseudos
3293 are allowed to be in the register, even if they are only in a block that
3294 doesn't require spill registers, unlike the case when we are spilling this
3295 hard reg to produce another spill register.
3296
3297 Return nonzero if any pseudos needed to be kicked out. */
3298
3299static int
3300spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3301 register int regno;
3302 int global;
3303 FILE *dumpfile;
3304 int cant_eliminate;
3305{
c307c237 3306 enum reg_class class = REGNO_REG_CLASS (regno);
32131a9c
RK
3307 int something_changed = 0;
3308 register int i;
3309
3310 SET_HARD_REG_BIT (forbidden_regs, regno);
3311
3312 /* Spill every pseudo reg that was allocated to this reg
3313 or to something that overlaps this reg. */
3314
3315 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3316 if (reg_renumber[i] >= 0
3317 && reg_renumber[i] <= regno
a8fdc208 3318 && (reg_renumber[i]
32131a9c
RK
3319 + HARD_REGNO_NREGS (reg_renumber[i],
3320 PSEUDO_REGNO_MODE (i))
3321 > regno))
3322 {
32131a9c
RK
3323 /* If this register belongs solely to a basic block which needed no
3324 spilling of any class that this register is contained in,
3325 leave it be, unless we are spilling this register because
3326 it was a hard register that can't be eliminated. */
3327
3328 if (! cant_eliminate
3329 && basic_block_needs[0]
3330 && reg_basic_block[i] >= 0
3331 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3332 {
3333 enum reg_class *p;
3334
3335 for (p = reg_class_superclasses[(int) class];
3336 *p != LIM_REG_CLASSES; p++)
3337 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3338 break;
a8fdc208 3339
32131a9c
RK
3340 if (*p == LIM_REG_CLASSES)
3341 continue;
3342 }
3343
3344 /* Mark it as no longer having a hard register home. */
3345 reg_renumber[i] = -1;
3346 /* We will need to scan everything again. */
3347 something_changed = 1;
3348 if (global)
3349 retry_global_alloc (i, forbidden_regs);
3350
3351 alter_reg (i, regno);
3352 if (dumpfile)
3353 {
3354 if (reg_renumber[i] == -1)
3355 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3356 else
3357 fprintf (dumpfile, " Register %d now in %d.\n\n",
3358 i, reg_renumber[i]);
3359 }
3360 }
c307c237
RK
3361 for (i = 0; i < scratch_list_length; i++)
3362 {
3363 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3364 {
3365 if (! cant_eliminate && basic_block_needs[0]
3366 && ! basic_block_needs[(int) class][scratch_block[i]])
3367 {
3368 enum reg_class *p;
3369
3370 for (p = reg_class_superclasses[(int) class];
3371 *p != LIM_REG_CLASSES; p++)
3372 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3373 break;
3374
3375 if (*p == LIM_REG_CLASSES)
3376 continue;
3377 }
3378 PUT_CODE (scratch_list[i], SCRATCH);
3379 scratch_list[i] = 0;
3380 something_changed = 1;
3381 continue;
3382 }
3383 }
32131a9c
RK
3384
3385 return something_changed;
3386}
3387\f
3388/* Find all paradoxical subregs within X and update reg_max_ref_width. */
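/* An illustrative example (not part of the original source): on a 32-bit
   target, (subreg:SI (reg:HI 65) 0) refers to pseudo 65 in a mode wider
   than its own, so reg_max_ref_width[65] is raised to 4, and any stack
   slot later given to pseudo 65 can be made at least that wide.  */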
3389
3390static void
3391scan_paradoxical_subregs (x)
3392 register rtx x;
3393{
3394 register int i;
3395 register char *fmt;
3396 register enum rtx_code code = GET_CODE (x);
3397
3398 switch (code)
3399 {
3400 case CONST_INT:
3401 case CONST:
3402 case SYMBOL_REF:
3403 case LABEL_REF:
3404 case CONST_DOUBLE:
3405 case CC0:
3406 case PC:
3407 case REG:
3408 case USE:
3409 case CLOBBER:
3410 return;
3411
3412 case SUBREG:
3413 if (GET_CODE (SUBREG_REG (x)) == REG
3414 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3415 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3416 = GET_MODE_SIZE (GET_MODE (x));
3417 return;
3418 }
3419
3420 fmt = GET_RTX_FORMAT (code);
3421 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3422 {
3423 if (fmt[i] == 'e')
3424 scan_paradoxical_subregs (XEXP (x, i));
3425 else if (fmt[i] == 'E')
3426 {
3427 register int j;
3428 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3429 scan_paradoxical_subregs (XVECEXP (x, i, j));
3430 }
3431 }
3432}
3433\f
32131a9c
RK
3434static int
3435hard_reg_use_compare (p1, p2)
3436 struct hard_reg_n_uses *p1, *p2;
3437{
3438 int tem = p1->uses - p2->uses;
3439 if (tem != 0) return tem;
3440 /* If regs are equally good, sort by regno,
3441 so that the results of qsort leave nothing to chance. */
3442 return p1->regno - p2->regno;
3443}
3444
3445/* Choose the order to consider regs for use as reload registers
3446 based on how much trouble would be caused by spilling one.
3447 Store them in order of decreasing preference in potential_reload_regs. */
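/* An illustrative outcome (not part of the original source): hard regs with
   no pseudo allocated to them are listed first (following REG_ALLOC_ORDER
   if defined, otherwise call-used before call-saved), then the regs already
   in use in order of increasing use count, with fixed and eliminable regs
   pushed to the very end by their artificially large counts.  */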
3448
3449static void
3450order_regs_for_reload ()
3451{
3452 register int i;
3453 register int o = 0;
3454 int large = 0;
3455
3456 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3457
3458 CLEAR_HARD_REG_SET (bad_spill_regs);
3459
3460 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3461 potential_reload_regs[i] = -1;
3462
3463 /* Count number of uses of each hard reg by pseudo regs allocated to it
3464 and then order them by decreasing use. */
3465
3466 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3467 {
3468 hard_reg_n_uses[i].uses = 0;
3469 hard_reg_n_uses[i].regno = i;
3470 }
3471
3472 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3473 {
3474 int regno = reg_renumber[i];
3475 if (regno >= 0)
3476 {
3477 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3478 while (regno < lim)
3479 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3480 }
3481 large += reg_n_refs[i];
3482 }
3483
3484 /* Now fixed registers (which cannot safely be used for reloading)
3485 get a very high use count so they will be considered least desirable.
3486 Registers used explicitly in the rtl code are almost as bad. */
3487
3488 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3489 {
3490 if (fixed_regs[i])
3491 {
3492 hard_reg_n_uses[i].uses += 2 * large + 2;
3493 SET_HARD_REG_BIT (bad_spill_regs, i);
3494 }
3495 else if (regs_explicitly_used[i])
3496 {
3497 hard_reg_n_uses[i].uses += large + 1;
546b63fb 3498#ifndef SMALL_REGISTER_CLASSES
32131a9c
RK
3499 /* ??? We are doing this here because of the potential that
3500 bad code may be generated if a register explicitly used in
3501 an insn was used as a spill register for that insn. But
 3502	     not using these as spill registers may lose on some machine.
3503 We'll have to see how this works out. */
3504 SET_HARD_REG_BIT (bad_spill_regs, i);
546b63fb 3505#endif
32131a9c
RK
3506 }
3507 }
3508 hard_reg_n_uses[FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3509 SET_HARD_REG_BIT (bad_spill_regs, FRAME_POINTER_REGNUM);
3510
3511#ifdef ELIMINABLE_REGS
3512 /* If registers other than the frame pointer are eliminable, mark them as
3513 poor choices. */
3514 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3515 {
3516 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3517 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3518 }
3519#endif
3520
3521 /* Prefer registers not so far used, for use in temporary loading.
3522 Among them, if REG_ALLOC_ORDER is defined, use that order.
3523 Otherwise, prefer registers not preserved by calls. */
3524
3525#ifdef REG_ALLOC_ORDER
3526 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3527 {
3528 int regno = reg_alloc_order[i];
3529
3530 if (hard_reg_n_uses[regno].uses == 0)
3531 potential_reload_regs[o++] = regno;
3532 }
3533#else
3534 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3535 {
3536 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3537 potential_reload_regs[o++] = i;
3538 }
3539 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3540 {
3541 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3542 potential_reload_regs[o++] = i;
3543 }
3544#endif
3545
3546 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3547 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3548
3549 /* Now add the regs that are already used,
3550 preferring those used less often. The fixed and otherwise forbidden
3551 registers will be at the end of this list. */
3552
3553 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3554 if (hard_reg_n_uses[i].uses != 0)
3555 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3556}
3557\f
3558/* Reload pseudo-registers into hard regs around each insn as needed.
3559 Additional register load insns are output before the insn that needs it
3560 and perhaps store insns after insns that modify the reloaded pseudo reg.
3561
3562 reg_last_reload_reg and reg_reloaded_contents keep track of
d08ea79f 3563 which registers are already available in reload registers.
32131a9c
RK
3564 We update these for the reloads that we perform,
3565 as the insns are scanned. */
3566
3567static void
3568reload_as_needed (first, live_known)
3569 rtx first;
3570 int live_known;
3571{
3572 register rtx insn;
3573 register int i;
3574 int this_block = 0;
3575 rtx x;
3576 rtx after_call = 0;
3577
3578 bzero (spill_reg_rtx, sizeof spill_reg_rtx);
3579 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3580 bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
3581 reg_has_output_reload = (char *) alloca (max_regno);
3582 for (i = 0; i < n_spills; i++)
3583 {
3584 reg_reloaded_contents[i] = -1;
3585 reg_reloaded_insn[i] = 0;
3586 }
3587
3588 /* Reset all offsets on eliminable registers to their initial values. */
3589#ifdef ELIMINABLE_REGS
3590 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3591 {
3592 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
510dd77e 3593 reg_eliminate[i].initial_offset);
32131a9c
RK
3594 reg_eliminate[i].previous_offset
3595 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3596 }
3597#else
3598 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3599 reg_eliminate[0].previous_offset
3600 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3601#endif
3602
3603 num_not_at_initial_offset = 0;
3604
3605 for (insn = first; insn;)
3606 {
3607 register rtx next = NEXT_INSN (insn);
3608
3609 /* Notice when we move to a new basic block. */
aa2c50d6 3610 if (live_known && this_block + 1 < n_basic_blocks
32131a9c
RK
3611 && insn == basic_block_head[this_block+1])
3612 ++this_block;
3613
3614 /* If we pass a label, copy the offsets from the label information
3615 into the current offsets of each elimination. */
3616 if (GET_CODE (insn) == CODE_LABEL)
2a4b5f3b
RK
3617 {
3618 num_not_at_initial_offset = 0;
3619 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3620 {
3621 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3622 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
1d0d98f3
RK
3623 if (reg_eliminate[i].can_eliminate
3624 && (reg_eliminate[i].offset
3625 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
3626 num_not_at_initial_offset++;
3627 }
3628 }
32131a9c
RK
3629
3630 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3631 {
3632 rtx avoid_return_reg = 0;
3633
3634#ifdef SMALL_REGISTER_CLASSES
3635 /* Set avoid_return_reg if this is an insn
3636 that might use the value of a function call. */
3637 if (GET_CODE (insn) == CALL_INSN)
3638 {
3639 if (GET_CODE (PATTERN (insn)) == SET)
3640 after_call = SET_DEST (PATTERN (insn));
3641 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3642 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3643 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3644 else
3645 after_call = 0;
3646 }
3647 else if (after_call != 0
3648 && !(GET_CODE (PATTERN (insn)) == SET
3649 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3650 {
3651 if (reg_mentioned_p (after_call, PATTERN (insn)))
3652 avoid_return_reg = after_call;
3653 after_call = 0;
3654 }
3655#endif /* SMALL_REGISTER_CLASSES */
3656
2758481d
RS
 3657	  /* If this is a USE or CLOBBER of a MEM, ensure that any
3658 references to eliminable registers have been removed. */
3659
3660 if ((GET_CODE (PATTERN (insn)) == USE
3661 || GET_CODE (PATTERN (insn)) == CLOBBER)
3662 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3663 XEXP (XEXP (PATTERN (insn), 0), 0)
3664 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
fb3821f7 3665 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
2758481d 3666
32131a9c
RK
3667 /* If we need to do register elimination processing, do so.
3668 This might delete the insn, in which case we are done. */
3669 if (num_eliminable && GET_MODE (insn) == QImode)
3670 {
3671 eliminate_regs_in_insn (insn, 1);
3672 if (GET_CODE (insn) == NOTE)
3673 {
3674 insn = next;
3675 continue;
3676 }
3677 }
3678
3679 if (GET_MODE (insn) == VOIDmode)
3680 n_reloads = 0;
3681 /* First find the pseudo regs that must be reloaded for this insn.
3682 This info is returned in the tables reload_... (see reload.h).
3683 Also modify the body of INSN by substituting RELOAD
3684 rtx's for those pseudo regs. */
3685 else
3686 {
3687 bzero (reg_has_output_reload, max_regno);
3688 CLEAR_HARD_REG_SET (reg_is_output_reload);
3689
3690 find_reloads (insn, 1, spill_indirect_levels, live_known,
3691 spill_reg_order);
3692 }
3693
3694 if (n_reloads > 0)
3695 {
3c3eeea6
RK
3696 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3697 rtx p;
32131a9c
RK
3698 int class;
3699
3700 /* If this block has not had spilling done for a
546b63fb
RK
 3701		 particular class and we have any non-optionals that need a
3702 spill reg in that class, abort. */
32131a9c
RK
3703
3704 for (class = 0; class < N_REG_CLASSES; class++)
3705 if (basic_block_needs[class] != 0
3706 && basic_block_needs[class][this_block] == 0)
3707 for (i = 0; i < n_reloads; i++)
546b63fb
RK
3708 if (class == (int) reload_reg_class[i]
3709 && reload_reg_rtx[i] == 0
3710 && ! reload_optional[i]
3711 && (reload_in[i] != 0 || reload_out[i] != 0
3712 || reload_secondary_p[i] != 0))
3713 abort ();
32131a9c
RK
3714
3715 /* Now compute which reload regs to reload them into. Perhaps
3716 reusing reload regs from previous insns, or else output
3717 load insns to reload them. Maybe output store insns too.
3718 Record the choices of reload reg in reload_reg_rtx. */
3719 choose_reload_regs (insn, avoid_return_reg);
3720
546b63fb
RK
3721#ifdef SMALL_REGISTER_CLASSES
3722 /* Merge any reloads that we didn't combine for fear of
3723 increasing the number of spill registers needed but now
3724 discover can be safely merged. */
3725 merge_assigned_reloads (insn);
3726#endif
3727
32131a9c
RK
3728 /* Generate the insns to reload operands into or out of
3729 their reload regs. */
3730 emit_reload_insns (insn);
3731
3732 /* Substitute the chosen reload regs from reload_reg_rtx
3733 into the insn's body (or perhaps into the bodies of other
 3734		 load and store insns that we just made for reloading
3735 and that we moved the structure into). */
3736 subst_reloads ();
3c3eeea6
RK
3737
3738 /* If this was an ASM, make sure that all the reload insns
3739 we have generated are valid. If not, give an error
3740 and delete them. */
3741
3742 if (asm_noperands (PATTERN (insn)) >= 0)
3743 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3744 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3745 && (recog_memoized (p) < 0
3746 || (insn_extract (p),
3747 ! constrain_operands (INSN_CODE (p), 1))))
3748 {
3749 error_for_asm (insn,
3750 "`asm' operand requires impossible reload");
3751 PUT_CODE (p, NOTE);
3752 NOTE_SOURCE_FILE (p) = 0;
3753 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3754 }
32131a9c
RK
3755 }
3756 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3757 is no longer validly lying around to save a future reload.
3758 Note that this does not detect pseudos that were reloaded
3759 for this insn in order to be stored in
3760 (obeying register constraints). That is correct; such reload
3761 registers ARE still valid. */
3762 note_stores (PATTERN (insn), forget_old_reloads_1);
3763
3764 /* There may have been CLOBBER insns placed after INSN. So scan
3765 between INSN and NEXT and use them to forget old reloads. */
3766 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3767 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3768 note_stores (PATTERN (x), forget_old_reloads_1);
3769
3770#ifdef AUTO_INC_DEC
3771 /* Likewise for regs altered by auto-increment in this insn.
3772 But note that the reg-notes are not changed by reloading:
3773 they still contain the pseudo-regs, not the spill regs. */
3774 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3775 if (REG_NOTE_KIND (x) == REG_INC)
3776 {
3777 /* See if this pseudo reg was reloaded in this insn.
3778 If so, its last-reload info is still valid
3779 because it is based on this insn's reload. */
3780 for (i = 0; i < n_reloads; i++)
3781 if (reload_out[i] == XEXP (x, 0))
3782 break;
3783
08fb99fa 3784 if (i == n_reloads)
9a881562 3785 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
32131a9c
RK
3786 }
3787#endif
3788 }
3789 /* A reload reg's contents are unknown after a label. */
3790 if (GET_CODE (insn) == CODE_LABEL)
3791 for (i = 0; i < n_spills; i++)
3792 {
3793 reg_reloaded_contents[i] = -1;
3794 reg_reloaded_insn[i] = 0;
3795 }
3796
3797 /* Don't assume a reload reg is still good after a call insn
3798 if it is a call-used reg. */
546b63fb 3799 else if (GET_CODE (insn) == CALL_INSN)
32131a9c
RK
3800 for (i = 0; i < n_spills; i++)
3801 if (call_used_regs[spill_regs[i]])
3802 {
3803 reg_reloaded_contents[i] = -1;
3804 reg_reloaded_insn[i] = 0;
3805 }
3806
3807 /* In case registers overlap, allow certain insns to invalidate
3808 particular hard registers. */
3809
3810#ifdef INSN_CLOBBERS_REGNO_P
3811 for (i = 0 ; i < n_spills ; i++)
3812 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3813 {
3814 reg_reloaded_contents[i] = -1;
3815 reg_reloaded_insn[i] = 0;
3816 }
3817#endif
3818
3819 insn = next;
3820
3821#ifdef USE_C_ALLOCA
3822 alloca (0);
3823#endif
3824 }
3825}
3826
3827/* Discard all record of any value reloaded from X,
3828 or reloaded in X from someplace else;
3829 unless X is an output reload reg of the current insn.
3830
3831 X may be a hard reg (the reload reg)
3832 or it may be a pseudo reg that was reloaded from. */
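/* An illustrative example (not part of the original source): if hard reg 3
   is recorded in reg_last_reload_reg as holding the reloaded value of
   pseudo 70, and the current insn stores into pseudo 70 without an output
   reload, the record is cleared here so that a later insn will not reuse
   the now-stale copy in reg 3.  */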
3833
3834static void
9a881562 3835forget_old_reloads_1 (x, ignored)
32131a9c 3836 rtx x;
9a881562 3837 rtx ignored;
32131a9c
RK
3838{
3839 register int regno;
3840 int nr;
0a2e51a9
RS
3841 int offset = 0;
3842
3843 /* note_stores does give us subregs of hard regs. */
3844 while (GET_CODE (x) == SUBREG)
3845 {
3846 offset += SUBREG_WORD (x);
3847 x = SUBREG_REG (x);
3848 }
32131a9c
RK
3849
3850 if (GET_CODE (x) != REG)
3851 return;
3852
0a2e51a9 3853 regno = REGNO (x) + offset;
32131a9c
RK
3854
3855 if (regno >= FIRST_PSEUDO_REGISTER)
3856 nr = 1;
3857 else
3858 {
3859 int i;
3860 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3861 /* Storing into a spilled-reg invalidates its contents.
3862 This can happen if a block-local pseudo is allocated to that reg
3863 and it wasn't spilled because this block's total need is 0.
3864 Then some insn might have an optional reload and use this reg. */
3865 for (i = 0; i < nr; i++)
3866 if (spill_reg_order[regno + i] >= 0
3867 /* But don't do this if the reg actually serves as an output
3868 reload reg in the current instruction. */
3869 && (n_reloads == 0
3870 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3871 {
3872 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3873 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3874 }
3875 }
3876
3877 /* Since value of X has changed,
3878 forget any value previously copied from it. */
3879
3880 while (nr-- > 0)
3881 /* But don't forget a copy if this is the output reload
3882 that establishes the copy's validity. */
3883 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3884 reg_last_reload_reg[regno + nr] = 0;
3885}
3886\f
3887/* For each reload, the mode of the reload register. */
3888static enum machine_mode reload_mode[MAX_RELOADS];
3889
3890/* For each reload, the largest number of registers it will require. */
3891static int reload_nregs[MAX_RELOADS];
3892
3893/* Comparison function for qsort to decide which of two reloads
3894 should be handled first. *P1 and *P2 are the reload numbers. */
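/* An illustrative ordering (not part of the original source): a required
   reload sorts ahead of an optional one; among otherwise equal reloads,
   those whose class holds a single register come first, then larger
   multi-register groups before smaller ones, then lower register-class
   numbers, with the reload number as the final tie-breaker.  */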
3895
3896static int
3897reload_reg_class_lower (p1, p2)
3898 short *p1, *p2;
3899{
3900 register int r1 = *p1, r2 = *p2;
3901 register int t;
a8fdc208 3902
32131a9c
RK
3903 /* Consider required reloads before optional ones. */
3904 t = reload_optional[r1] - reload_optional[r2];
3905 if (t != 0)
3906 return t;
3907
3908 /* Count all solitary classes before non-solitary ones. */
3909 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3910 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3911 if (t != 0)
3912 return t;
3913
3914 /* Aside from solitaires, consider all multi-reg groups first. */
3915 t = reload_nregs[r2] - reload_nregs[r1];
3916 if (t != 0)
3917 return t;
3918
3919 /* Consider reloads in order of increasing reg-class number. */
3920 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3921 if (t != 0)
3922 return t;
3923
3924 /* If reloads are equally urgent, sort by reload number,
3925 so that the results of qsort leave nothing to chance. */
3926 return r1 - r2;
3927}
3928\f
3929/* The following HARD_REG_SETs indicate when each hard register is
3930 used for a reload of various parts of the current insn. */
3931
3932/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
3933static HARD_REG_SET reload_reg_used;
546b63fb
RK
3934/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
3935static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
3936/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
3937static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
3938/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
3939static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
3940/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
3941static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c
RK
3942/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
3943static HARD_REG_SET reload_reg_used_in_op_addr;
546b63fb
RK
3944/* If reg is in use for a RELOAD_FOR_INSN reload. */
3945static HARD_REG_SET reload_reg_used_in_insn;
3946/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
3947static HARD_REG_SET reload_reg_used_in_other_addr;
32131a9c
RK
3948
3949/* If reg is in use as a reload reg for any sort of reload. */
3950static HARD_REG_SET reload_reg_used_at_all;
3951
be7ae2a4
RK
 3952/* If reg is in use as an inherited reload.  We just mark the first register
3953 in the group. */
3954static HARD_REG_SET reload_reg_used_for_inherit;
3955
546b63fb
RK
3956/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
3957 TYPE. MODE is used to indicate how many consecutive regs are
3958 actually used. */
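/* An illustrative example (not part of the original source): on a 32-bit
   target where HARD_REGNO_NREGS (r, DImode) is 2, marking reg r for a
   DImode RELOAD_FOR_INPUT reload of operand 1 sets bits r and r+1 in both
   reload_reg_used_in_input[1] and reload_reg_used_at_all.  */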
32131a9c
RK
3959
3960static void
546b63fb 3961mark_reload_reg_in_use (regno, opnum, type, mode)
32131a9c 3962 int regno;
546b63fb
RK
3963 int opnum;
3964 enum reload_type type;
32131a9c
RK
3965 enum machine_mode mode;
3966{
3967 int nregs = HARD_REGNO_NREGS (regno, mode);
3968 int i;
3969
3970 for (i = regno; i < nregs + regno; i++)
3971 {
546b63fb 3972 switch (type)
32131a9c
RK
3973 {
3974 case RELOAD_OTHER:
3975 SET_HARD_REG_BIT (reload_reg_used, i);
3976 break;
3977
546b63fb
RK
3978 case RELOAD_FOR_INPUT_ADDRESS:
3979 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
32131a9c
RK
3980 break;
3981
546b63fb
RK
3982 case RELOAD_FOR_OUTPUT_ADDRESS:
3983 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
32131a9c
RK
3984 break;
3985
3986 case RELOAD_FOR_OPERAND_ADDRESS:
3987 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
3988 break;
3989
546b63fb
RK
3990 case RELOAD_FOR_OTHER_ADDRESS:
3991 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
3992 break;
3993
32131a9c 3994 case RELOAD_FOR_INPUT:
546b63fb 3995 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
32131a9c
RK
3996 break;
3997
3998 case RELOAD_FOR_OUTPUT:
546b63fb
RK
3999 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4000 break;
4001
4002 case RELOAD_FOR_INSN:
4003 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
32131a9c
RK
4004 break;
4005 }
4006
4007 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4008 }
4009}
4010
be7ae2a4
RK
4011/* Similarly, but show REGNO is no longer in use for a reload. */
4012
4013static void
4014clear_reload_reg_in_use (regno, opnum, type, mode)
4015 int regno;
4016 int opnum;
4017 enum reload_type type;
4018 enum machine_mode mode;
4019{
4020 int nregs = HARD_REGNO_NREGS (regno, mode);
4021 int i;
4022
4023 for (i = regno; i < nregs + regno; i++)
4024 {
4025 switch (type)
4026 {
4027 case RELOAD_OTHER:
4028 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4029 break;
4030
4031 case RELOAD_FOR_INPUT_ADDRESS:
4032 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4033 break;
4034
4035 case RELOAD_FOR_OUTPUT_ADDRESS:
4036 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4037 break;
4038
4039 case RELOAD_FOR_OPERAND_ADDRESS:
4040 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4041 break;
4042
4043 case RELOAD_FOR_OTHER_ADDRESS:
4044 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4045 break;
4046
4047 case RELOAD_FOR_INPUT:
4048 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4049 break;
4050
4051 case RELOAD_FOR_OUTPUT:
4052 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4053 break;
4054
4055 case RELOAD_FOR_INSN:
4056 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4057 break;
4058 }
4059 }
4060}
4061
32131a9c 4062/* 1 if reg REGNO is free as a reload reg for a reload of the sort
546b63fb 4063 specified by OPNUM and TYPE. */
32131a9c
RK
4064
4065static int
546b63fb 4066reload_reg_free_p (regno, opnum, type)
32131a9c 4067 int regno;
546b63fb
RK
4068 int opnum;
4069 enum reload_type type;
32131a9c 4070{
546b63fb
RK
4071 int i;
4072
4073 /* In use for a RELOAD_OTHER means it's not available for anything except
4074 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4075 to be used only for inputs. */
4076
4077 if (type != RELOAD_FOR_OTHER_ADDRESS
4078 && TEST_HARD_REG_BIT (reload_reg_used, regno))
32131a9c 4079 return 0;
546b63fb
RK
4080
4081 switch (type)
32131a9c
RK
4082 {
4083 case RELOAD_OTHER:
4084 /* In use for anything means not available for a RELOAD_OTHER. */
4085 return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);
4086
4087 /* The other kinds of use can sometimes share a register. */
4088 case RELOAD_FOR_INPUT:
546b63fb
RK
4089 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4090 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4091 return 0;
4092
4093 /* If it is used for some other input, can't use it. */
4094 for (i = 0; i < reload_n_operands; i++)
4095 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4096 return 0;
4097
4098 /* If it is used in a later operand's address, can't use it. */
4099 for (i = opnum + 1; i < reload_n_operands; i++)
4100 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4101 return 0;
4102
4103 return 1;
4104
4105 case RELOAD_FOR_INPUT_ADDRESS:
4106 /* Can't use a register if it is used for an input address for this
4107 operand or used as an input in an earlier one. */
4108 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4109 return 0;
4110
4111 for (i = 0; i < opnum; i++)
4112 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4113 return 0;
4114
4115 return 1;
4116
4117 case RELOAD_FOR_OUTPUT_ADDRESS:
4118 /* Can't use a register if it is used for an output address for this
4119 operand or used as an output in this or a later operand. */
4120 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4121 return 0;
4122
4123 for (i = opnum; i < reload_n_operands; i++)
4124 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4125 return 0;
4126
4127 return 1;
4128
32131a9c 4129 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4130 for (i = 0; i < reload_n_operands; i++)
4131 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4132 return 0;
4133
4134 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4135 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4136
32131a9c 4137 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4138 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4139 outputs, or an operand address for this or an earlier output. */
4140 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4141 return 0;
4142
4143 for (i = 0; i < reload_n_operands; i++)
4144 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4145 return 0;
4146
4147 for (i = 0; i <= opnum; i++)
4148 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4149 return 0;
4150
4151 return 1;
4152
4153 case RELOAD_FOR_INSN:
4154 for (i = 0; i < reload_n_operands; i++)
4155 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4156 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4157 return 0;
4158
4159 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4160 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4161
4162 case RELOAD_FOR_OTHER_ADDRESS:
4163 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4164 }
4165 abort ();
4166}
4167
4168/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4169 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4170 is not in use for a reload in any prior part of the insn.
4171
4172 We can assume that the reload reg was already tested for availability
4173 at the time it is needed, and we should not check this again,
4174 in case the reg has already been marked in use. */
4175
4176static int
546b63fb 4177reload_reg_free_before_p (regno, opnum, type)
32131a9c 4178 int regno;
546b63fb
RK
4179 int opnum;
4180 enum reload_type type;
32131a9c 4181{
546b63fb
RK
4182 int i;
4183
4184 switch (type)
32131a9c 4185 {
546b63fb
RK
4186 case RELOAD_FOR_OTHER_ADDRESS:
4187 /* These always come first. */
32131a9c
RK
4188 return 1;
4189
546b63fb
RK
4190 case RELOAD_OTHER:
4191 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4192
32131a9c 4193 /* If this use is for part of the insn,
546b63fb
RK
 4194	 check that the reg is not in use for any prior part.  It is tempting
 4195	 to try to do this by falling through from objects that occur
4196 later in the insn to ones that occur earlier, but that will not
4197 correctly take into account the fact that here we MUST ignore
4198 things that would prevent the register from being allocated in
4199 the first place, since we know that it was allocated. */
4200
4201 case RELOAD_FOR_OUTPUT_ADDRESS:
4202 /* Earlier reloads are for earlier outputs or their addresses,
4203 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4204 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
 4205	 RELOAD_OTHER).  */
4206 for (i = 0; i < opnum; i++)
4207 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4208 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4209 return 0;
4210
4211 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
32131a9c 4212 return 0;
546b63fb
RK
4213
4214 for (i = 0; i < reload_n_operands; i++)
4215 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4216 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4217 return 0;
4218
4219 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4220 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4221 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4222
32131a9c 4223 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4224 /* This can't be used in the output address for this operand and
4225 anything that can't be used for it, except that we've already
4226 tested for RELOAD_FOR_INSN objects. */
4227
4228 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
32131a9c 4229 return 0;
546b63fb
RK
4230
4231 for (i = 0; i < opnum; i++)
4232 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4233 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4234 return 0;
4235
4236 for (i = 0; i < reload_n_operands; i++)
4237 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4238 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4239 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4240 return 0;
4241
4242 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4243
32131a9c 4244 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4245 case RELOAD_FOR_INSN:
4246 /* These can't conflict with inputs, or each other, so all we have to
4247 test is input addresses and the addresses of OTHER items. */
4248
4249 for (i = 0; i < reload_n_operands; i++)
4250 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4251 return 0;
4252
4253 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4254
32131a9c 4255 case RELOAD_FOR_INPUT:
546b63fb
RK
4256 /* The only things earlier are the address for this and
4257 earlier inputs, other inputs (which we know we don't conflict
4258 with), and addresses of RELOAD_OTHER objects. */
4259
4260 for (i = 0; i <= opnum; i++)
4261 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4262 return 0;
4263
4264 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4265
4266 case RELOAD_FOR_INPUT_ADDRESS:
4267 /* Similarly, all we have to check is for use in earlier inputs'
4268 addresses. */
4269 for (i = 0; i < opnum; i++)
4270 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4271 return 0;
4272
4273 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4274 }
4275 abort ();
4276}
4277
4278/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4279 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4280 is still available in REGNO at the end of the insn.
4281
4282 We can assume that the reload reg was already tested for availability
4283 at the time it is needed, and we should not check this again,
4284 in case the reg has already been marked in use. */
4285
4286static int
546b63fb 4287reload_reg_reaches_end_p (regno, opnum, type)
32131a9c 4288 int regno;
546b63fb
RK
4289 int opnum;
4290 enum reload_type type;
32131a9c 4291{
546b63fb
RK
4292 int i;
4293
4294 switch (type)
32131a9c
RK
4295 {
4296 case RELOAD_OTHER:
4297 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4298 its value must reach the end. */
4299 return 1;
4300
4301 /* If this use is for part of the insn,
546b63fb
RK
 4302	 its value reaches the end if no subsequent part uses the same register.
4303 Just like the above function, don't try to do this with lots
4304 of fallthroughs. */
4305
4306 case RELOAD_FOR_OTHER_ADDRESS:
4307 /* Here we check for everything else, since these don't conflict
4308 with anything else and everything comes later. */
4309
4310 for (i = 0; i < reload_n_operands; i++)
4311 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4312 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4313 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4314 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4315 return 0;
4316
4317 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4318 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4319 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4320
4321 case RELOAD_FOR_INPUT_ADDRESS:
4322 /* Similar, except that we check only for this and subsequent inputs
4323 and the address of only subsequent inputs and we do not need
4324 to check for RELOAD_OTHER objects since they are known not to
4325 conflict. */
4326
4327 for (i = opnum; i < reload_n_operands; i++)
4328 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4329 return 0;
4330
4331 for (i = opnum + 1; i < reload_n_operands; i++)
4332 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4333 return 0;
4334
4335 for (i = 0; i < reload_n_operands; i++)
4336 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4337 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4338 return 0;
4339
4340 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4341 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4342
32131a9c 4343 case RELOAD_FOR_INPUT:
546b63fb
RK
4344 /* Similar to input address, except we start at the next operand for
4345 both input and input address and we do not check for
4346 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4347 would conflict. */
4348
4349 for (i = opnum + 1; i < reload_n_operands; i++)
4350 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4351 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4352 return 0;
4353
4354 /* ... fall through ... */
4355
32131a9c 4356 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4357 /* Check outputs and their addresses. */
4358
4359 for (i = 0; i < reload_n_operands; i++)
4360 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4361 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4362 return 0;
4363
4364 return 1;
4365
4366 case RELOAD_FOR_INSN:
 4367      /* These conflict with other outputs and with RELOAD_OTHER.  So
4368 we need only check for output addresses. */
4369
4370 opnum = -1;
4371
4372 /* ... fall through ... */
4373
32131a9c 4374 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4375 case RELOAD_FOR_OUTPUT_ADDRESS:
4376 /* We already know these can't conflict with a later output. So the
 4377	 only things to check are later output addresses.  */
4378 for (i = opnum + 1; i < reload_n_operands; i++)
4379 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4380 return 0;
4381
32131a9c
RK
4382 return 1;
4383 }
546b63fb 4384
32131a9c
RK
4385 abort ();
4386}
4387\f
4388/* Vector of reload-numbers showing the order in which the reloads should
4389 be processed. */
4390short reload_order[MAX_RELOADS];
4391
4392/* Indexed by reload number, 1 if incoming value
4393 inherited from previous insns. */
4394char reload_inherited[MAX_RELOADS];
4395
4396/* For an inherited reload, this is the insn the reload was inherited from,
4397 if we know it. Otherwise, this is 0. */
4398rtx reload_inheritance_insn[MAX_RELOADS];
4399
4400/* If non-zero, this is a place to get the value of the reload,
4401 rather than using reload_in. */
4402rtx reload_override_in[MAX_RELOADS];
4403
4404/* For each reload, the index in spill_regs of the spill register used,
4405 or -1 if we did not need one of the spill registers for this reload. */
4406int reload_spill_index[MAX_RELOADS];
4407
4408/* Index of last register assigned as a spill register. We allocate in
 4409   a round-robin fashion.  */
4410
1d2310f3 4411static int last_spill_reg = 0;
32131a9c
RK
4412
4413/* Find a spill register to use as a reload register for reload R.
4414 LAST_RELOAD is non-zero if this is the last reload for the insn being
4415 processed.
4416
4417 Set reload_reg_rtx[R] to the register allocated.
4418
4419 If NOERROR is nonzero, we return 1 if successful,
4420 or 0 if we couldn't find a spill reg and we didn't change anything. */
4421
4422static int
4423allocate_reload_reg (r, insn, last_reload, noerror)
4424 int r;
4425 rtx insn;
4426 int last_reload;
4427 int noerror;
4428{
4429 int i;
4430 int pass;
4431 int count;
4432 rtx new;
4433 int regno;
4434
4435 /* If we put this reload ahead, thinking it is a group,
4436 then insist on finding a group. Otherwise we can grab a
a8fdc208 4437 reg that some other reload needs.
32131a9c
RK
4438 (That can happen when we have a 68000 DATA_OR_FP_REG
4439 which is a group of data regs or one fp reg.)
4440 We need not be so restrictive if there are no more reloads
4441 for this insn.
4442
4443 ??? Really it would be nicer to have smarter handling
4444 for that kind of reg class, where a problem like this is normal.
4445 Perhaps those classes should be avoided for reloading
4446 by use of more alternatives. */
4447
4448 int force_group = reload_nregs[r] > 1 && ! last_reload;
4449
4450 /* If we want a single register and haven't yet found one,
4451 take any reg in the right class and not in use.
4452 If we want a consecutive group, here is where we look for it.
4453
4454 We use two passes so we can first look for reload regs to
4455 reuse, which are already in use for other reloads in this insn,
4456 and only then use additional registers.
4457 I think that maximizing reuse is needed to make sure we don't
4458 run out of reload regs. Suppose we have three reloads, and
4459 reloads A and B can share regs. These need two regs.
4460 Suppose A and B are given different regs.
4461 That leaves none for C. */
4462 for (pass = 0; pass < 2; pass++)
4463 {
4464 /* I is the index in spill_regs.
4465 We advance it round-robin between insns to use all spill regs
4466 equally, so that inherited reloads have a chance
4467 of leapfrogging each other. */
4468
4469 for (count = 0, i = last_spill_reg; count < n_spills; count++)
4470 {
4471 int class = (int) reload_reg_class[r];
4472
4473 i = (i + 1) % n_spills;
4474
546b63fb
RK
4475 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4476 reload_when_needed[r])
32131a9c
RK
4477 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4478 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
be7ae2a4
RK
4479 /* Look first for regs to share, then for unshared. But
4480 don't share regs used for inherited reloads; they are
4481 the ones we want to preserve. */
4482 && (pass
4483 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4484 spill_regs[i])
4485 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4486 spill_regs[i]))))
32131a9c
RK
4487 {
4488 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4489 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4490 (on 68000) got us two FP regs. If NR is 1,
4491 we would reject both of them. */
4492 if (force_group)
4493 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4494 /* If we need only one reg, we have already won. */
4495 if (nr == 1)
4496 {
4497 /* But reject a single reg if we demand a group. */
4498 if (force_group)
4499 continue;
4500 break;
4501 }
4502 /* Otherwise check that as many consecutive regs as we need
4503 are available here.
4504 Also, don't use for a group registers that are
4505 needed for nongroups. */
4506 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4507 while (nr > 1)
4508 {
4509 regno = spill_regs[i] + nr - 1;
4510 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4511 && spill_reg_order[regno] >= 0
546b63fb
RK
4512 && reload_reg_free_p (regno, reload_opnum[r],
4513 reload_when_needed[r])
32131a9c
RK
4514 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4515 regno)))
4516 break;
4517 nr--;
4518 }
4519 if (nr == 1)
4520 break;
4521 }
4522 }
4523
4524 /* If we found something on pass 1, omit pass 2. */
4525 if (count < n_spills)
4526 break;
4527 }
4528
4529 /* We should have found a spill register by now. */
4530 if (count == n_spills)
4531 {
4532 if (noerror)
4533 return 0;
139fc12e 4534 goto failure;
32131a9c
RK
4535 }
4536
be7ae2a4
RK
4537 /* I is the index in SPILL_REG_RTX of the reload register we are to
4538 allocate. Get an rtx for it and find its register number. */
32131a9c
RK
4539
4540 new = spill_reg_rtx[i];
4541
4542 if (new == 0 || GET_MODE (new) != reload_mode[r])
be7ae2a4
RK
4543 spill_reg_rtx[i] = new
4544 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4545
32131a9c
RK
4546 regno = true_regnum (new);
4547
4548 /* Detect when the reload reg can't hold the reload mode.
4549 This used to be one `if', but Sequent compiler can't handle that. */
4550 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4551 {
4552 enum machine_mode test_mode = VOIDmode;
4553 if (reload_in[r])
4554 test_mode = GET_MODE (reload_in[r]);
4555 /* If reload_in[r] has VOIDmode, it means we will load it
4556 in whatever mode the reload reg has: to wit, reload_mode[r].
4557 We have already tested that for validity. */
4558 /* Aside from that, we need to test that the expressions
4559 to reload from or into have modes which are valid for this
4560 reload register. Otherwise the reload insns would be invalid. */
4561 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4562 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4563 if (! (reload_out[r] != 0
4564 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
be7ae2a4
RK
4565 {
4566 /* The reg is OK. */
4567 last_spill_reg = i;
4568
4569 /* Mark as in use for this insn the reload regs we use
4570 for this. */
4571 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4572 reload_when_needed[r], reload_mode[r]);
4573
4574 reload_reg_rtx[r] = new;
4575 reload_spill_index[r] = i;
4576 return 1;
4577 }
32131a9c
RK
4578 }
4579
4580 /* The reg is not OK. */
4581 if (noerror)
4582 return 0;
4583
139fc12e 4584 failure:
32131a9c
RK
4585 if (asm_noperands (PATTERN (insn)) < 0)
4586 /* It's the compiler's fault. */
4587 abort ();
4588
4589 /* It's the user's fault; the operand's mode and constraint
4590 don't match. Disable this reload so we don't crash in final. */
4591 error_for_asm (insn,
4592 "`asm' operand constraint incompatible with operand size");
4593 reload_in[r] = 0;
4594 reload_out[r] = 0;
4595 reload_reg_rtx[r] = 0;
4596 reload_optional[r] = 1;
4597 reload_secondary_p[r] = 1;
4598
4599 return 1;
4600}
4601\f
4602/* Assign hard reg targets for the pseudo-registers we must reload
4603 into hard regs for this insn.
4604 Also output the instructions to copy them in and out of the hard regs.
4605
4606 For machines with register classes, we are responsible for
4607 finding a reload reg in the proper class. */
4608
4609static void
4610choose_reload_regs (insn, avoid_return_reg)
4611 rtx insn;
32131a9c
RK
4612 rtx avoid_return_reg;
4613{
4614 register int i, j;
4615 int max_group_size = 1;
4616 enum reg_class group_class = NO_REGS;
4617 int inheritance;
4618
4619 rtx save_reload_reg_rtx[MAX_RELOADS];
4620 char save_reload_inherited[MAX_RELOADS];
4621 rtx save_reload_inheritance_insn[MAX_RELOADS];
4622 rtx save_reload_override_in[MAX_RELOADS];
4623 int save_reload_spill_index[MAX_RELOADS];
4624 HARD_REG_SET save_reload_reg_used;
546b63fb
RK
4625 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4626 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4627 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4628 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c 4629 HARD_REG_SET save_reload_reg_used_in_op_addr;
546b63fb
RK
4630 HARD_REG_SET save_reload_reg_used_in_insn;
4631 HARD_REG_SET save_reload_reg_used_in_other_addr;
32131a9c
RK
4632 HARD_REG_SET save_reload_reg_used_at_all;
4633
4634 bzero (reload_inherited, MAX_RELOADS);
4635 bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4636 bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));
4637
4638 CLEAR_HARD_REG_SET (reload_reg_used);
4639 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
32131a9c 4640 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
546b63fb
RK
4641 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4642 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
32131a9c 4643
546b63fb
RK
4644 for (i = 0; i < reload_n_operands; i++)
4645 {
4646 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4647 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4648 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4649 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4650 }
32131a9c
RK
4651
4652#ifdef SMALL_REGISTER_CLASSES
4653 /* Don't bother with avoiding the return reg
4654 if we have no mandatory reload that could use it. */
4655 if (avoid_return_reg)
4656 {
4657 int do_avoid = 0;
4658 int regno = REGNO (avoid_return_reg);
4659 int nregs
4660 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4661 int r;
4662
4663 for (r = regno; r < regno + nregs; r++)
4664 if (spill_reg_order[r] >= 0)
4665 for (j = 0; j < n_reloads; j++)
4666 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4667 && (reload_in[j] != 0 || reload_out[j] != 0
4668 || reload_secondary_p[j])
4669 &&
4670 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4671 do_avoid = 1;
4672 if (!do_avoid)
4673 avoid_return_reg = 0;
4674 }
4675#endif /* SMALL_REGISTER_CLASSES */
4676
4677#if 0 /* Not needed, now that we can always retry without inheritance. */
4678 /* See if we have more mandatory reloads than spill regs.
4679 If so, then we cannot risk optimizations that could prevent
a8fdc208 4680 reloads from sharing one spill register.
32131a9c
RK
4681
4682 Since we will try finding a better register than reload_reg_rtx
4683 unless it is equal to reload_in or reload_out, count such reloads. */
4684
4685 {
4686 int tem = 0;
4687#ifdef SMALL_REGISTER_CLASSES
 4688 tem = (avoid_return_reg != 0);
a8fdc208 4689#endif
32131a9c
RK
4690 for (j = 0; j < n_reloads; j++)
4691 if (! reload_optional[j]
4692 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4693 && (reload_reg_rtx[j] == 0
4694 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4695 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4696 tem++;
4697 if (tem > n_spills)
4698 must_reuse = 1;
4699 }
4700#endif
4701
4702#ifdef SMALL_REGISTER_CLASSES
4703 /* Don't use the subroutine call return reg for a reload
4704 if we are supposed to avoid it. */
4705 if (avoid_return_reg)
4706 {
4707 int regno = REGNO (avoid_return_reg);
4708 int nregs
4709 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4710 int r;
4711
4712 for (r = regno; r < regno + nregs; r++)
4713 if (spill_reg_order[r] >= 0)
4714 SET_HARD_REG_BIT (reload_reg_used, r);
4715 }
4716#endif /* SMALL_REGISTER_CLASSES */
4717
4718 /* In order to be certain of getting the registers we need,
4719 we must sort the reloads into order of increasing register class.
4720 Then our grabbing of reload registers will parallel the process
a8fdc208 4721 that provided the reload registers.
32131a9c
RK
4722
4723 Also note whether any of the reloads wants a consecutive group of regs.
4724 If so, record the maximum size of the group desired and what
4725 register class contains all the groups needed by this insn. */
4726
4727 for (j = 0; j < n_reloads; j++)
4728 {
4729 reload_order[j] = j;
4730 reload_spill_index[j] = -1;
4731
4732 reload_mode[j]
546b63fb
RK
4733 = (reload_inmode[j] == VOIDmode
4734 || (GET_MODE_SIZE (reload_outmode[j])
4735 > GET_MODE_SIZE (reload_inmode[j])))
4736 ? reload_outmode[j] : reload_inmode[j];
32131a9c
RK
4737
4738 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4739
4740 if (reload_nregs[j] > 1)
4741 {
4742 max_group_size = MAX (reload_nregs[j], max_group_size);
4743 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4744 }
4745
4746 /* If we have already decided to use a certain register,
4747 don't use it in another way. */
4748 if (reload_reg_rtx[j])
546b63fb 4749 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
32131a9c
RK
4750 reload_when_needed[j], reload_mode[j]);
4751 }
4752
4753 if (n_reloads > 1)
4754 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
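 /* For illustration only: the comparison function passed to qsort
 above receives pointers to `short' entries of reload_order. The
 real function, reload_reg_class_lower, is defined earlier in this
 file and considers more than is shown here; the hypothetical sketch
 below captures only the "increasing register class" ordering
 described above. */
#if 0
static int
example_reload_order_cmp (p1, p2)
     short *p1, *p2;
{
  register int r1 = *p1, r2 = *p2;

  /* Reloads whose class has a lower enum value sort first, matching
     the `increasing register class' order mentioned above.  */
  return (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
}
#endif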
4755
4756 bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
4757 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4758 bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
4759 sizeof reload_inheritance_insn);
4760 bcopy (reload_override_in, save_reload_override_in,
4761 sizeof reload_override_in);
4762 bcopy (reload_spill_index, save_reload_spill_index,
4763 sizeof reload_spill_index);
4764 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4765 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
32131a9c
RK
4766 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4767 reload_reg_used_in_op_addr);
546b63fb
RK
4768 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
4769 reload_reg_used_in_insn);
4770 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
4771 reload_reg_used_in_other_addr);
4772
4773 for (i = 0; i < reload_n_operands; i++)
4774 {
4775 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
4776 reload_reg_used_in_output[i]);
4777 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
4778 reload_reg_used_in_input[i]);
4779 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
4780 reload_reg_used_in_input_addr[i]);
4781 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
4782 reload_reg_used_in_output_addr[i]);
4783 }
32131a9c 4784
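 /* A simplified sketch of the control flow implemented below.  The
 helper names used here (try_to_allocate_all_reloads and
 restore_saved_reload_state) are hypothetical and exist only for this
 illustration; the real code inlines that work and also handles
 groups, inheritance from earlier insns, and equivalent registers. */
#if 0
  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
    {
      if (try_to_allocate_all_reloads (insn, inheritance))
	break;				/* every reload got a register */
      restore_saved_reload_state ();	/* undo, then retry without
					   inheritance */
    }
#endif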
58b1581b
RS
4785 /* If -O, try first with inheritance, then turning it off.
4786 If not -O, don't do inheritance.
4787 Using inheritance when not optimizing leads to paradoxes
4788 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
4789 because one side of the comparison might be inherited. */
32131a9c 4790
58b1581b 4791 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
32131a9c
RK
4792 {
4793 /* Process the reloads in order of preference just found.
4794 Beyond this point, subregs can be found in reload_reg_rtx.
4795
4796 This used to look for an existing reloaded home for all
4797 of the reloads, and only then perform any new reloads.
4798 But that could lose if the reloads were done out of reg-class order
4799 because a later reload with a looser constraint might have an old
4800 home in a register needed by an earlier reload with a tighter constraint.
4801
4802 To solve this, we make two passes over the reloads, in the order
4803 described above. In the first pass we try to inherit a reload
4804 from a previous insn. If there is a later reload that needs a
4805 class that is a proper subset of the class being processed, we must
4806 also allocate a spill register during the first pass.
4807
4808 Then make a second pass over the reloads to allocate any reloads
4809 that haven't been given registers yet. */
4810
be7ae2a4
RK
4811 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
4812
32131a9c
RK
4813 for (j = 0; j < n_reloads; j++)
4814 {
4815 register int r = reload_order[j];
4816
4817 /* Ignore reloads that got marked inoperative. */
4818 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4819 continue;
4820
 4821 /* If find_reloads chose to use reload_in or reload_out as a reload
 4822 register, we don't need to choose one. Otherwise, try even if it found
 4823 one, since we might save an insn if we find the value lying around. */
4824 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
4825 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
4826 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
4827 continue;
4828
4829#if 0 /* No longer needed for correct operation.
4830 It might give better code, or might not; worth an experiment? */
4831 /* If this is an optional reload, we can't inherit from earlier insns
4832 until we are sure that any non-optional reloads have been allocated.
4833 The following code takes advantage of the fact that optional reloads
4834 are at the end of reload_order. */
4835 if (reload_optional[r] != 0)
4836 for (i = 0; i < j; i++)
4837 if ((reload_out[reload_order[i]] != 0
4838 || reload_in[reload_order[i]] != 0
4839 || reload_secondary_p[reload_order[i]])
4840 && ! reload_optional[reload_order[i]]
4841 && reload_reg_rtx[reload_order[i]] == 0)
4842 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
4843#endif
4844
4845 /* First see if this pseudo is already available as reloaded
4846 for a previous insn. We cannot try to inherit for reloads
4847 that are smaller than the maximum number of registers needed
4848 for groups unless the register we would allocate cannot be used
4849 for the groups.
4850
4851 We could check here to see if this is a secondary reload for
4852 an object that is already in a register of the desired class.
4853 This would avoid the need for the secondary reload register.
4854 But this is complex because we can't easily determine what
4855 objects might want to be loaded via this reload. So let a register
4856 be allocated here. In `emit_reload_insns' we suppress one of the
4857 loads in the case described above. */
4858
4859 if (inheritance)
4860 {
4861 register int regno = -1;
db660765 4862 enum machine_mode mode;
32131a9c
RK
4863
4864 if (reload_in[r] == 0)
4865 ;
4866 else if (GET_CODE (reload_in[r]) == REG)
db660765
TW
4867 {
4868 regno = REGNO (reload_in[r]);
4869 mode = GET_MODE (reload_in[r]);
4870 }
32131a9c 4871 else if (GET_CODE (reload_in_reg[r]) == REG)
db660765
TW
4872 {
4873 regno = REGNO (reload_in_reg[r]);
4874 mode = GET_MODE (reload_in_reg[r]);
4875 }
32131a9c
RK
4876#if 0
4877 /* This won't work, since REGNO can be a pseudo reg number.
4878 Also, it takes much more hair to keep track of all the things
4879 that can invalidate an inherited reload of part of a pseudoreg. */
4880 else if (GET_CODE (reload_in[r]) == SUBREG
4881 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
4882 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
4883#endif
4884
4885 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
4886 {
4887 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
4888
4889 if (reg_reloaded_contents[i] == regno
db660765
TW
4890 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
4891 >= GET_MODE_SIZE (mode))
32131a9c
RK
4892 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4893 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4894 spill_regs[i])
4895 && (reload_nregs[r] == max_group_size
4896 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
4897 spill_regs[i]))
546b63fb
RK
4898 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
4899 reload_when_needed[r])
32131a9c 4900 && reload_reg_free_before_p (spill_regs[i],
546b63fb 4901 reload_opnum[r],
32131a9c
RK
4902 reload_when_needed[r]))
4903 {
4904 /* If a group is needed, verify that all the subsequent
4905 registers still have their values intact. */
4906 int nr
4907 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4908 int k;
4909
4910 for (k = 1; k < nr; k++)
4911 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
4912 != regno)
4913 break;
4914
4915 if (k == nr)
4916 {
c74fa651
RS
4917 int i1;
4918
4919 /* We found a register that contains the
4920 value we need. If this register is the
4921 same as an `earlyclobber' operand of the
4922 current insn, just mark it as a place to
4923 reload from since we can't use it as the
4924 reload register itself. */
4925
4926 for (i1 = 0; i1 < n_earlyclobbers; i1++)
4927 if (reg_overlap_mentioned_for_reload_p
4928 (reg_last_reload_reg[regno],
4929 reload_earlyclobbers[i1]))
4930 break;
4931
8908158d
RS
4932 if (i1 != n_earlyclobbers
4933 /* Don't really use the inherited spill reg
4934 if we need it wider than we've got it. */
4935 || (GET_MODE_SIZE (reload_mode[r])
4936 > GET_MODE_SIZE (mode)))
c74fa651
RS
4937 reload_override_in[r] = reg_last_reload_reg[regno];
4938 else
4939 {
4940 /* We can use this as a reload reg. */
4941 /* Mark the register as in use for this part of
4942 the insn. */
4943 mark_reload_reg_in_use (spill_regs[i],
4944 reload_opnum[r],
4945 reload_when_needed[r],
4946 reload_mode[r]);
4947 reload_reg_rtx[r] = reg_last_reload_reg[regno];
4948 reload_inherited[r] = 1;
4949 reload_inheritance_insn[r]
4950 = reg_reloaded_insn[i];
4951 reload_spill_index[r] = i;
4952 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
4953 spill_regs[i]);
4954 }
32131a9c
RK
4955 }
4956 }
4957 }
4958 }
4959
4960 /* Here's another way to see if the value is already lying around. */
4961 if (inheritance
4962 && reload_in[r] != 0
4963 && ! reload_inherited[r]
4964 && reload_out[r] == 0
4965 && (CONSTANT_P (reload_in[r])
4966 || GET_CODE (reload_in[r]) == PLUS
4967 || GET_CODE (reload_in[r]) == REG
4968 || GET_CODE (reload_in[r]) == MEM)
4969 && (reload_nregs[r] == max_group_size
4970 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
4971 {
4972 register rtx equiv
4973 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
fb3821f7 4974 -1, NULL_PTR, 0, reload_mode[r]);
32131a9c
RK
4975 int regno;
4976
4977 if (equiv != 0)
4978 {
4979 if (GET_CODE (equiv) == REG)
4980 regno = REGNO (equiv);
4981 else if (GET_CODE (equiv) == SUBREG)
4982 {
4983 regno = REGNO (SUBREG_REG (equiv));
4984 if (regno < FIRST_PSEUDO_REGISTER)
4985 regno += SUBREG_WORD (equiv);
4986 }
4987 else
4988 abort ();
4989 }
4990
4991 /* If we found a spill reg, reject it unless it is free
4992 and of the desired class. */
4993 if (equiv != 0
4994 && ((spill_reg_order[regno] >= 0
546b63fb 4995 && ! reload_reg_free_before_p (regno, reload_opnum[r],
32131a9c
RK
4996 reload_when_needed[r]))
4997 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4998 regno)))
4999 equiv = 0;
5000
5001 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5002 equiv = 0;
5003
5004 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5005 equiv = 0;
5006
5007 /* We found a register that contains the value we need.
5008 If this register is the same as an `earlyclobber' operand
5009 of the current insn, just mark it as a place to reload from
5010 since we can't use it as the reload register itself. */
5011
5012 if (equiv != 0)
5013 for (i = 0; i < n_earlyclobbers; i++)
bfa30b22
RK
5014 if (reg_overlap_mentioned_for_reload_p (equiv,
5015 reload_earlyclobbers[i]))
32131a9c
RK
5016 {
5017 reload_override_in[r] = equiv;
5018 equiv = 0;
5019 break;
5020 }
5021
5022 /* JRV: If the equiv register we have found is explicitly
5023 clobbered in the current insn, mark but don't use, as above. */
5024
5025 if (equiv != 0 && regno_clobbered_p (regno, insn))
5026 {
5027 reload_override_in[r] = equiv;
5028 equiv = 0;
5029 }
5030
5031 /* If we found an equivalent reg, say no code need be generated
5032 to load it, and use it as our reload reg. */
5033 if (equiv != 0 && regno != FRAME_POINTER_REGNUM)
5034 {
5035 reload_reg_rtx[r] = equiv;
5036 reload_inherited[r] = 1;
5037 /* If it is a spill reg,
5038 mark the spill reg as in use for this insn. */
5039 i = spill_reg_order[regno];
5040 if (i >= 0)
be7ae2a4
RK
5041 {
5042 mark_reload_reg_in_use (regno, reload_opnum[r],
5043 reload_when_needed[r],
5044 reload_mode[r]);
5045 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno);
5046 }
32131a9c
RK
5047 }
5048 }
5049
5050 /* If we found a register to use already, or if this is an optional
5051 reload, we are done. */
5052 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5053 continue;
5054
5055#if 0 /* No longer needed for correct operation. Might or might not
5056 give better code on the average. Want to experiment? */
5057
5058 /* See if there is a later reload that has a class different from our
5059 class that intersects our class or that requires less register
5060 than our reload. If so, we must allocate a register to this
5061 reload now, since that reload might inherit a previous reload
5062 and take the only available register in our class. Don't do this
5063 for optional reloads since they will force all previous reloads
5064 to be allocated. Also don't do this for reloads that have been
5065 turned off. */
5066
5067 for (i = j + 1; i < n_reloads; i++)
5068 {
5069 int s = reload_order[i];
5070
d45cf215
RS
5071 if ((reload_in[s] == 0 && reload_out[s] == 0
5072 && ! reload_secondary_p[s])
32131a9c
RK
5073 || reload_optional[s])
5074 continue;
5075
5076 if ((reload_reg_class[s] != reload_reg_class[r]
5077 && reg_classes_intersect_p (reload_reg_class[r],
5078 reload_reg_class[s]))
5079 || reload_nregs[s] < reload_nregs[r])
5080 break;
5081 }
5082
5083 if (i == n_reloads)
5084 continue;
5085
5086 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5087#endif
5088 }
5089
5090 /* Now allocate reload registers for anything non-optional that
5091 didn't get one yet. */
5092 for (j = 0; j < n_reloads; j++)
5093 {
5094 register int r = reload_order[j];
5095
5096 /* Ignore reloads that got marked inoperative. */
5097 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5098 continue;
5099
5100 /* Skip reloads that already have a register allocated or are
5101 optional. */
5102 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5103 continue;
5104
5105 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5106 break;
5107 }
5108
5109 /* If that loop got all the way, we have won. */
5110 if (j == n_reloads)
5111 break;
5112
5113 fail:
5114 /* Loop around and try without any inheritance. */
5115 /* First undo everything done by the failed attempt
5116 to allocate with inheritance. */
5117 bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
5118 bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
5119 bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
5120 sizeof reload_inheritance_insn);
5121 bcopy (save_reload_override_in, reload_override_in,
5122 sizeof reload_override_in);
5123 bcopy (save_reload_spill_index, reload_spill_index,
5124 sizeof reload_spill_index);
5125 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5126 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
32131a9c
RK
5127 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5128 save_reload_reg_used_in_op_addr);
546b63fb
RK
5129 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5130 save_reload_reg_used_in_insn);
5131 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5132 save_reload_reg_used_in_other_addr);
5133
5134 for (i = 0; i < reload_n_operands; i++)
5135 {
5136 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5137 save_reload_reg_used_in_input[i]);
5138 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5139 save_reload_reg_used_in_output[i]);
5140 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5141 save_reload_reg_used_in_input_addr[i]);
5142 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5143 save_reload_reg_used_in_output_addr[i]);
5144 }
32131a9c
RK
5145 }
5146
5147 /* If we thought we could inherit a reload, because it seemed that
5148 nothing else wanted the same reload register earlier in the insn,
5149 verify that assumption, now that all reloads have been assigned. */
5150
5151 for (j = 0; j < n_reloads; j++)
5152 {
5153 register int r = reload_order[j];
5154
5155 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5156 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
546b63fb 5157 reload_opnum[r],
32131a9c
RK
5158 reload_when_needed[r]))
5159 reload_inherited[r] = 0;
5160
5161 /* If we found a better place to reload from,
5162 validate it in the same fashion, if it is a reload reg. */
5163 if (reload_override_in[r]
5164 && (GET_CODE (reload_override_in[r]) == REG
5165 || GET_CODE (reload_override_in[r]) == SUBREG))
5166 {
5167 int regno = true_regnum (reload_override_in[r]);
5168 if (spill_reg_order[regno] >= 0
546b63fb
RK
5169 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5170 reload_when_needed[r]))
32131a9c
RK
5171 reload_override_in[r] = 0;
5172 }
5173 }
5174
5175 /* Now that reload_override_in is known valid,
5176 actually override reload_in. */
5177 for (j = 0; j < n_reloads; j++)
5178 if (reload_override_in[j])
5179 reload_in[j] = reload_override_in[j];
5180
5181 /* If this reload won't be done because it has been cancelled or is
5182 optional and not inherited, clear reload_reg_rtx so other
5183 routines (such as subst_reloads) don't get confused. */
5184 for (j = 0; j < n_reloads; j++)
be7ae2a4
RK
5185 if (reload_reg_rtx[j] != 0
5186 && ((reload_optional[j] && ! reload_inherited[j])
5187 || (reload_in[j] == 0 && reload_out[j] == 0
5188 && ! reload_secondary_p[j])))
5189 {
5190 int regno = true_regnum (reload_reg_rtx[j]);
5191
5192 if (spill_reg_order[regno] >= 0)
5193 clear_reload_reg_in_use (regno, reload_opnum[j],
5194 reload_when_needed[j], reload_mode[j]);
5195 reload_reg_rtx[j] = 0;
5196 }
32131a9c
RK
5197
5198 /* Record which pseudos and which spill regs have output reloads. */
5199 for (j = 0; j < n_reloads; j++)
5200 {
5201 register int r = reload_order[j];
5202
5203 i = reload_spill_index[r];
5204
5205 /* I is nonneg if this reload used one of the spill regs.
5206 If reload_reg_rtx[r] is 0, this is an optional reload
5207 that we opted to ignore. */
5208 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5209 && reload_reg_rtx[r] != 0)
5210 {
5211 register int nregno = REGNO (reload_out[r]);
372e033b
RS
5212 int nr = 1;
5213
5214 if (nregno < FIRST_PSEUDO_REGISTER)
5215 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
32131a9c
RK
5216
5217 while (--nr >= 0)
372e033b
RS
5218 reg_has_output_reload[nregno + nr] = 1;
5219
5220 if (i >= 0)
32131a9c 5221 {
372e033b
RS
5222 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5223 while (--nr >= 0)
32131a9c
RK
5224 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5225 }
5226
5227 if (reload_when_needed[r] != RELOAD_OTHER
546b63fb
RK
5228 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5229 && reload_when_needed[r] != RELOAD_FOR_INSN)
32131a9c
RK
5230 abort ();
5231 }
5232 }
5233}
5234\f
546b63fb
RK
 5235/* If SMALL_REGISTER_CLASSES is defined, we may not have merged two
5236 reloads of the same item for fear that we might not have enough reload
5237 registers. However, normally they will get the same reload register
5238 and hence actually need not be loaded twice.
5239
5240 Here we check for the most common case of this phenomenon: when we have
 5241 a number of reloads for the same object, each of which was allocated
5242 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5243 reload, and is not modified in the insn itself. If we find such,
5244 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5245 This will not increase the number of spill registers needed and will
5246 prevent redundant code. */
5247
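/* Illustration only (register and pseudo numbers are made up): suppose
   find_reloads produced for one insn

	reload 0: reload_in (reg:SI 105), RELOAD_FOR_INPUT,
		  reload_reg_rtx (reg:SI 3)
	reload 1: reload_in (reg:SI 105), RELOAD_FOR_INPUT_ADDRESS,
		  reload_reg_rtx (reg:SI 3)

   Each would emit its own load of pseudo 105 into hard reg 3.  After
   merge_assigned_reloads runs, reload 0 becomes RELOAD_OTHER, reload 1
   is disabled (reload_in[1] = 0), and only one load is emitted.  */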
5248#ifdef SMALL_REGISTER_CLASSES
5249
5250static void
5251merge_assigned_reloads (insn)
5252 rtx insn;
5253{
5254 int i, j;
5255
5256 /* Scan all the reloads looking for ones that only load values and
5257 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5258 assigned and not modified by INSN. */
5259
5260 for (i = 0; i < n_reloads; i++)
5261 {
5262 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5263 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5264 || reg_set_p (reload_reg_rtx[i], insn))
5265 continue;
5266
5267 /* Look at all other reloads. Ensure that the only use of this
5268 reload_reg_rtx is in a reload that just loads the same value
5269 as we do. Note that any secondary reloads must be of the identical
5270 class since the values, modes, and result registers are the
5271 same, so we need not do anything with any secondary reloads. */
5272
5273 for (j = 0; j < n_reloads; j++)
5274 {
5275 if (i == j || reload_reg_rtx[j] == 0
5276 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5277 reload_reg_rtx[i]))
5278 continue;
5279
5280 /* If the reload regs aren't exactly the same (e.g, different modes)
5281 or if the values are different, we can't merge anything with this
5282 reload register. */
5283
5284 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5285 || reload_out[j] != 0 || reload_in[j] == 0
5286 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5287 break;
5288 }
5289
5290 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5291 we, in fact, found any matching reloads. */
5292
5293 if (j == n_reloads)
5294 {
5295 for (j = 0; j < n_reloads; j++)
5296 if (i != j && reload_reg_rtx[j] != 0
5297 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5298 {
5299 reload_when_needed[i] = RELOAD_OTHER;
5300 reload_in[j] = 0;
5301 transfer_replacements (i, j);
5302 }
5303
5304 /* If this is now RELOAD_OTHER, look for any reloads that load
5305 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5306 if they were for inputs, RELOAD_OTHER for outputs. Note that
5307 this test is equivalent to looking for reloads for this operand
5308 number. */
5309
5310 if (reload_when_needed[i] == RELOAD_OTHER)
5311 for (j = 0; j < n_reloads; j++)
5312 if (reload_in[j] != 0
 5313 && reload_when_needed[j] != RELOAD_OTHER
5314 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5315 reload_in[i]))
5316 reload_when_needed[j]
 5317 = reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5318 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5319 }
5320 }
5321}
 5322#endif /* SMALL_REGISTER_CLASSES */
5323\f
32131a9c
RK
5324/* Output insns to reload values in and out of the chosen reload regs. */
5325
5326static void
5327emit_reload_insns (insn)
5328 rtx insn;
5329{
5330 register int j;
546b63fb
RK
5331 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5332 rtx other_input_address_reload_insns = 0;
5333 rtx other_input_reload_insns = 0;
5334 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5335 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5336 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5337 rtx operand_reload_insns = 0;
32131a9c 5338 rtx following_insn = NEXT_INSN (insn);
a8efe40d 5339 rtx before_insn = insn;
32131a9c
RK
5340 int special;
5341 /* Values to be put in spill_reg_store are put here first. */
5342 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5343
546b63fb
RK
5344 for (j = 0; j < reload_n_operands; j++)
5345 input_reload_insns[j] = input_address_reload_insns[j]
5346 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5347
d45cf215 5348 /* If this is a CALL_INSN preceded by USE insns, any reload insns
a8efe40d
RK
5349 must go in front of the first USE insn, not in front of INSN. */
5350
5351 if (GET_CODE (insn) == CALL_INSN && GET_CODE (PREV_INSN (insn)) == INSN
5352 && GET_CODE (PATTERN (PREV_INSN (insn))) == USE)
5353 while (GET_CODE (PREV_INSN (before_insn)) == INSN
5354 && GET_CODE (PATTERN (PREV_INSN (before_insn))) == USE)
546b63fb
RK
5355 before_insn = PREV_INSN (before_insn);
5356
a34a369b 5357 /* If INSN is followed by any CLOBBER insns made by find_reloads,
546b63fb
RK
5358 put our reloads after them since they may otherwise be
5359 misinterpreted. */
5360
a34a369b
DE
5361 while (GET_CODE (following_insn) == INSN
5362 && GET_MODE (following_insn) == DImode
5363 && GET_CODE (PATTERN (following_insn)) == CLOBBER
5364 && NEXT_INSN (following_insn) != 0)
546b63fb 5365 following_insn = NEXT_INSN (following_insn);
a8efe40d 5366
32131a9c
RK
5367 /* Now output the instructions to copy the data into and out of the
5368 reload registers. Do these in the order that the reloads were reported,
5369 since reloads of base and index registers precede reloads of operands
5370 and the operands may need the base and index registers reloaded. */
5371
5372 for (j = 0; j < n_reloads; j++)
5373 {
5374 register rtx old;
5375 rtx oldequiv_reg = 0;
32131a9c
RK
5376 rtx store_insn = 0;
5377
5378 old = reload_in[j];
5379 if (old != 0 && ! reload_inherited[j]
5380 && ! rtx_equal_p (reload_reg_rtx[j], old)
5381 && reload_reg_rtx[j] != 0)
5382 {
5383 register rtx reloadreg = reload_reg_rtx[j];
5384 rtx oldequiv = 0;
5385 enum machine_mode mode;
546b63fb 5386 rtx *where;
32131a9c
RK
5387
5388 /* Determine the mode to reload in.
5389 This is very tricky because we have three to choose from.
5390 There is the mode the insn operand wants (reload_inmode[J]).
5391 There is the mode of the reload register RELOADREG.
5392 There is the intrinsic mode of the operand, which we could find
5393 by stripping some SUBREGs.
5394 It turns out that RELOADREG's mode is irrelevant:
5395 we can change that arbitrarily.
5396
5397 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5398 then the reload reg may not support QImode moves, so use SImode.
5399 If foo is in memory due to spilling a pseudo reg, this is safe,
5400 because the QImode value is in the least significant part of a
5401 slot big enough for a SImode. If foo is some other sort of
5402 memory reference, then it is impossible to reload this case,
5403 so previous passes had better make sure this never happens.
5404
5405 Then consider a one-word union which has SImode and one of its
5406 members is a float, being fetched as (SUBREG:SF union:SI).
5407 We must fetch that as SFmode because we could be loading into
5408 a float-only register. In this case OLD's mode is correct.
5409
5410 Consider an immediate integer: it has VOIDmode. Here we need
5411 to get a mode from something else.
5412
5413 In some cases, there is a fourth mode, the operand's
5414 containing mode. If the insn specifies a containing mode for
5415 this operand, it overrides all others.
5416
5417 I am not sure whether the algorithm here is always right,
5418 but it does the right things in those cases. */
5419
5420 mode = GET_MODE (old);
5421 if (mode == VOIDmode)
5422 mode = reload_inmode[j];
32131a9c
RK
5423
5424#ifdef SECONDARY_INPUT_RELOAD_CLASS
5425 /* If we need a secondary register for this operation, see if
5426 the value is already in a register in that class. Don't
5427 do this if the secondary register will be used as a scratch
5428 register. */
5429
5430 if (reload_secondary_reload[j] >= 0
58b1581b
RS
5431 && reload_secondary_icode[j] == CODE_FOR_nothing
5432 && optimize)
32131a9c
RK
5433 oldequiv
5434 = find_equiv_reg (old, insn,
5435 reload_reg_class[reload_secondary_reload[j]],
fb3821f7 5436 -1, NULL_PTR, 0, mode);
32131a9c
RK
5437#endif
5438
5439 /* If reloading from memory, see if there is a register
5440 that already holds the same value. If so, reload from there.
5441 We can pass 0 as the reload_reg_p argument because
5442 any other reload has either already been emitted,
5443 in which case find_equiv_reg will see the reload-insn,
5444 or has yet to be emitted, in which case it doesn't matter
5445 because we will use this equiv reg right away. */
5446
58b1581b 5447 if (oldequiv == 0 && optimize
32131a9c
RK
5448 && (GET_CODE (old) == MEM
5449 || (GET_CODE (old) == REG
5450 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5451 && reg_renumber[REGNO (old)] < 0)))
546b63fb 5452 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
fb3821f7 5453 -1, NULL_PTR, 0, mode);
32131a9c
RK
5454
5455 if (oldequiv)
5456 {
5457 int regno = true_regnum (oldequiv);
5458
5459 /* If OLDEQUIV is a spill register, don't use it for this
5460 if any other reload needs it at an earlier stage of this insn
a8fdc208 5461 or at this stage. */
32131a9c 5462 if (spill_reg_order[regno] >= 0
546b63fb
RK
5463 && (! reload_reg_free_p (regno, reload_opnum[j],
5464 reload_when_needed[j])
5465 || ! reload_reg_free_before_p (regno, reload_opnum[j],
32131a9c
RK
5466 reload_when_needed[j])))
5467 oldequiv = 0;
5468
5469 /* If OLDEQUIV is not a spill register,
5470 don't use it if any other reload wants it. */
5471 if (spill_reg_order[regno] < 0)
5472 {
5473 int k;
5474 for (k = 0; k < n_reloads; k++)
5475 if (reload_reg_rtx[k] != 0 && k != j
bfa30b22
RK
5476 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5477 oldequiv))
32131a9c
RK
5478 {
5479 oldequiv = 0;
5480 break;
5481 }
5482 }
546b63fb
RK
5483
5484 /* If it is no cheaper to copy from OLDEQUIV into the
5485 reload register than it would be to move from memory,
5486 don't use it. Likewise, if we need a secondary register
5487 or memory. */
5488
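	  /* For example (costs hypothetical): if moving a value from
	     OLDEQUIV's class into reload_reg_class[j] had
	     REGISTER_MOVE_COST 4 while MEMORY_MOVE_COST (mode) were
	     also 4, the register copy would be no cheaper than
	     reloading from memory, so OLDEQUIV is rejected below.  */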
5489 if (oldequiv != 0
5490 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5491 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5492 reload_reg_class[j])
5493 >= MEMORY_MOVE_COST (mode)))
5494#ifdef SECONDARY_INPUT_RELOAD_CLASS
5495 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5496 mode, oldequiv)
5497 != NO_REGS)
5498#endif
5499#ifdef SECONDARY_MEMORY_NEEDED
5500 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5501 REGNO_REG_CLASS (regno),
5502 mode)
5503#endif
5504 ))
5505 oldequiv = 0;
32131a9c
RK
5506 }
5507
5508 if (oldequiv == 0)
5509 oldequiv = old;
5510 else if (GET_CODE (oldequiv) == REG)
5511 oldequiv_reg = oldequiv;
5512 else if (GET_CODE (oldequiv) == SUBREG)
5513 oldequiv_reg = SUBREG_REG (oldequiv);
5514
5515 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5516 then load RELOADREG from OLDEQUIV. */
5517
5518 if (GET_MODE (reloadreg) != mode)
b6983ae3 5519 reloadreg = gen_lowpart_common (mode, reloadreg);
32131a9c
RK
5520 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5521 oldequiv = SUBREG_REG (oldequiv);
5522 if (GET_MODE (oldequiv) != VOIDmode
5523 && mode != GET_MODE (oldequiv))
5524 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5525
546b63fb 5526 /* Switch to the right place to emit the reload insns. */
32131a9c
RK
5527 switch (reload_when_needed[j])
5528 {
32131a9c 5529 case RELOAD_OTHER:
546b63fb
RK
5530 where = &other_input_reload_insns;
5531 break;
5532 case RELOAD_FOR_INPUT:
5533 where = &input_reload_insns[reload_opnum[j]];
32131a9c 5534 break;
546b63fb
RK
5535 case RELOAD_FOR_INPUT_ADDRESS:
5536 where = &input_address_reload_insns[reload_opnum[j]];
32131a9c 5537 break;
546b63fb
RK
5538 case RELOAD_FOR_OUTPUT_ADDRESS:
5539 where = &output_address_reload_insns[reload_opnum[j]];
32131a9c
RK
5540 break;
5541 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
5542 where = &operand_reload_insns;
5543 break;
5544 case RELOAD_FOR_OTHER_ADDRESS:
5545 where = &other_input_address_reload_insns;
5546 break;
5547 default:
5548 abort ();
32131a9c
RK
5549 }
5550
546b63fb 5551 push_to_sequence (*where);
32131a9c
RK
5552 special = 0;
5553
5554 /* Auto-increment addresses must be reloaded in a special way. */
5555 if (GET_CODE (oldequiv) == POST_INC
5556 || GET_CODE (oldequiv) == POST_DEC
5557 || GET_CODE (oldequiv) == PRE_INC
5558 || GET_CODE (oldequiv) == PRE_DEC)
5559 {
 5560 /* We are not going to bother supporting the case where an
 5561 incremented register can't be copied directly from
5562 OLDEQUIV since this seems highly unlikely. */
5563 if (reload_secondary_reload[j] >= 0)
5564 abort ();
5565 /* Prevent normal processing of this reload. */
5566 special = 1;
5567 /* Output a special code sequence for this case. */
546b63fb 5568 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
32131a9c
RK
5569 }
5570
5571 /* If we are reloading a pseudo-register that was set by the previous
5572 insn, see if we can get rid of that pseudo-register entirely
5573 by redirecting the previous insn into our reload register. */
5574
5575 else if (optimize && GET_CODE (old) == REG
5576 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5577 && dead_or_set_p (insn, old)
5578 /* This is unsafe if some other reload
5579 uses the same reg first. */
546b63fb
RK
5580 && reload_reg_free_before_p (REGNO (reloadreg),
5581 reload_opnum[j],
5582 reload_when_needed[j]))
32131a9c
RK
5583 {
5584 rtx temp = PREV_INSN (insn);
5585 while (temp && GET_CODE (temp) == NOTE)
5586 temp = PREV_INSN (temp);
5587 if (temp
5588 && GET_CODE (temp) == INSN
5589 && GET_CODE (PATTERN (temp)) == SET
5590 && SET_DEST (PATTERN (temp)) == old
5591 /* Make sure we can access insn_operand_constraint. */
5592 && asm_noperands (PATTERN (temp)) < 0
5593 /* This is unsafe if prev insn rejects our reload reg. */
5594 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5595 reloadreg)
5596 /* This is unsafe if operand occurs more than once in current
5597 insn. Perhaps some occurrences aren't reloaded. */
5598 && count_occurrences (PATTERN (insn), old) == 1
5599 /* Don't risk splitting a matching pair of operands. */
5600 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5601 {
5602 /* Store into the reload register instead of the pseudo. */
5603 SET_DEST (PATTERN (temp)) = reloadreg;
5604 /* If these are the only uses of the pseudo reg,
5605 pretend for GDB it lives in the reload reg we used. */
5606 if (reg_n_deaths[REGNO (old)] == 1
5607 && reg_n_sets[REGNO (old)] == 1)
5608 {
5609 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5610 alter_reg (REGNO (old), -1);
5611 }
5612 special = 1;
5613 }
5614 }
5615
546b63fb
RK
5616 /* We can't do that, so output an insn to load RELOADREG. */
5617
32131a9c
RK
5618 if (! special)
5619 {
5620#ifdef SECONDARY_INPUT_RELOAD_CLASS
5621 rtx second_reload_reg = 0;
5622 enum insn_code icode;
5623
5624 /* If we have a secondary reload, pick up the secondary register
d445b551
RK
5625 and icode, if any. If OLDEQUIV and OLD are different or
5626 if this is an in-out reload, recompute whether or not we
5627 still need a secondary register and what the icode should
5628 be. If we still need a secondary register and the class or
5629 icode is different, go back to reloading from OLD if using
5630 OLDEQUIV means that we got the wrong type of register. We
5631 cannot have different class or icode due to an in-out reload
5632 because we don't make such reloads when both the input and
5633 output need secondary reload registers. */
32131a9c
RK
5634
5635 if (reload_secondary_reload[j] >= 0)
5636 {
5637 int secondary_reload = reload_secondary_reload[j];
1554c2c6
RK
5638 rtx real_oldequiv = oldequiv;
5639 rtx real_old = old;
5640
5641 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5642 and similarly for OLD.
5643 See comments in find_secondary_reload in reload.c. */
5644 if (GET_CODE (oldequiv) == REG
5645 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5646 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5647 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5648
5649 if (GET_CODE (old) == REG
5650 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5651 && reg_equiv_mem[REGNO (old)] != 0)
5652 real_old = reg_equiv_mem[REGNO (old)];
5653
32131a9c
RK
5654 second_reload_reg = reload_reg_rtx[secondary_reload];
5655 icode = reload_secondary_icode[j];
5656
d445b551
RK
5657 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5658 || (reload_in[j] != 0 && reload_out[j] != 0))
32131a9c
RK
5659 {
5660 enum reg_class new_class
5661 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
1554c2c6 5662 mode, real_oldequiv);
32131a9c
RK
5663
5664 if (new_class == NO_REGS)
5665 second_reload_reg = 0;
5666 else
5667 {
5668 enum insn_code new_icode;
5669 enum machine_mode new_mode;
5670
5671 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5672 REGNO (second_reload_reg)))
1554c2c6 5673 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
5674 else
5675 {
5676 new_icode = reload_in_optab[(int) mode];
5677 if (new_icode != CODE_FOR_nothing
5678 && ((insn_operand_predicate[(int) new_icode][0]
a8fdc208 5679 && ! ((*insn_operand_predicate[(int) new_icode][0])
32131a9c 5680 (reloadreg, mode)))
a8fdc208
RS
5681 || (insn_operand_predicate[(int) new_icode][1]
5682 && ! ((*insn_operand_predicate[(int) new_icode][1])
1554c2c6 5683 (real_oldequiv, mode)))))
32131a9c
RK
5684 new_icode = CODE_FOR_nothing;
5685
5686 if (new_icode == CODE_FOR_nothing)
5687 new_mode = mode;
5688 else
196ddf8a 5689 new_mode = insn_operand_mode[(int) new_icode][2];
32131a9c
RK
5690
5691 if (GET_MODE (second_reload_reg) != new_mode)
5692 {
5693 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5694 new_mode))
1554c2c6 5695 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
5696 else
5697 second_reload_reg
3aaa90c7
MM
5698 = gen_rtx (REG, new_mode,
5699 REGNO (second_reload_reg));
32131a9c
RK
5700 }
5701 }
5702 }
5703 }
5704
5705 /* If we still need a secondary reload register, check
5706 to see if it is being used as a scratch or intermediate
1554c2c6
RK
5707 register and generate code appropriately. If we need
5708 a scratch register, use REAL_OLDEQUIV since the form of
5709 the insn may depend on the actual address if it is
5710 a MEM. */
32131a9c
RK
5711
5712 if (second_reload_reg)
5713 {
5714 if (icode != CODE_FOR_nothing)
5715 {
546b63fb
RK
5716 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
5717 second_reload_reg));
32131a9c
RK
5718 special = 1;
5719 }
5720 else
5721 {
5722 /* See if we need a scratch register to load the
5723 intermediate register (a tertiary reload). */
5724 enum insn_code tertiary_icode
5725 = reload_secondary_icode[secondary_reload];
5726
5727 if (tertiary_icode != CODE_FOR_nothing)
5728 {
5729 rtx third_reload_reg
5730 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5731
546b63fb
RK
5732 emit_insn ((GEN_FCN (tertiary_icode)
5733 (second_reload_reg, real_oldequiv,
5734 third_reload_reg)));
32131a9c
RK
5735 }
5736 else
546b63fb
RK
5737 gen_input_reload (second_reload_reg, oldequiv,
5738 reload_opnum[j],
5739 reload_when_needed[j]);
5740
5741 oldequiv = second_reload_reg;
32131a9c
RK
5742 }
5743 }
5744 }
5745#endif
5746
5747 if (! special)
546b63fb
RK
5748 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
5749 reload_when_needed[j]);
32131a9c
RK
5750
5751#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5752 /* We may have to make a REG_DEAD note for the secondary reload
5753 register in the insns we just made. Find the last insn that
5754 mentioned the register. */
5755 if (! special && second_reload_reg
5756 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5757 {
5758 rtx prev;
5759
546b63fb 5760 for (prev = get_last_insn (); prev;
32131a9c
RK
5761 prev = PREV_INSN (prev))
 5762 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
bfa30b22
RK
5763 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5764 PATTERN (prev)))
32131a9c
RK
5765 {
5766 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5767 second_reload_reg,
5768 REG_NOTES (prev));
5769 break;
5770 }
5771 }
5772#endif
5773 }
5774
546b63fb
RK
5775 /* End this sequence. */
5776 *where = get_insns ();
5777 end_sequence ();
32131a9c
RK
5778 }
5779
5780 /* Add a note saying the input reload reg
5781 dies in this insn, if anyone cares. */
5782#ifdef PRESERVE_DEATH_INFO_REGNO_P
5783 if (old != 0
5784 && reload_reg_rtx[j] != old
5785 && reload_reg_rtx[j] != 0
5786 && reload_out[j] == 0
5787 && ! reload_inherited[j]
5788 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
5789 {
5790 register rtx reloadreg = reload_reg_rtx[j];
5791
a8fdc208 5792#if 0
32131a9c
RK
5793 /* We can't abort here because we need to support this for sched.c.
5794 It's not terrible to miss a REG_DEAD note, but we should try
5795 to figure out how to do this correctly. */
5796 /* The code below is incorrect for address-only reloads. */
5797 if (reload_when_needed[j] != RELOAD_OTHER
5798 && reload_when_needed[j] != RELOAD_FOR_INPUT)
5799 abort ();
5800#endif
5801
5802 /* Add a death note to this insn, for an input reload. */
5803
5804 if ((reload_when_needed[j] == RELOAD_OTHER
5805 || reload_when_needed[j] == RELOAD_FOR_INPUT)
5806 && ! dead_or_set_p (insn, reloadreg))
5807 REG_NOTES (insn)
5808 = gen_rtx (EXPR_LIST, REG_DEAD,
5809 reloadreg, REG_NOTES (insn));
5810 }
5811
5812 /* When we inherit a reload, the last marked death of the reload reg
5813 may no longer really be a death. */
5814 if (reload_reg_rtx[j] != 0
5815 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
5816 && reload_inherited[j])
5817 {
5818 /* Handle inheriting an output reload.
5819 Remove the death note from the output reload insn. */
5820 if (reload_spill_index[j] >= 0
5821 && GET_CODE (reload_in[j]) == REG
5822 && spill_reg_store[reload_spill_index[j]] != 0
5823 && find_regno_note (spill_reg_store[reload_spill_index[j]],
5824 REG_DEAD, REGNO (reload_reg_rtx[j])))
5825 remove_death (REGNO (reload_reg_rtx[j]),
5826 spill_reg_store[reload_spill_index[j]]);
5827 /* Likewise for input reloads that were inherited. */
5828 else if (reload_spill_index[j] >= 0
5829 && GET_CODE (reload_in[j]) == REG
5830 && spill_reg_store[reload_spill_index[j]] == 0
5831 && reload_inheritance_insn[j] != 0
a8fdc208 5832 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
32131a9c
RK
5833 REGNO (reload_reg_rtx[j])))
5834 remove_death (REGNO (reload_reg_rtx[j]),
5835 reload_inheritance_insn[j]);
5836 else
5837 {
5838 rtx prev;
5839
5840 /* We got this register from find_equiv_reg.
5841 Search back for its last death note and get rid of it.
5842 But don't search back too far.
5843 Don't go past a place where this reg is set,
5844 since a death note before that remains valid. */
5845 for (prev = PREV_INSN (insn);
5846 prev && GET_CODE (prev) != CODE_LABEL;
5847 prev = PREV_INSN (prev))
5848 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5849 && dead_or_set_p (prev, reload_reg_rtx[j]))
5850 {
5851 if (find_regno_note (prev, REG_DEAD,
5852 REGNO (reload_reg_rtx[j])))
5853 remove_death (REGNO (reload_reg_rtx[j]), prev);
5854 break;
5855 }
5856 }
5857 }
5858
5859 /* We might have used find_equiv_reg above to choose an alternate
5860 place from which to reload. If so, and it died, we need to remove
5861 that death and move it to one of the insns we just made. */
5862
5863 if (oldequiv_reg != 0
5864 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
5865 {
5866 rtx prev, prev1;
5867
5868 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
5869 prev = PREV_INSN (prev))
5870 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5871 && dead_or_set_p (prev, oldequiv_reg))
5872 {
5873 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
5874 {
5875 for (prev1 = this_reload_insn;
5876 prev1; prev1 = PREV_INSN (prev1))
 5877 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
bfa30b22
RK
5878 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
5879 PATTERN (prev1)))
32131a9c
RK
5880 {
5881 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
5882 oldequiv_reg,
5883 REG_NOTES (prev1));
5884 break;
5885 }
5886 remove_death (REGNO (oldequiv_reg), prev);
5887 }
5888 break;
5889 }
5890 }
5891#endif
5892
5893 /* If we are reloading a register that was recently stored in with an
5894 output-reload, see if we can prove there was
5895 actually no need to store the old value in it. */
5896
5897 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
546b63fb 5898 && reload_in[j] != 0
32131a9c
RK
5899 && GET_CODE (reload_in[j]) == REG
5900#if 0
5901 /* There doesn't seem to be any reason to restrict this to pseudos
5902 and doing so loses in the case where we are copying from a
5903 register of the wrong class. */
5904 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
5905#endif
5906 && spill_reg_store[reload_spill_index[j]] != 0
546b63fb
RK
5907 /* This is unsafe if some other reload uses the same reg first. */
5908 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
5909 reload_opnum[j], reload_when_needed[j])
32131a9c
RK
5910 && dead_or_set_p (insn, reload_in[j])
5911 /* This is unsafe if operand occurs more than once in current
5912 insn. Perhaps some occurrences weren't reloaded. */
5913 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5914 delete_output_reload (insn, j,
5915 spill_reg_store[reload_spill_index[j]]);
5916
5917 /* Input-reloading is done. Now do output-reloading,
5918 storing the value from the reload-register after the main insn
5919 if reload_out[j] is nonzero.
5920
5921 ??? At some point we need to support handling output reloads of
5922 JUMP_INSNs or insns that set cc0. */
5923 old = reload_out[j];
5924 if (old != 0
5925 && reload_reg_rtx[j] != old
5926 && reload_reg_rtx[j] != 0)
5927 {
5928 register rtx reloadreg = reload_reg_rtx[j];
5929 register rtx second_reloadreg = 0;
32131a9c
RK
5930 rtx note, p;
5931 enum machine_mode mode;
5932 int special = 0;
5933
5934 /* An output operand that dies right away does need a reload,
5935 but need not be copied from it. Show the new location in the
5936 REG_UNUSED note. */
5937 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
5938 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
5939 {
5940 XEXP (note, 0) = reload_reg_rtx[j];
5941 continue;
5942 }
5943 else if (GET_CODE (old) == SCRATCH)
5944 /* If we aren't optimizing, there won't be a REG_UNUSED note,
5945 but we don't want to make an output reload. */
5946 continue;
5947
5948#if 0
5949 /* Strip off of OLD any size-increasing SUBREGs such as
5950 (SUBREG:SI foo:QI 0). */
5951
5952 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
5953 && (GET_MODE_SIZE (GET_MODE (old))
5954 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
5955 old = SUBREG_REG (old);
5956#endif
5957
 5958 /* If this is a JUMP_INSN, we can't support output reloads yet. */
5959 if (GET_CODE (insn) == JUMP_INSN)
5960 abort ();
5961
546b63fb
RK
5962 push_to_sequence (output_reload_insns[reload_opnum[j]]);
5963
32131a9c
RK
5964 /* Determine the mode to reload in.
5965 See comments above (for input reloading). */
5966
5967 mode = GET_MODE (old);
5968 if (mode == VOIDmode)
79a365a7
RS
5969 {
5970 /* VOIDmode should never happen for an output. */
5971 if (asm_noperands (PATTERN (insn)) < 0)
5972 /* It's the compiler's fault. */
5973 abort ();
5974 error_for_asm (insn, "output operand is constant in `asm'");
5975 /* Prevent crash--use something we know is valid. */
5976 mode = word_mode;
5977 old = gen_rtx (REG, mode, REGNO (reloadreg));
5978 }
32131a9c 5979
32131a9c 5980 if (GET_MODE (reloadreg) != mode)
b6983ae3 5981 reloadreg = gen_lowpart_common (mode, reloadreg);
32131a9c
RK
5982
5983#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5984
5985 /* If we need two reload regs, set RELOADREG to the intermediate
5986 one, since it will be stored into OUT. We might need a secondary
5987 register only for an input reload, so check again here. */
5988
1554c2c6 5989 if (reload_secondary_reload[j] >= 0)
32131a9c 5990 {
1554c2c6 5991 rtx real_old = old;
32131a9c 5992
1554c2c6
RK
5993 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
5994 && reg_equiv_mem[REGNO (old)] != 0)
5995 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 5996
1554c2c6
RK
 5997 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
5998 mode, real_old)
5999 != NO_REGS))
6000 {
6001 second_reloadreg = reloadreg;
6002 reloadreg = reload_reg_rtx[reload_secondary_reload[j]];
32131a9c 6003
1554c2c6
RK
6004 /* See if RELOADREG is to be used as a scratch register
6005 or as an intermediate register. */
6006 if (reload_secondary_icode[j] != CODE_FOR_nothing)
32131a9c 6007 {
546b63fb
RK
6008 emit_insn ((GEN_FCN (reload_secondary_icode[j])
6009 (real_old, second_reloadreg, reloadreg)));
1554c2c6 6010 special = 1;
32131a9c
RK
6011 }
6012 else
1554c2c6
RK
6013 {
6014 /* See if we need both a scratch and intermediate reload
6015 register. */
6016 int secondary_reload = reload_secondary_reload[j];
6017 enum insn_code tertiary_icode
6018 = reload_secondary_icode[secondary_reload];
6019 rtx pat;
32131a9c 6020
1554c2c6
RK
6021 if (GET_MODE (reloadreg) != mode)
6022 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6023
6024 if (tertiary_icode != CODE_FOR_nothing)
6025 {
6026 rtx third_reloadreg
6027 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
6028 pat = (GEN_FCN (tertiary_icode)
6029 (reloadreg, second_reloadreg, third_reloadreg));
6030 }
9ad5f9f6
JW
6031#ifdef SECONDARY_MEMORY_NEEDED
6032 /* If we need a memory location to do the move, do it that way. */
6033 else if (GET_CODE (reloadreg) == REG
6034 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
6035 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
6036 REGNO_REG_CLASS (REGNO (second_reloadreg)),
6037 GET_MODE (second_reloadreg)))
6038 {
6039 /* Get the memory to use and rewrite both registers
6040 to its mode. */
546b63fb
RK
6041 rtx loc
6042 = get_secondary_mem (reloadreg,
6043 GET_MODE (second_reloadreg),
6044 reload_opnum[j],
6045 reload_when_needed[j]);
9ad5f9f6
JW
6046 rtx tmp_reloadreg;
6047
6048 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6049 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6050 REGNO (second_reloadreg));
6051
6052 if (GET_MODE (loc) != GET_MODE (reloadreg))
6053 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6054 REGNO (reloadreg));
6055 else
6056 tmp_reloadreg = reloadreg;
6057
546b63fb 6058 emit_move_insn (loc, second_reloadreg);
9ad5f9f6
JW
6059 pat = gen_move_insn (tmp_reloadreg, loc);
6060 }
6061#endif
1554c2c6
RK
6062 else
6063 pat = gen_move_insn (reloadreg, second_reloadreg);
6064
546b63fb 6065 emit_insn (pat);
1554c2c6 6066 }
32131a9c
RK
6067 }
6068 }
6069#endif
6070
6071 /* Output the last reload insn. */
6072 if (! special)
0dadecf6
RK
6073 {
6074#ifdef SECONDARY_MEMORY_NEEDED
6075 /* If we need a memory location to do the move, do it that way. */
6076 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6077 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6078 REGNO_REG_CLASS (REGNO (reloadreg)),
6079 GET_MODE (reloadreg)))
6080 {
6081 /* Get the memory to use and rewrite both registers to
6082 its mode. */
546b63fb
RK
6083 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6084 reload_opnum[j],
6085 reload_when_needed[j]);
0dadecf6
RK
6086
6087 if (GET_MODE (loc) != GET_MODE (reloadreg))
6088 reloadreg = gen_rtx (REG, GET_MODE (loc),
6089 REGNO (reloadreg));
6090
6091 if (GET_MODE (loc) != GET_MODE (old))
6092 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6093
546b63fb
RK
6094 emit_insn (gen_move_insn (loc, reloadreg));
6095 emit_insn (gen_move_insn (old, loc));
0dadecf6
RK
6096 }
6097 else
6098#endif
546b63fb 6099 emit_insn (gen_move_insn (old, reloadreg));
0dadecf6 6100 }
32131a9c
RK
6101
6102#ifdef PRESERVE_DEATH_INFO_REGNO_P
6103 /* If final will look at death notes for this reg,
6104 put one on the last output-reload insn to use it. Similarly
6105 for any secondary register. */
6106 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
546b63fb 6107 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6108 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
6109 && reg_overlap_mentioned_for_reload_p (reloadreg,
6110 PATTERN (p)))
32131a9c
RK
6111 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6112 reloadreg, REG_NOTES (p));
6113
6114#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6115 if (! special
6116 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
546b63fb 6117 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6118 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
6119 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6120 PATTERN (p)))
32131a9c
RK
6121 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6122 second_reloadreg, REG_NOTES (p));
6123#endif
6124#endif
6125 /* Look at all insns we emitted, just to be safe. */
546b63fb 6126 for (p = get_insns (); p; p = NEXT_INSN (p))
32131a9c
RK
6127 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6128 {
6129 /* If this output reload doesn't come from a spill reg,
6130 clear any memory of reloaded copies of the pseudo reg.
6131 If this output reload comes from a spill reg,
6132 reg_has_output_reload will make this do nothing. */
6133 note_stores (PATTERN (p), forget_old_reloads_1);
6134
6135 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6136 store_insn = p;
6137 }
6138
546b63fb
RK
6139 output_reload_insns[reload_opnum[j]] = get_insns ();
6140 end_sequence ();
6141
32131a9c
RK
6142 }
6143
6144 if (reload_spill_index[j] >= 0)
6145 new_spill_reg_store[reload_spill_index[j]] = store_insn;
6146 }
6147
546b63fb
RK
6148 /* Now write all the insns we made for reloads in the order expected by
6149 the allocation functions. Prior to the insn being reloaded, we write
6150 the following reloads:
6151
6152 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6153
6154 RELOAD_OTHER reloads.
6155
6156 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6157 the RELOAD_FOR_INPUT reload for the operand.
6158
6159 RELOAD_FOR_OPERAND_ADDRESS reloads.
6160
6161 After the insn being reloaded, we write the following:
6162
6163 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6164 the RELOAD_FOR_OUTPUT reload for that operand. */
6165
6166 emit_insns_before (other_input_address_reload_insns, before_insn);
6167 emit_insns_before (other_input_reload_insns, before_insn);
6168
6169 for (j = 0; j < reload_n_operands; j++)
6170 {
6171 emit_insns_before (input_address_reload_insns[j], before_insn);
6172 emit_insns_before (input_reload_insns[j], before_insn);
6173 }
6174
6175 emit_insns_before (operand_reload_insns, before_insn);
6176
6177 for (j = 0; j < reload_n_operands; j++)
6178 {
6179 emit_insns_before (output_address_reload_insns[j], following_insn);
6180 emit_insns_before (output_reload_insns[j], following_insn);
6181 }
6182
32131a9c
RK
6183 /* Move death notes from INSN
6184 to output-operand-address and output reload insns. */
6185#ifdef PRESERVE_DEATH_INFO_REGNO_P
6186 {
6187 rtx insn1;
6188 /* Loop over those insns, last ones first. */
6189 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6190 insn1 = PREV_INSN (insn1))
6191 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6192 {
6193 rtx source = SET_SRC (PATTERN (insn1));
6194 rtx dest = SET_DEST (PATTERN (insn1));
6195
6196 /* The note we will examine next. */
6197 rtx reg_notes = REG_NOTES (insn);
6198 /* The place that pointed to this note. */
6199 rtx *prev_reg_note = &REG_NOTES (insn);
6200
6201 /* If the note is for something used in the source of this
6202 reload insn, or in the output address, move the note. */
6203 while (reg_notes)
6204 {
6205 rtx next_reg_notes = XEXP (reg_notes, 1);
6206 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6207 && GET_CODE (XEXP (reg_notes, 0)) == REG
6208 && ((GET_CODE (dest) != REG
bfa30b22
RK
6209 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6210 dest))
6211 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6212 source)))
32131a9c
RK
6213 {
6214 *prev_reg_note = next_reg_notes;
6215 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6216 REG_NOTES (insn1) = reg_notes;
6217 }
6218 else
6219 prev_reg_note = &XEXP (reg_notes, 1);
6220
6221 reg_notes = next_reg_notes;
6222 }
6223 }
6224 }
6225#endif
6226
  /* For all the spill regs newly reloaded in this instruction,
     record what they were reloaded from, so subsequent instructions
     can inherit the reloads.

     Update spill_reg_store for the reloads of this insn.
     Copy the elements that were updated in the loop above.  */

  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];
      register int i = reload_spill_index[r];

      /* I is nonneg if this reload used one of the spill regs.
	 If reload_reg_rtx[r] is 0, this is an optional reload
	 that we opted to ignore.

	 Also ignore reloads that don't reach the end of the insn,
	 since we will eventually see the one that does.  */

      if (i >= 0 && reload_reg_rtx[r] != 0
	  && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
				       reload_when_needed[r]))
	{
	  /* First, clear out memory of what used to be in this spill reg.
	     If consecutive registers are used, clear them all.  */
	  int nr
	    = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
	  int k;

	  for (k = 0; k < nr; k++)
	    {
	      reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
	      reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
	    }

	  /* Maybe the spill reg contains a copy of reload_out.  */
	  if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
	    {
	      register int nregno = REGNO (reload_out[r]);
	      int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
			 : HARD_REGNO_NREGS (nregno,
					     GET_MODE (reload_reg_rtx[r])));

	      spill_reg_store[i] = new_spill_reg_store[i];
	      reg_last_reload_reg[nregno] = reload_reg_rtx[r];

	      /* If NREGNO is a hard register, it may occupy more than
		 one register.  If it does, say what is in the
		 rest of the registers assuming that both registers
		 agree on how many words the object takes.  If not,
		 invalidate the subsequent registers.  */

	      if (nregno < FIRST_PSEUDO_REGISTER)
		for (k = 1; k < nnr; k++)
		  reg_last_reload_reg[nregno + k]
		    = (nr == nnr ? gen_rtx (REG, word_mode,
					    REGNO (reload_reg_rtx[r]) + k)
		       : 0);

	      /* Now do the inverse operation.  */
	      for (k = 0; k < nr; k++)
		{
		  reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
		    = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
		       : nregno + k);
		  reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
		}
	    }

	  /* Maybe the spill reg contains a copy of reload_in.  Only do
	     something if there will not be an output reload for
	     the register being reloaded.  */
	  else if (reload_out[r] == 0
		   && reload_in[r] != 0
		   && ((GET_CODE (reload_in[r]) == REG
			&& ! reg_has_output_reload[REGNO (reload_in[r])]
		       || (GET_CODE (reload_in_reg[r]) == REG
			   && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
	    {
	      register int nregno;
	      int nnr;

	      if (GET_CODE (reload_in[r]) == REG)
		nregno = REGNO (reload_in[r]);
	      else
		nregno = REGNO (reload_in_reg[r]);

	      nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
		     : HARD_REGNO_NREGS (nregno,
					 GET_MODE (reload_reg_rtx[r])));

	      reg_last_reload_reg[nregno] = reload_reg_rtx[r];

	      if (nregno < FIRST_PSEUDO_REGISTER)
		for (k = 1; k < nnr; k++)
		  reg_last_reload_reg[nregno + k]
		    = (nr == nnr ? gen_rtx (REG, word_mode,
					    REGNO (reload_reg_rtx[r]) + k)
		       : 0);

	      /* Unless we inherited this reload, show we haven't
		 recently done a store.  */
	      if (! reload_inherited[r])
		spill_reg_store[i] = 0;

	      for (k = 0; k < nr; k++)
		{
		  reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
		    = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
		       : nregno + k);
		  reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
		    = insn;
		}
	    }
	}

      /* The following if-statement was #if 0'd in 1.34 (or before...).
	 It's reenabled in 1.35 because supposedly nothing else
	 deals with this problem.  */

      /* If a register gets output-reloaded from a non-spill register,
	 that invalidates any previous reloaded copy of it.
	 But forget_old_reloads_1 won't get to see it, because
	 it thinks only about the original insn.  So invalidate it here.  */
      if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
	{
	  register int nregno = REGNO (reload_out[r]);
	  reg_last_reload_reg[nregno] = 0;
	}
    }
}
\f
/* Emit code to perform an input reload of IN to RELOADREG.  IN is from
   operand OPNUM with reload type TYPE.

   Returns first insn emitted.  */

rtx
gen_input_reload (reloadreg, in, opnum, type)
     rtx reloadreg;
     rtx in;
     int opnum;
     enum reload_type type;
{
  rtx last = get_last_insn ();

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds either two registers, or
     a register and a constant or MEM, or a MEM and a constant.  This can
     occur during frame pointer elimination and while reloading addresses.
     This case is handled by trying to emit a single insn
     to perform the add.  If it is not valid, we use a two-insn sequence.

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && ((GET_CODE (XEXP (in, 0)) == REG
	   && (GET_CODE (XEXP (in, 1)) == REG
	       || CONSTANT_P (XEXP (in, 1))
	       || GET_CODE (XEXP (in, 1)) == MEM))
	  || (GET_CODE (XEXP (in, 0)) == MEM
	      && CONSTANT_P (XEXP (in, 1)))))
    {
      /* We need to compute the sum of what is either a register and a
	 constant, a register and memory, a hard register and a pseudo
	 register, or memory and a constant and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `insn_extract' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (GET_CODE (XEXP (in, 1)) == REG
	  && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx (PLUS, GET_MODE (in), op0, op1);

      insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain_operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      delete_insns_since (last);

      /* If that failed, we must use a conservative two-insn sequence.
	 Use move to copy the constant, MEM, or pseudo register to the reload
	 register, since "move" will be able to handle an arbitrary operand,
	 unlike add which can't, in general.  Then add the registers.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
	  || (GET_CODE (op1) == REG
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
	tem = op0, op0 = op1, op1 = tem;

      emit_insn (gen_move_insn (reloadreg, op0));

      /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = reloadreg;

      emit_insn (gen_add2_insn (reloadreg, op1));
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
				       REGNO_REG_CLASS (REGNO (reloadreg)),
				       GET_MODE (reloadreg)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);

      if (GET_MODE (loc) != GET_MODE (reloadreg))
	reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx (REG, GET_MODE (loc), REGNO (in));

      emit_insn (gen_move_insn (loc, in));
      emit_insn (gen_move_insn (reloadreg, loc));
    }
#endif

  /* If IN is a simple operand, use gen_move_insn.  */
  else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
    emit_insn (gen_move_insn (reloadreg, in));

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (reloadreg, in));
#endif

  /* Otherwise, just write (set RELOADREG IN) and hope for the best.  */
  else
    emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));

  /* Return the first insn emitted.
     We cannot just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we cannot assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
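
/* An illustrative, self-contained sketch (not part of the reload pass) of
   the operand normalization performed above for two-address adds: when the
   insn would be A = B + A, the operands are swapped so it reads A = A + B.
   The names normalize_add, dest, src0 and src1 are hypothetical, and plain
   ints stand in for hard register numbers; the block is fenced with #if 0
   so it is never compiled here.  */
#if 0
#include <stdio.h>

/* Swap the two source register numbers so that, when one of them is the
   destination register, it ends up in the first source position.  */
static void
normalize_add (int dest, int *src0, int *src1)
{
  if (*src1 == dest && *src0 != dest)
    {
      int tem = *src0;
      *src0 = *src1;
      *src1 = tem;
    }
}

int
main (void)
{
  int dest = 3, src0 = 7, src1 = 3;	/* Models A = B + A with A = r3, B = r7.  */

  normalize_add (dest, &src0, &src1);
  /* Prints "r3 = r3 + r7", the A = A + B form a two-address add accepts.  */
  printf ("r%d = r%d + r%d\n", dest, src0, src1);
  return 0;
}
#endif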
\f
/* Delete a previously made output-reload
   whose result we now believe is not needed.
   First we double-check.

   INSN is the insn now being processed.
   OUTPUT_RELOAD_INSN is the insn of the output reload.
   J is the reload-number for this insn.  */

static void
delete_output_reload (insn, j, output_reload_insn)
     rtx insn;
     int j;
     rtx output_reload_insn;
{
  register rtx i1;

  /* Get the raw pseudo-register referred to.  */

  rtx reg = reload_in[j];
  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and no jumps or labels intervene, then the value can get
     here through the reload reg alone.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
	return;
      if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
	  && reg_mentioned_p (reg, PATTERN (i1)))
	return;
    }

  if (cannot_omit_stores[REGNO (reg)])
    return;

  /* If this insn will store in the pseudo again,
     the previous store can be removed.  */
  if (reload_out[j] == reload_in[j])
    delete_insn (output_reload_insn);

  /* See if the pseudo reg has been completely replaced
     with reload regs.  If so, delete the store insn
     and forget we had a stack slot for the pseudo.  */
  else if (reg_n_deaths[REGNO (reg)] == 1
	   && reg_basic_block[REGNO (reg)] >= 0
	   && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here
	 and the beginning of the current basic block.
	 (We also know that the last use before INSN was
	 the output reload we are thinking of deleting, but never mind that.)
	 Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	  if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    /* Some other ref remains;
	       we can't do anything.  */
	    return;
	}

      /* Delete the now-dead stores into this pseudo.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    delete_insn (i2);
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	}

      /* For the debugging info,
	 say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
      alter_reg (REGNO (reg), -1);
    }
}
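
/* An illustrative, self-contained sketch (not part of the reload pass) of
   the backward scan used above: starting just before the current insn, walk
   toward the start of the basic block, stop at a label or jump, ignore
   insns that merely store into the pseudo, and report whether any other
   reference to it remains.  Everything here (struct toy_insn, the names
   pseudo_still_referenced_p, reads_reg, writes_reg) is hypothetical, and
   the block is fenced with #if 0 so it is never compiled here.  */
#if 0
#include <stdio.h>

enum toy_kind { TOY_INSN, TOY_LABEL, TOY_JUMP };

struct toy_insn
{
  enum toy_kind kind;
  int reads_reg;	/* Nonzero if the insn uses the pseudo.  */
  int writes_reg;	/* Nonzero if the insn stores into it.  */
};

/* Scan insns[pos - 1] back to insns[0]; return 1 if a real use of the
   pseudo survives before a label or jump is reached.  */
static int
pseudo_still_referenced_p (struct toy_insn *insns, int pos)
{
  int i;

  for (i = pos - 1; i >= 0; i--)
    {
      if (insns[i].writes_reg && ! insns[i].reads_reg)
	continue;		/* A store alone does not count as a use.  */
      if (insns[i].kind == TOY_LABEL || insns[i].kind == TOY_JUMP)
	break;			/* Left the basic block; stop looking.  */
      if (insns[i].reads_reg)
	return 1;		/* Some other ref remains.  */
    }
  return 0;
}

int
main (void)
{
  struct toy_insn insns[] =
    {
      { TOY_LABEL, 0, 0 },	/* Start of the basic block.  */
      { TOY_INSN, 0, 1 },	/* The output reload: store only.  */
      { TOY_INSN, 0, 0 },	/* Unrelated insn.  */
    };

  printf ("%d\n", pseudo_still_referenced_p (insns, 3));	/* Prints 0.  */
  return 0;
}
#endif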
\f
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.  */

static void
inc_for_reload (reloadreg, value, inc_amount)
     rtx reloadreg;
     rtx value;
     int inc_amount;
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were non-zero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (GET_CODE (incloc) == REG)
    reg_last_reload_reg[REGNO (incloc)] = 0;

  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
    inc_amount = - inc_amount;

  inc = GEN_INT (inc_amount);

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post)
    emit_insn (gen_move_insn (reloadreg, incloc));

  /* See if we can directly increment INCLOC.  Use a method similar to that
     in gen_input_reload.  */

  last = get_last_insn ();
  add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
				 gen_rtx (PLUS, GET_MODE (incloc),
					  incloc, inc)));

  code = recog_memoized (add_insn);
  if (code >= 0)
    {
      insn_extract (add_insn);
      if (constrain_operands (code, 1))
	{
	  /* If this is a pre-increment and we have incremented the value
	     where it lives, copy the incremented value to RELOADREG to
	     be used as an address.  */

	  if (! post)
	    emit_insn (gen_move_insn (reloadreg, incloc));

	  return;
	}
    }

  delete_insns_since (last);

  /* If we couldn't do the increment directly, we must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      emit_insn (gen_move_insn (reloadreg, incloc));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied INCLOC to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
    }

  return;
}
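
/* An illustrative, self-contained sketch (not part of the reload pass) of
   the post-increment fallback above, done on plain ints: copy the location
   into the reload register, increment the copy, store it back, then undo
   the increment in the reload register so it still holds the value the
   insn expects (the pre-increment one).  The names incloc, reloadreg and
   inc_amount mirror the variables above but are otherwise hypothetical;
   the block is fenced with #if 0 so it is never compiled here.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int incloc = 100;		/* The memory or register being incremented.  */
  int inc_amount = 4;
  int reloadreg;

  reloadreg = incloc;		/* gen_move_insn (reloadreg, incloc)  */
  reloadreg += inc_amount;	/* gen_add2_insn (reloadreg, inc)  */
  incloc = reloadreg;		/* gen_move_insn (incloc, reloadreg)  */
  reloadreg -= inc_amount;	/* gen_add2_insn (reloadreg, GEN_INT (-inc_amount))  */

  /* incloc now holds 104 and reloadreg the original 100, which is what a
     post-increment address should yield to the insn being reloaded.  */
  printf ("incloc=%d reloadreg=%d\n", incloc, reloadreg);
  return 0;
}
#endif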
\f
/* Return 1 if we are certain that the constraint-string STRING allows
   the hard register REG.  Return 0 if we can't be sure of this.  */

static int
constraint_accepts_reg_p (string, reg)
     char *string;
     rtx reg;
{
  int value = 0;
  int regno = true_regnum (reg);
  int c;

  /* Initialize for first alternative.  */
  value = 0;
  /* Check that each alternative contains `g' or `r'.  */
  while (1)
    switch (c = *string++)
      {
      case 0:
	/* If an alternative lacks `g' or `r', we lose.  */
	return value;
      case ',':
	/* If an alternative lacks `g' or `r', we lose.  */
	if (value == 0)
	  return 0;
	/* Initialize for next alternative.  */
	value = 0;
	break;
      case 'g':
      case 'r':
	/* Any general reg wins for this alternative.  */
	if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
	  value = 1;
	break;
      default:
	/* Any reg in specified class wins for this alternative.  */
	{
	  enum reg_class class = REG_CLASS_FROM_LETTER (c);

	  if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
	    value = 1;
	}
      }
}
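
/* An illustrative, self-contained sketch (not part of the reload pass) of
   the scan above, reduced to plain strings: walk a comma-separated
   constraint string and report 1 only if every alternative contains the
   given letter (standing in for `g'/`r' or a class letter that accepts the
   register).  The name every_alternative_has is hypothetical; the block is
   fenced with #if 0 so it is never compiled here.  */
#if 0
#include <stdio.h>

static int
every_alternative_has (const char *string, char letter)
{
  int value = 0;	/* Nonzero once the current alternative matched.  */
  char c;

  while (1)
    switch (c = *string++)
      {
      case 0:
	return value;		/* Result for the last alternative.  */
      case ',':
	if (value == 0)
	  return 0;		/* One alternative lacked the letter: lose.  */
	value = 0;		/* Start the next alternative.  */
	break;
      default:
	if (c == letter)
	  value = 1;
	break;
      }
}

int
main (void)
{
  printf ("%d\n", every_alternative_has ("r,m", 'r'));	/* Prints 0.  */
  printf ("%d\n", every_alternative_has ("rm,ri", 'r'));	/* Prints 1.  */
  return 0;
}
#endif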
\f
/* Return the number of places FIND appears within X, but don't count
   an occurrence if some SET_DEST is FIND.  */

static int
count_occurrences (x, find)
     register rtx x, find;
{
  register int i, j;
  register enum rtx_code code;
  register char *format_ptr;
  int count;

  if (x == find)
    return 1;
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case SET:
      if (SET_DEST (x) == find)
	return count_occurrences (SET_SRC (x), find);
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  count += count_occurrences (XEXP (x, i), find);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      for (j = 0; j < XVECLEN (x, i); j++)
		count += count_occurrences (XVECEXP (x, i, j), find);
	    }
	  break;
	}
    }
  return count;
}
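
/* An illustrative, self-contained sketch (not part of the reload pass) of
   the counting rule above, on a toy expression tree instead of RTL: count
   how many times a target node occurs, except that when an assignment's
   destination is exactly the target, only its source side is counted.
   Everything here (struct toy_node, toy_count, TOY_ASSIGN, TOY_ADD,
   TOY_LEAF) is hypothetical; the block is fenced with #if 0 so it is never
   compiled here.  */
#if 0
#include <stdio.h>

enum toy_code { TOY_LEAF, TOY_ADD, TOY_ASSIGN };

struct toy_node
{
  enum toy_code code;
  struct toy_node *op0, *op1;	/* For TOY_ASSIGN: op0 = dest, op1 = src.  */
};

static int
toy_count (struct toy_node *x, struct toy_node *find)
{
  if (x == find)
    return 1;
  if (x == 0)
    return 0;
  if (x->code == TOY_LEAF)
    return 0;
  if (x->code == TOY_ASSIGN && x->op0 == find)
    return toy_count (x->op1, find);	/* Don't count the SET_DEST itself.  */
  return toy_count (x->op0, find) + toy_count (x->op1, find);
}

int
main (void)
{
  struct toy_node reg = { TOY_LEAF, 0, 0 };
  struct toy_node other = { TOY_LEAF, 0, 0 };
  struct toy_node sum = { TOY_ADD, &reg, &other };	/* reg + other  */
  struct toy_node set = { TOY_ASSIGN, &reg, &sum };	/* reg = reg + other  */

  printf ("%d\n", toy_count (&set, &reg));	/* Prints 1, not 2.  */
  return 0;
}
#endif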