]> gcc.gnu.org Git - gcc.git/blame - gcc/reload1.c
(default_conversion): Add bitfield promotions.
[gcc.git] / gcc / reload1.c
CommitLineData
32131a9c 1/* Reload pseudo regs into hard regs for insns that require hard regs.
2c5d9e37 2 Copyright (C) 1987, 88, 89, 92-5, 1996 Free Software Foundation, Inc.
32131a9c
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
e99215a3
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
32131a9c
RK
20
21
ff2da9fc 22#include <stdio.h>
32131a9c
RK
23#include "config.h"
24#include "rtl.h"
25#include "obstack.h"
26#include "insn-config.h"
27#include "insn-flags.h"
28#include "insn-codes.h"
29#include "flags.h"
30#include "expr.h"
31#include "regs.h"
32#include "hard-reg-set.h"
33#include "reload.h"
34#include "recog.h"
35#include "basic-block.h"
36#include "output.h"
a9c366bf 37#include "real.h"
32131a9c
RK
38
39/* This file contains the reload pass of the compiler, which is
40 run after register allocation has been done. It checks that
41 each insn is valid (operands required to be in registers really
42 are in registers of the proper class) and fixes up invalid ones
43 by copying values temporarily into registers for the insns
44 that need them.
45
46 The results of register allocation are described by the vector
47 reg_renumber; the insns still contain pseudo regs, but reg_renumber
48 can be used to find which hard reg, if any, a pseudo reg is in.
49
50 The technique we always use is to free up a few hard regs that are
51 called ``reload regs'', and for each place where a pseudo reg
52 must be in a hard reg, copy it temporarily into one of the reload regs.
53
54 All the pseudos that were formerly allocated to the hard regs that
55 are now in use as reload regs must be ``spilled''. This means
56 that they go to other hard regs, or to stack slots if no other
57 available hard regs can be found. Spilling can invalidate more
58 insns, requiring additional need for reloads, so we must keep checking
59 until the process stabilizes.
60
61 For machines with different classes of registers, we must keep track
62 of the register class needed for each reload, and make sure that
63 we allocate enough reload registers of each class.
64
65 The file reload.c contains the code that checks one insn for
66 validity and reports the reloads that it needs. This file
67 is in charge of scanning the entire rtl code, accumulating the
68 reload needs, spilling, assigning reload registers to use for
69 fixing up each insn, and generating the new insns to copy values
70 into the reload registers. */
546b63fb
RK
71
72
73#ifndef REGISTER_MOVE_COST
74#define REGISTER_MOVE_COST(x, y) 2
75#endif
76
77#ifndef MEMORY_MOVE_COST
78#define MEMORY_MOVE_COST(x) 4
79#endif
32131a9c
RK
80\f
81/* During reload_as_needed, element N contains a REG rtx for the hard reg
d08ea79f 82 into which reg N has been reloaded (perhaps for a previous insn). */
32131a9c
RK
83static rtx *reg_last_reload_reg;
84
85/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87static char *reg_has_output_reload;
88
89/* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91static HARD_REG_SET reg_is_output_reload;
92
93/* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97rtx *reg_equiv_constant;
98
99/* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
4803a34a 103rtx *reg_equiv_memory_loc;
32131a9c
RK
104
105/* Element N is the address of stack slot to which pseudo reg N is equivalent.
106 This is used when the address is not valid as a memory address
107 (because its displacement is too big for the machine.) */
108rtx *reg_equiv_address;
109
110/* Element N is the memory slot to which pseudo reg N is equivalent,
111 or zero if pseudo reg N is not equivalent to a memory slot. */
112rtx *reg_equiv_mem;
113
114/* Widest width in which each pseudo reg is referred to (via subreg). */
115static int *reg_max_ref_width;
116
117/* Element N is the insn that initialized reg N from its equivalent
118 constant or memory slot. */
119static rtx *reg_equiv_init;
120
121/* During reload_as_needed, element N contains the last pseudo regno
122 reloaded into the Nth reload register. This vector is in parallel
123 with spill_regs. If that pseudo reg occupied more than one register,
124 reg_reloaded_contents points to that pseudo for each spill register in
125 use; all of these must remain set for an inheritance to occur. */
126static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
127
128/* During reload_as_needed, element N contains the insn for which
129 the Nth reload register was last used. This vector is in parallel
130 with spill_regs, and its contents are significant only when
131 reg_reloaded_contents is significant. */
132static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
133
134/* Number of spill-regs so far; number of valid elements of spill_regs. */
135static int n_spills;
136
137/* In parallel with spill_regs, contains REG rtx's for those regs.
138 Holds the last rtx used for any given reg, or 0 if it has never
139 been used for spilling yet. This rtx is reused, provided it has
140 the proper mode. */
141static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
142
143/* In parallel with spill_regs, contains nonzero for a spill reg
144 that was stored after the last time it was used.
145 The precise value is the insn generated to do the store. */
146static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
147
148/* This table is the inverse mapping of spill_regs:
149 indexed by hard reg number,
150 it contains the position of that reg in spill_regs,
151 or -1 for something that is not in spill_regs. */
152static short spill_reg_order[FIRST_PSEUDO_REGISTER];
153
154/* This reg set indicates registers that may not be used for retrying global
155 allocation. The registers that may not be used include all spill registers
156 and the frame pointer (if we are using one). */
157HARD_REG_SET forbidden_regs;
158
159/* This reg set indicates registers that are not good for spill registers.
160 They will not be used to complete groups of spill registers. This includes
546b63fb
RK
161 all fixed registers, registers that may be eliminated, and, if
162 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
32131a9c
RK
163
164 (spill_reg_order prevents these registers from being used to start a
165 group.) */
166static HARD_REG_SET bad_spill_regs;
167
168/* Describes order of use of registers for reloading
169 of spilled pseudo-registers. `spills' is the number of
170 elements that are actually valid; new ones are added at the end. */
171static short spill_regs[FIRST_PSEUDO_REGISTER];
172
8b4f9969
JW
173/* This reg set indicates those registers that have been used a spill
174 registers. This information is used in reorg.c, to help figure out
175 what registers are live at any point. It is assumed that all spill_regs
176 are dead at every CODE_LABEL. */
177
178HARD_REG_SET used_spill_regs;
179
4079cd63
JW
180/* Index of last register assigned as a spill register. We allocate in
181 a round-robin fashion. */
182
183static int last_spill_reg;
184
32131a9c
RK
185/* Describes order of preference for putting regs into spill_regs.
186 Contains the numbers of all the hard regs, in order most preferred first.
187 This order is different for each function.
188 It is set up by order_regs_for_reload.
189 Empty elements at the end contain -1. */
190static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
191
192/* 1 for a hard register that appears explicitly in the rtl
193 (for example, function value registers, special registers
194 used by insns, structure value pointer registers). */
195static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
196
197/* Indicates if a register was counted against the need for
198 groups. 0 means it can count against max_nongroup instead. */
199static HARD_REG_SET counted_for_groups;
200
201/* Indicates if a register was counted against the need for
202 non-groups. 0 means it can become part of a new group.
203 During choose_reload_regs, 1 here means don't use this reg
204 as part of a group, even if it seems to be otherwise ok. */
205static HARD_REG_SET counted_for_nongroups;
206
208dffa5
RS
207/* Indexed by pseudo reg number N,
208 says may not delete stores into the real (memory) home of pseudo N.
209 This is set if we already substituted a memory equivalent in some uses,
210 which happens when we have to eliminate the fp from it. */
211static char *cannot_omit_stores;
212
32131a9c
RK
213/* Nonzero if indirect addressing is supported on the machine; this means
214 that spilling (REG n) does not require reloading it into a register in
215 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
216 value indicates the level of indirect addressing supported, e.g., two
217 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
218 a hard register. */
219
220static char spill_indirect_levels;
221
222/* Nonzero if indirect addressing is supported when the innermost MEM is
223 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
224 which these are valid is the same as spill_indirect_levels, above. */
225
226char indirect_symref_ok;
227
228/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
229
230char double_reg_address_ok;
231
232/* Record the stack slot for each spilled hard register. */
233
234static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
235
236/* Width allocated so far for that stack slot. */
237
238static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
239
240/* Indexed by register class and basic block number, nonzero if there is
241 any need for a spill register of that class in that basic block.
242 The pointer is 0 if we did stupid allocation and don't know
243 the structure of basic blocks. */
244
245char *basic_block_needs[N_REG_CLASSES];
246
247/* First uid used by insns created by reload in this function.
248 Used in find_equiv_reg. */
249int reload_first_uid;
250
251/* Flag set by local-alloc or global-alloc if anything is live in
252 a call-clobbered reg across calls. */
253
254int caller_save_needed;
255
256/* Set to 1 while reload_as_needed is operating.
257 Required by some machines to handle any generated moves differently. */
258
259int reload_in_progress = 0;
260
261/* These arrays record the insn_code of insns that may be needed to
262 perform input and output reloads of special objects. They provide a
263 place to pass a scratch register. */
264
265enum insn_code reload_in_optab[NUM_MACHINE_MODES];
266enum insn_code reload_out_optab[NUM_MACHINE_MODES];
267
d45cf215 268/* This obstack is used for allocation of rtl during register elimination.
32131a9c
RK
269 The allocated storage can be freed once find_reloads has processed the
270 insn. */
271
272struct obstack reload_obstack;
273char *reload_firstobj;
274
275#define obstack_chunk_alloc xmalloc
276#define obstack_chunk_free free
277
32131a9c
RK
278/* List of labels that must never be deleted. */
279extern rtx forced_labels;
2c5d9e37
RK
280
281/* Allocation number table from global register allocation. */
282extern int *reg_allocno;
32131a9c
RK
283\f
284/* This structure is used to record information about register eliminations.
285 Each array entry describes one possible way of eliminating a register
286 in favor of another. If there is more than one way of eliminating a
287 particular register, the most preferred should be specified first. */
288
289static struct elim_table
290{
291 int from; /* Register number to be eliminated. */
292 int to; /* Register number used as replacement. */
293 int initial_offset; /* Initial difference between values. */
294 int can_eliminate; /* Non-zero if this elimination can be done. */
295 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
296 insns made by reload. */
297 int offset; /* Current offset between the two regs. */
a8efe40d 298 int max_offset; /* Maximum offset between the two regs. */
32131a9c
RK
299 int previous_offset; /* Offset at end of previous insn. */
300 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
301 rtx from_rtx; /* REG rtx for the register to be eliminated.
302 We cannot simply compare the number since
303 we might then spuriously replace a hard
304 register corresponding to a pseudo
305 assigned to the reg to be eliminated. */
306 rtx to_rtx; /* REG rtx for the replacement. */
307} reg_eliminate[] =
308
309/* If a set of eliminable registers was specified, define the table from it.
310 Otherwise, default to the normal case of the frame pointer being
311 replaced by the stack pointer. */
312
313#ifdef ELIMINABLE_REGS
314 ELIMINABLE_REGS;
315#else
316 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
317#endif
318
319#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
320
321/* Record the number of pending eliminations that have an offset not equal
322 to their initial offset. If non-zero, we use a new copy of each
323 replacement result in any insns encountered. */
324static int num_not_at_initial_offset;
325
326/* Count the number of registers that we may be able to eliminate. */
327static int num_eliminable;
328
329/* For each label, we record the offset of each elimination. If we reach
330 a label by more than one path and an offset differs, we cannot do the
331 elimination. This information is indexed by the number of the label.
332 The first table is an array of flags that records whether we have yet
333 encountered a label and the second table is an array of arrays, one
334 entry in the latter array for each elimination. */
335
336static char *offsets_known_at;
337static int (*offsets_at)[NUM_ELIMINABLE_REGS];
338
339/* Number of labels in the current function. */
340
341static int num_labels;
546b63fb
RK
342
343struct hard_reg_n_uses { int regno; int uses; };
32131a9c 344\f
546b63fb
RK
345static int possible_group_p PROTO((int, int *));
346static void count_possible_groups PROTO((int *, enum machine_mode *,
066aca28 347 int *, int));
546b63fb
RK
348static int modes_equiv_for_class_p PROTO((enum machine_mode,
349 enum machine_mode,
350 enum reg_class));
351static void spill_failure PROTO((rtx));
352static int new_spill_reg PROTO((int, int, int *, int *, int,
353 FILE *));
354static void delete_dead_insn PROTO((rtx));
355static void alter_reg PROTO((int, int));
c307c237 356static void mark_scratch_live PROTO((rtx));
546b63fb
RK
357static void set_label_offsets PROTO((rtx, rtx, int));
358static int eliminate_regs_in_insn PROTO((rtx, int));
359static void mark_not_eliminable PROTO((rtx, rtx));
360static int spill_hard_reg PROTO((int, int, FILE *, int));
361static void scan_paradoxical_subregs PROTO((rtx));
362static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
363 struct hard_reg_n_uses *));
2c5d9e37 364static void order_regs_for_reload PROTO((int));
1d1a832c 365static int compare_spill_regs PROTO((short *, short *));
546b63fb 366static void reload_as_needed PROTO((rtx, int));
9a881562 367static void forget_old_reloads_1 PROTO((rtx, rtx));
546b63fb
RK
368static int reload_reg_class_lower PROTO((short *, short *));
369static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
370 enum machine_mode));
be7ae2a4
RK
371static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
372 enum machine_mode));
546b63fb
RK
373static int reload_reg_free_p PROTO((int, int, enum reload_type));
374static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
375static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
351aa1c1 376static int reloads_conflict PROTO((int, int));
546b63fb
RK
377static int allocate_reload_reg PROTO((int, rtx, int, int));
378static void choose_reload_regs PROTO((rtx, rtx));
379static void merge_assigned_reloads PROTO((rtx));
380static void emit_reload_insns PROTO((rtx));
381static void delete_output_reload PROTO((rtx, int, rtx));
382static void inc_for_reload PROTO((rtx, rtx, int));
383static int constraint_accepts_reg_p PROTO((char *, rtx));
384static int count_occurrences PROTO((rtx, rtx));
32131a9c 385\f
546b63fb
RK
386/* Initialize the reload pass once per compilation. */
387
32131a9c
RK
388void
389init_reload ()
390{
391 register int i;
392
393 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
394 Set spill_indirect_levels to the number of levels such addressing is
395 permitted, zero if it is not permitted at all. */
396
397 register rtx tem
398 = gen_rtx (MEM, Pmode,
399 gen_rtx (PLUS, Pmode,
400 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
fb3821f7 401 GEN_INT (4)));
32131a9c
RK
402 spill_indirect_levels = 0;
403
404 while (memory_address_p (QImode, tem))
405 {
406 spill_indirect_levels++;
407 tem = gen_rtx (MEM, Pmode, tem);
408 }
409
410 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
411
412 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
413 indirect_symref_ok = memory_address_p (QImode, tem);
414
415 /* See if reg+reg is a valid (and offsettable) address. */
416
65701fd2 417 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
57caa638
RS
418 {
419 tem = gen_rtx (PLUS, Pmode,
3ec2ea3e 420 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
57caa638
RS
421 gen_rtx (REG, Pmode, i));
422 /* This way, we make sure that reg+reg is an offsettable address. */
423 tem = plus_constant (tem, 4);
424
425 if (memory_address_p (QImode, tem))
426 {
427 double_reg_address_ok = 1;
428 break;
429 }
430 }
32131a9c
RK
431
432 /* Initialize obstack for our rtl allocation. */
433 gcc_obstack_init (&reload_obstack);
434 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
32131a9c
RK
435}
436
546b63fb 437/* Main entry point for the reload pass.
32131a9c
RK
438
439 FIRST is the first insn of the function being compiled.
440
441 GLOBAL nonzero means we were called from global_alloc
442 and should attempt to reallocate any pseudoregs that we
443 displace from hard regs we will use for reloads.
444 If GLOBAL is zero, we do not have enough information to do that,
445 so any pseudo reg that is spilled must go to the stack.
446
447 DUMPFILE is the global-reg debugging dump file stream, or 0.
448 If it is nonzero, messages are written to it to describe
449 which registers are seized as reload regs, which pseudo regs
5352b11a 450 are spilled from them, and where the pseudo regs are reallocated to.
32131a9c 451
5352b11a
RS
452 Return value is nonzero if reload failed
453 and we must not do any more for this function. */
454
455int
32131a9c
RK
456reload (first, global, dumpfile)
457 rtx first;
458 int global;
459 FILE *dumpfile;
460{
461 register int class;
8b3e912b 462 register int i, j, k;
32131a9c
RK
463 register rtx insn;
464 register struct elim_table *ep;
465
466 int something_changed;
467 int something_needs_reloads;
468 int something_needs_elimination;
469 int new_basic_block_needs;
a8efe40d
RK
470 enum reg_class caller_save_spill_class = NO_REGS;
471 int caller_save_group_size = 1;
32131a9c 472
5352b11a
RS
473 /* Nonzero means we couldn't get enough spill regs. */
474 int failure = 0;
475
32131a9c
RK
476 /* The basic block number currently being processed for INSN. */
477 int this_block;
478
479 /* Make sure even insns with volatile mem refs are recognizable. */
480 init_recog ();
481
482 /* Enable find_equiv_reg to distinguish insns made by reload. */
483 reload_first_uid = get_max_uid ();
484
485 for (i = 0; i < N_REG_CLASSES; i++)
486 basic_block_needs[i] = 0;
487
0dadecf6
RK
488#ifdef SECONDARY_MEMORY_NEEDED
489 /* Initialize the secondary memory table. */
490 clear_secondary_mem ();
491#endif
492
32131a9c
RK
493 /* Remember which hard regs appear explicitly
494 before we merge into `regs_ever_live' the ones in which
495 pseudo regs have been allocated. */
496 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
497
498 /* We don't have a stack slot for any spill reg yet. */
4c9a05bc
RK
499 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
500 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
32131a9c 501
a8efe40d
RK
502 /* Initialize the save area information for caller-save, in case some
503 are needed. */
504 init_save_areas ();
a8fdc208 505
32131a9c
RK
506 /* Compute which hard registers are now in use
507 as homes for pseudo registers.
508 This is done here rather than (eg) in global_alloc
509 because this point is reached even if not optimizing. */
510
511 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
512 mark_home_live (i);
513
c307c237
RK
514 for (i = 0; i < scratch_list_length; i++)
515 if (scratch_list[i])
516 mark_scratch_live (scratch_list[i]);
517
32131a9c
RK
518 /* Make sure that the last insn in the chain
519 is not something that needs reloading. */
fb3821f7 520 emit_note (NULL_PTR, NOTE_INSN_DELETED);
32131a9c
RK
521
522 /* Find all the pseudo registers that didn't get hard regs
523 but do have known equivalent constants or memory slots.
524 These include parameters (known equivalent to parameter slots)
525 and cse'd or loop-moved constant memory addresses.
526
527 Record constant equivalents in reg_equiv_constant
528 so they will be substituted by find_reloads.
529 Record memory equivalents in reg_mem_equiv so they can
530 be substituted eventually by altering the REG-rtx's. */
531
532 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
4c9a05bc 533 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
32131a9c 534 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
4c9a05bc 535 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
32131a9c 536 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
4c9a05bc 537 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
32131a9c 538 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
4c9a05bc 539 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
32131a9c 540 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
4c9a05bc 541 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
32131a9c 542 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
4c9a05bc 543 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
208dffa5
RS
544 cannot_omit_stores = (char *) alloca (max_regno);
545 bzero (cannot_omit_stores, max_regno);
32131a9c 546
56f58d3a
RK
547#ifdef SMALL_REGISTER_CLASSES
548 CLEAR_HARD_REG_SET (forbidden_regs);
549#endif
550
32131a9c 551 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
56f58d3a
RK
552 Also find all paradoxical subregs and find largest such for each pseudo.
553 On machines with small register classes, record hard registers that
b453cb0b
RK
554 are used for user variables. These can never be used for spills.
555 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
556 caller-saved registers must be marked live. */
32131a9c
RK
557
558 for (insn = first; insn; insn = NEXT_INSN (insn))
559 {
560 rtx set = single_set (insn);
561
b453cb0b
RK
562 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
563 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
564 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
565 if (! call_used_regs[i])
566 regs_ever_live[i] = 1;
567
32131a9c
RK
568 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
569 {
fb3821f7 570 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
a8efe40d
RK
571 if (note
572#ifdef LEGITIMATE_PIC_OPERAND_P
a8fdc208 573 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
a8efe40d
RK
574 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
575#endif
576 )
32131a9c
RK
577 {
578 rtx x = XEXP (note, 0);
579 i = REGNO (SET_DEST (set));
580 if (i > LAST_VIRTUAL_REGISTER)
581 {
582 if (GET_CODE (x) == MEM)
583 reg_equiv_memory_loc[i] = x;
584 else if (CONSTANT_P (x))
585 {
586 if (LEGITIMATE_CONSTANT_P (x))
587 reg_equiv_constant[i] = x;
588 else
589 reg_equiv_memory_loc[i]
d445b551 590 = force_const_mem (GET_MODE (SET_DEST (set)), x);
32131a9c
RK
591 }
592 else
593 continue;
594
595 /* If this register is being made equivalent to a MEM
596 and the MEM is not SET_SRC, the equivalencing insn
597 is one with the MEM as a SET_DEST and it occurs later.
598 So don't mark this insn now. */
599 if (GET_CODE (x) != MEM
600 || rtx_equal_p (SET_SRC (set), x))
601 reg_equiv_init[i] = insn;
602 }
603 }
604 }
605
606 /* If this insn is setting a MEM from a register equivalent to it,
607 this is the equivalencing insn. */
608 else if (set && GET_CODE (SET_DEST (set)) == MEM
609 && GET_CODE (SET_SRC (set)) == REG
610 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
611 && rtx_equal_p (SET_DEST (set),
612 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
613 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
614
615 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
616 scan_paradoxical_subregs (PATTERN (insn));
617 }
618
619 /* Does this function require a frame pointer? */
620
621 frame_pointer_needed = (! flag_omit_frame_pointer
622#ifdef EXIT_IGNORE_STACK
623 /* ?? If EXIT_IGNORE_STACK is set, we will not save
624 and restore sp for alloca. So we can't eliminate
625 the frame pointer in that case. At some point,
626 we should improve this by emitting the
627 sp-adjusting insns for this case. */
628 || (current_function_calls_alloca
629 && EXIT_IGNORE_STACK)
630#endif
631 || FRAME_POINTER_REQUIRED);
632
633 num_eliminable = 0;
634
635 /* Initialize the table of registers to eliminate. The way we do this
636 depends on how the eliminable registers were defined. */
637#ifdef ELIMINABLE_REGS
638 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
639 {
640 ep->can_eliminate = ep->can_eliminate_previous
641 = (CAN_ELIMINATE (ep->from, ep->to)
9ff3516a 642 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
32131a9c
RK
643 }
644#else
645 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
646 = ! frame_pointer_needed;
647#endif
648
649 /* Count the number of eliminable registers and build the FROM and TO
a8fdc208 650 REG rtx's. Note that code in gen_rtx will cause, e.g.,
32131a9c
RK
651 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
652 We depend on this. */
653 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
654 {
655 num_eliminable += ep->can_eliminate;
656 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
657 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
658 }
659
660 num_labels = max_label_num () - get_first_label_num ();
661
662 /* Allocate the tables used to store offset information at labels. */
663 offsets_known_at = (char *) alloca (num_labels);
664 offsets_at
665 = (int (*)[NUM_ELIMINABLE_REGS])
666 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
667
668 offsets_known_at -= get_first_label_num ();
669 offsets_at -= get_first_label_num ();
670
671 /* Alter each pseudo-reg rtx to contain its hard reg number.
672 Assign stack slots to the pseudos that lack hard regs or equivalents.
673 Do not touch virtual registers. */
674
675 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
676 alter_reg (i, -1);
677
678 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
679 because the stack size may be a part of the offset computation for
680 register elimination. */
681 assign_stack_local (BLKmode, 0, 0);
682
683 /* If we have some registers we think can be eliminated, scan all insns to
684 see if there is an insn that sets one of these registers to something
685 other than itself plus a constant. If so, the register cannot be
686 eliminated. Doing this scan here eliminates an extra pass through the
687 main reload loop in the most common case where register elimination
688 cannot be done. */
689 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
690 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
691 || GET_CODE (insn) == CALL_INSN)
692 note_stores (PATTERN (insn), mark_not_eliminable);
693
694#ifndef REGISTER_CONSTRAINTS
695 /* If all the pseudo regs have hard regs,
696 except for those that are never referenced,
697 we know that no reloads are needed. */
698 /* But that is not true if there are register constraints, since
699 in that case some pseudos might be in the wrong kind of hard reg. */
700
701 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
702 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
703 break;
704
b8093d02 705 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
32131a9c
RK
706 return;
707#endif
708
709 /* Compute the order of preference for hard registers to spill.
710 Store them by decreasing preference in potential_reload_regs. */
711
2c5d9e37 712 order_regs_for_reload (global);
32131a9c
RK
713
714 /* So far, no hard regs have been spilled. */
715 n_spills = 0;
716 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
717 spill_reg_order[i] = -1;
718
4079cd63
JW
719 /* Initialize to -1, which means take the first spill register. */
720 last_spill_reg = -1;
721
32131a9c
RK
722 /* On most machines, we can't use any register explicitly used in the
723 rtl as a spill register. But on some, we have to. Those will have
724 taken care to keep the life of hard regs as short as possible. */
725
56f58d3a 726#ifndef SMALL_REGISTER_CLASSES
32131a9c
RK
727 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
728#endif
729
730 /* Spill any hard regs that we know we can't eliminate. */
731 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
732 if (! ep->can_eliminate)
9ff3516a
RK
733 spill_hard_reg (ep->from, global, dumpfile, 1);
734
735#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
736 if (frame_pointer_needed)
737 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
738#endif
32131a9c
RK
739
740 if (global)
741 for (i = 0; i < N_REG_CLASSES; i++)
742 {
4c9a05bc 743 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
32131a9c
RK
744 bzero (basic_block_needs[i], n_basic_blocks);
745 }
746
b2f15f94
RK
747 /* From now on, we need to emit any moves without making new pseudos. */
748 reload_in_progress = 1;
749
32131a9c
RK
750 /* This loop scans the entire function each go-round
751 and repeats until one repetition spills no additional hard regs. */
752
d45cf215 753 /* This flag is set when a pseudo reg is spilled,
32131a9c
RK
754 to require another pass. Note that getting an additional reload
755 reg does not necessarily imply any pseudo reg was spilled;
756 sometimes we find a reload reg that no pseudo reg was allocated in. */
757 something_changed = 1;
758 /* This flag is set if there are any insns that require reloading. */
759 something_needs_reloads = 0;
760 /* This flag is set if there are any insns that require register
761 eliminations. */
762 something_needs_elimination = 0;
763 while (something_changed)
764 {
765 rtx after_call = 0;
766
767 /* For each class, number of reload regs needed in that class.
768 This is the maximum over all insns of the needs in that class
769 of the individual insn. */
770 int max_needs[N_REG_CLASSES];
771 /* For each class, size of group of consecutive regs
772 that is needed for the reloads of this class. */
773 int group_size[N_REG_CLASSES];
774 /* For each class, max number of consecutive groups needed.
775 (Each group contains group_size[CLASS] consecutive registers.) */
776 int max_groups[N_REG_CLASSES];
777 /* For each class, max number needed of regs that don't belong
778 to any of the groups. */
779 int max_nongroups[N_REG_CLASSES];
780 /* For each class, the machine mode which requires consecutive
781 groups of regs of that class.
782 If two different modes ever require groups of one class,
783 they must be the same size and equally restrictive for that class,
784 otherwise we can't handle the complexity. */
785 enum machine_mode group_mode[N_REG_CLASSES];
5352b11a
RS
786 /* Record the insn where each maximum need is first found. */
787 rtx max_needs_insn[N_REG_CLASSES];
788 rtx max_groups_insn[N_REG_CLASSES];
789 rtx max_nongroups_insn[N_REG_CLASSES];
32131a9c 790 rtx x;
0dadecf6 791 int starting_frame_size = get_frame_size ();
9ff3516a 792 int previous_frame_pointer_needed = frame_pointer_needed;
e404a39a 793 static char *reg_class_names[] = REG_CLASS_NAMES;
32131a9c
RK
794
795 something_changed = 0;
4c9a05bc
RK
796 bzero ((char *) max_needs, sizeof max_needs);
797 bzero ((char *) max_groups, sizeof max_groups);
798 bzero ((char *) max_nongroups, sizeof max_nongroups);
799 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
800 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
801 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
802 bzero ((char *) group_size, sizeof group_size);
32131a9c
RK
803 for (i = 0; i < N_REG_CLASSES; i++)
804 group_mode[i] = VOIDmode;
805
806 /* Keep track of which basic blocks are needing the reloads. */
807 this_block = 0;
808
809 /* Remember whether any element of basic_block_needs
810 changes from 0 to 1 in this pass. */
811 new_basic_block_needs = 0;
812
813 /* Reset all offsets on eliminable registers to their initial values. */
814#ifdef ELIMINABLE_REGS
815 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
816 {
817 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
a8efe40d
RK
818 ep->previous_offset = ep->offset
819 = ep->max_offset = ep->initial_offset;
32131a9c
RK
820 }
821#else
822#ifdef INITIAL_FRAME_POINTER_OFFSET
823 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
824#else
825 if (!FRAME_POINTER_REQUIRED)
826 abort ();
827 reg_eliminate[0].initial_offset = 0;
828#endif
a8efe40d 829 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
32131a9c
RK
830 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
831#endif
832
833 num_not_at_initial_offset = 0;
834
4c9a05bc 835 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
32131a9c
RK
836
837 /* Set a known offset for each forced label to be at the initial offset
838 of each elimination. We do this because we assume that all
839 computed jumps occur from a location where each elimination is
840 at its initial offset. */
841
842 for (x = forced_labels; x; x = XEXP (x, 1))
843 if (XEXP (x, 0))
fb3821f7 844 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
32131a9c
RK
845
846 /* For each pseudo register that has an equivalent location defined,
847 try to eliminate any eliminable registers (such as the frame pointer)
848 assuming initial offsets for the replacement register, which
849 is the normal case.
850
851 If the resulting location is directly addressable, substitute
852 the MEM we just got directly for the old REG.
853
854 If it is not addressable but is a constant or the sum of a hard reg
855 and constant, it is probably not addressable because the constant is
856 out of range, in that case record the address; we will generate
857 hairy code to compute the address in a register each time it is
6491dbbb
RK
858 needed. Similarly if it is a hard register, but one that is not
859 valid as an address register.
32131a9c
RK
860
861 If the location is not addressable, but does not have one of the
862 above forms, assign a stack slot. We have to do this to avoid the
863 potential of producing lots of reloads if, e.g., a location involves
864 a pseudo that didn't get a hard register and has an equivalent memory
865 location that also involves a pseudo that didn't get a hard register.
866
867 Perhaps at some point we will improve reload_when_needed handling
868 so this problem goes away. But that's very hairy. */
869
870 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
871 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
872 {
fb3821f7 873 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
32131a9c
RK
874
875 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
876 XEXP (x, 0)))
877 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
878 else if (CONSTANT_P (XEXP (x, 0))
6491dbbb
RK
879 || (GET_CODE (XEXP (x, 0)) == REG
880 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
32131a9c
RK
881 || (GET_CODE (XEXP (x, 0)) == PLUS
882 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
883 && (REGNO (XEXP (XEXP (x, 0), 0))
884 < FIRST_PSEUDO_REGISTER)
885 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
886 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
887 else
888 {
889 /* Make a new stack slot. Then indicate that something
a8fdc208 890 changed so we go back and recompute offsets for
32131a9c
RK
891 eliminable registers because the allocation of memory
892 below might change some offset. reg_equiv_{mem,address}
893 will be set up for this pseudo on the next pass around
894 the loop. */
895 reg_equiv_memory_loc[i] = 0;
896 reg_equiv_init[i] = 0;
897 alter_reg (i, -1);
898 something_changed = 1;
899 }
900 }
a8fdc208 901
d45cf215 902 /* If we allocated another pseudo to the stack, redo elimination
32131a9c
RK
903 bookkeeping. */
904 if (something_changed)
905 continue;
906
a8efe40d
RK
907 /* If caller-saves needs a group, initialize the group to include
908 the size and mode required for caller-saves. */
909
910 if (caller_save_group_size > 1)
911 {
912 group_mode[(int) caller_save_spill_class] = Pmode;
913 group_size[(int) caller_save_spill_class] = caller_save_group_size;
914 }
915
32131a9c
RK
916 /* Compute the most additional registers needed by any instruction.
917 Collect information separately for each class of regs. */
918
919 for (insn = first; insn; insn = NEXT_INSN (insn))
920 {
921 if (global && this_block + 1 < n_basic_blocks
922 && insn == basic_block_head[this_block+1])
923 ++this_block;
924
925 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
926 might include REG_LABEL), we need to see what effects this
927 has on the known offsets at labels. */
928
929 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
930 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
931 && REG_NOTES (insn) != 0))
932 set_label_offsets (insn, insn, 0);
933
934 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
935 {
936 /* Nonzero means don't use a reload reg that overlaps
937 the place where a function value can be returned. */
938 rtx avoid_return_reg = 0;
939
940 rtx old_body = PATTERN (insn);
941 int old_code = INSN_CODE (insn);
942 rtx old_notes = REG_NOTES (insn);
943 int did_elimination = 0;
546b63fb
RK
944
945 /* To compute the number of reload registers of each class
9faa82d8 946 needed for an insn, we must simulate what choose_reload_regs
546b63fb
RK
947 can do. We do this by splitting an insn into an "input" and
948 an "output" part. RELOAD_OTHER reloads are used in both.
949 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
950 which must be live over the entire input section of reloads,
951 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
952 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
953 inputs.
954
955 The registers needed for output are RELOAD_OTHER and
956 RELOAD_FOR_OUTPUT, which are live for the entire output
957 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
958 reloads for each operand.
959
960 The total number of registers needed is the maximum of the
961 inputs and outputs. */
962
8b3e912b 963 struct needs
32131a9c 964 {
8b3e912b
RK
965 /* [0] is normal, [1] is nongroup. */
966 int regs[2][N_REG_CLASSES];
967 int groups[N_REG_CLASSES];
968 };
969
970 /* Each `struct needs' corresponds to one RELOAD_... type. */
971 struct {
972 struct needs other;
973 struct needs input;
974 struct needs output;
975 struct needs insn;
976 struct needs other_addr;
977 struct needs op_addr;
893bc853 978 struct needs op_addr_reload;
8b3e912b
RK
979 struct needs in_addr[MAX_RECOG_OPERANDS];
980 struct needs out_addr[MAX_RECOG_OPERANDS];
981 } insn_needs;
32131a9c
RK
982
983 /* If needed, eliminate any eliminable registers. */
984 if (num_eliminable)
985 did_elimination = eliminate_regs_in_insn (insn, 0);
986
987#ifdef SMALL_REGISTER_CLASSES
988 /* Set avoid_return_reg if this is an insn
989 that might use the value of a function call. */
990 if (GET_CODE (insn) == CALL_INSN)
991 {
992 if (GET_CODE (PATTERN (insn)) == SET)
993 after_call = SET_DEST (PATTERN (insn));
994 else if (GET_CODE (PATTERN (insn)) == PARALLEL
995 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
996 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
997 else
998 after_call = 0;
999 }
1000 else if (after_call != 0
1001 && !(GET_CODE (PATTERN (insn)) == SET
1002 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1003 {
2b979c57 1004 if (reg_referenced_p (after_call, PATTERN (insn)))
32131a9c
RK
1005 avoid_return_reg = after_call;
1006 after_call = 0;
1007 }
1008#endif /* SMALL_REGISTER_CLASSES */
1009
1010 /* Analyze the instruction. */
1011 find_reloads (insn, 0, spill_indirect_levels, global,
1012 spill_reg_order);
1013
1014 /* Remember for later shortcuts which insns had any reloads or
1015 register eliminations.
1016
1017 One might think that it would be worthwhile to mark insns
1018 that need register replacements but not reloads, but this is
1019 not safe because find_reloads may do some manipulation of
1020 the insn (such as swapping commutative operands), which would
1021 be lost when we restore the old pattern after register
1022 replacement. So the actions of find_reloads must be redone in
1023 subsequent passes or in reload_as_needed.
1024
1025 However, it is safe to mark insns that need reloads
1026 but not register replacement. */
1027
1028 PUT_MODE (insn, (did_elimination ? QImode
1029 : n_reloads ? HImode
546b63fb 1030 : GET_MODE (insn) == DImode ? DImode
32131a9c
RK
1031 : VOIDmode));
1032
1033 /* Discard any register replacements done. */
1034 if (did_elimination)
1035 {
1036 obstack_free (&reload_obstack, reload_firstobj);
1037 PATTERN (insn) = old_body;
1038 INSN_CODE (insn) = old_code;
1039 REG_NOTES (insn) = old_notes;
1040 something_needs_elimination = 1;
1041 }
1042
a8efe40d 1043 /* If this insn has no reloads, we need not do anything except
a8fdc208 1044 in the case of a CALL_INSN when we have caller-saves and
a8efe40d
RK
1045 caller-save needs reloads. */
1046
1047 if (n_reloads == 0
1048 && ! (GET_CODE (insn) == CALL_INSN
1049 && caller_save_spill_class != NO_REGS))
32131a9c
RK
1050 continue;
1051
1052 something_needs_reloads = 1;
4c9a05bc 1053 bzero ((char *) &insn_needs, sizeof insn_needs);
32131a9c
RK
1054
1055 /* Count each reload once in every class
1056 containing the reload's own class. */
1057
1058 for (i = 0; i < n_reloads; i++)
1059 {
1060 register enum reg_class *p;
e85ddd99 1061 enum reg_class class = reload_reg_class[i];
32131a9c
RK
1062 int size;
1063 enum machine_mode mode;
ce0e109b 1064 int nongroup_need;
8b3e912b 1065 struct needs *this_needs;
32131a9c
RK
1066
1067 /* Don't count the dummy reloads, for which one of the
1068 regs mentioned in the insn can be used for reloading.
1069 Don't count optional reloads.
1070 Don't count reloads that got combined with others. */
1071 if (reload_reg_rtx[i] != 0
1072 || reload_optional[i] != 0
1073 || (reload_out[i] == 0 && reload_in[i] == 0
1074 && ! reload_secondary_p[i]))
1075 continue;
1076
e85ddd99
RK
1077 /* Show that a reload register of this class is needed
1078 in this basic block. We do not use insn_needs and
1079 insn_groups because they are overly conservative for
1080 this purpose. */
1081 if (global && ! basic_block_needs[(int) class][this_block])
1082 {
1083 basic_block_needs[(int) class][this_block] = 1;
1084 new_basic_block_needs = 1;
1085 }
1086
ee249c09
RK
1087
1088 mode = reload_inmode[i];
1089 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1090 mode = reload_outmode[i];
1091 size = CLASS_MAX_NREGS (class, mode);
1092
8b3e912b
RK
1093 /* If this class doesn't want a group, determine if we have
1094 a nongroup need or a regular need. We have a nongroup
1095 need if this reload conflicts with a group reload whose
1096 class intersects with this reload's class. */
ce0e109b
RK
1097
1098 nongroup_need = 0;
ee249c09 1099 if (size == 1)
b8f4c738
RK
1100 for (j = 0; j < n_reloads; j++)
1101 if ((CLASS_MAX_NREGS (reload_reg_class[j],
255cf280
RK
1102 (GET_MODE_SIZE (reload_outmode[j])
1103 > GET_MODE_SIZE (reload_inmode[j]))
1104 ? reload_outmode[j]
1105 : reload_inmode[j])
b8f4c738 1106 > 1)
893bc853
RK
1107 && (!reload_optional[j])
1108 && (reload_in[j] != 0 || reload_out[j] != 0
1109 || reload_secondary_p[j])
b8f4c738 1110 && reloads_conflict (i, j)
ce0e109b
RK
1111 && reg_classes_intersect_p (class,
1112 reload_reg_class[j]))
1113 {
1114 nongroup_need = 1;
1115 break;
1116 }
1117
32131a9c
RK
1118 /* Decide which time-of-use to count this reload for. */
1119 switch (reload_when_needed[i])
1120 {
1121 case RELOAD_OTHER:
8b3e912b 1122 this_needs = &insn_needs.other;
32131a9c 1123 break;
546b63fb 1124 case RELOAD_FOR_INPUT:
8b3e912b 1125 this_needs = &insn_needs.input;
32131a9c 1126 break;
546b63fb 1127 case RELOAD_FOR_OUTPUT:
8b3e912b 1128 this_needs = &insn_needs.output;
32131a9c 1129 break;
546b63fb 1130 case RELOAD_FOR_INSN:
8b3e912b 1131 this_needs = &insn_needs.insn;
546b63fb 1132 break;
546b63fb 1133 case RELOAD_FOR_OTHER_ADDRESS:
8b3e912b 1134 this_needs = &insn_needs.other_addr;
546b63fb 1135 break;
546b63fb 1136 case RELOAD_FOR_INPUT_ADDRESS:
8b3e912b 1137 this_needs = &insn_needs.in_addr[reload_opnum[i]];
546b63fb 1138 break;
546b63fb 1139 case RELOAD_FOR_OUTPUT_ADDRESS:
8b3e912b 1140 this_needs = &insn_needs.out_addr[reload_opnum[i]];
546b63fb 1141 break;
32131a9c 1142 case RELOAD_FOR_OPERAND_ADDRESS:
8b3e912b 1143 this_needs = &insn_needs.op_addr;
32131a9c 1144 break;
893bc853
RK
1145 case RELOAD_FOR_OPADDR_ADDR:
1146 this_needs = &insn_needs.op_addr_reload;
1147 break;
32131a9c
RK
1148 }
1149
32131a9c
RK
1150 if (size > 1)
1151 {
1152 enum machine_mode other_mode, allocate_mode;
1153
1154 /* Count number of groups needed separately from
1155 number of individual regs needed. */
8b3e912b 1156 this_needs->groups[(int) class]++;
e85ddd99 1157 p = reg_class_superclasses[(int) class];
32131a9c 1158 while (*p != LIM_REG_CLASSES)
8b3e912b 1159 this_needs->groups[(int) *p++]++;
32131a9c
RK
1160
1161 /* Record size and mode of a group of this class. */
1162 /* If more than one size group is needed,
1163 make all groups the largest needed size. */
e85ddd99 1164 if (group_size[(int) class] < size)
32131a9c 1165 {
e85ddd99 1166 other_mode = group_mode[(int) class];
32131a9c
RK
1167 allocate_mode = mode;
1168
e85ddd99
RK
1169 group_size[(int) class] = size;
1170 group_mode[(int) class] = mode;
32131a9c
RK
1171 }
1172 else
1173 {
1174 other_mode = mode;
e85ddd99 1175 allocate_mode = group_mode[(int) class];
32131a9c
RK
1176 }
1177
1178 /* Crash if two dissimilar machine modes both need
1179 groups of consecutive regs of the same class. */
1180
8b3e912b 1181 if (other_mode != VOIDmode && other_mode != allocate_mode
32131a9c 1182 && ! modes_equiv_for_class_p (allocate_mode,
8b3e912b 1183 other_mode, class))
a89b2cc4
MM
1184 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1185 insn);
32131a9c
RK
1186 }
1187 else if (size == 1)
1188 {
8b3e912b 1189 this_needs->regs[nongroup_need][(int) class] += 1;
e85ddd99 1190 p = reg_class_superclasses[(int) class];
32131a9c 1191 while (*p != LIM_REG_CLASSES)
8b3e912b 1192 this_needs->regs[nongroup_need][(int) *p++] += 1;
32131a9c
RK
1193 }
1194 else
1195 abort ();
1196 }
1197
1198 /* All reloads have been counted for this insn;
1199 now merge the various times of use.
1200 This sets insn_needs, etc., to the maximum total number
1201 of registers needed at any point in this insn. */
1202
1203 for (i = 0; i < N_REG_CLASSES; i++)
1204 {
546b63fb
RK
1205 int in_max, out_max;
1206
8b3e912b
RK
1207 /* Compute normal and nongroup needs. */
1208 for (j = 0; j <= 1; j++)
546b63fb 1209 {
8b3e912b
RK
1210 for (in_max = 0, out_max = 0, k = 0;
1211 k < reload_n_operands; k++)
1212 {
1213 in_max
1214 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1215 out_max
1216 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1217 }
546b63fb 1218
8b3e912b
RK
1219 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1220 and operand addresses but not things used to reload
1221 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1222 don't conflict with things needed to reload inputs or
1223 outputs. */
546b63fb 1224
893bc853
RK
1225 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1226 insn_needs.op_addr_reload.regs[j][i]),
1227 in_max);
1228
8b3e912b 1229 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
546b63fb 1230
8b3e912b
RK
1231 insn_needs.input.regs[j][i]
1232 = MAX (insn_needs.input.regs[j][i]
1233 + insn_needs.op_addr.regs[j][i]
1234 + insn_needs.insn.regs[j][i],
1235 in_max + insn_needs.input.regs[j][i]);
546b63fb 1236
8b3e912b
RK
1237 insn_needs.output.regs[j][i] += out_max;
1238 insn_needs.other.regs[j][i]
1239 += MAX (MAX (insn_needs.input.regs[j][i],
1240 insn_needs.output.regs[j][i]),
1241 insn_needs.other_addr.regs[j][i]);
546b63fb 1242
ce0e109b
RK
1243 }
1244
8b3e912b 1245 /* Now compute group needs. */
546b63fb
RK
1246 for (in_max = 0, out_max = 0, j = 0;
1247 j < reload_n_operands; j++)
1248 {
8b3e912b
RK
1249 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1250 out_max
1251 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
546b63fb
RK
1252 }
1253
893bc853
RK
1254 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1255 insn_needs.op_addr_reload.groups[i]),
1256 in_max);
8b3e912b 1257 out_max = MAX (out_max, insn_needs.insn.groups[i]);
546b63fb 1258
8b3e912b
RK
1259 insn_needs.input.groups[i]
1260 = MAX (insn_needs.input.groups[i]
1261 + insn_needs.op_addr.groups[i]
1262 + insn_needs.insn.groups[i],
1263 in_max + insn_needs.input.groups[i]);
546b63fb 1264
8b3e912b
RK
1265 insn_needs.output.groups[i] += out_max;
1266 insn_needs.other.groups[i]
1267 += MAX (MAX (insn_needs.input.groups[i],
1268 insn_needs.output.groups[i]),
1269 insn_needs.other_addr.groups[i]);
546b63fb
RK
1270 }
1271
a8efe40d
RK
1272 /* If this is a CALL_INSN and caller-saves will need
1273 a spill register, act as if the spill register is
1274 needed for this insn. However, the spill register
1275 can be used by any reload of this insn, so we only
1276 need do something if no need for that class has
a8fdc208 1277 been recorded.
a8efe40d
RK
1278
1279 The assumption that every CALL_INSN will trigger a
1280 caller-save is highly conservative, however, the number
1281 of cases where caller-saves will need a spill register but
1282 a block containing a CALL_INSN won't need a spill register
1283 of that class should be quite rare.
1284
1285 If a group is needed, the size and mode of the group will
d45cf215 1286 have been set up at the beginning of this loop. */
a8efe40d
RK
1287
1288 if (GET_CODE (insn) == CALL_INSN
1289 && caller_save_spill_class != NO_REGS)
1290 {
8b3e912b
RK
1291 /* See if this register would conflict with any reload
1292 that needs a group. */
1293 int nongroup_need = 0;
1294 int *caller_save_needs;
1295
1296 for (j = 0; j < n_reloads; j++)
1297 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1298 (GET_MODE_SIZE (reload_outmode[j])
1299 > GET_MODE_SIZE (reload_inmode[j]))
1300 ? reload_outmode[j]
1301 : reload_inmode[j])
1302 > 1)
1303 && reg_classes_intersect_p (caller_save_spill_class,
1304 reload_reg_class[j]))
1305 {
1306 nongroup_need = 1;
1307 break;
1308 }
1309
1310 caller_save_needs
1311 = (caller_save_group_size > 1
1312 ? insn_needs.other.groups
1313 : insn_needs.other.regs[nongroup_need]);
a8efe40d
RK
1314
1315 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1316 {
1317 register enum reg_class *p
1318 = reg_class_superclasses[(int) caller_save_spill_class];
1319
1320 caller_save_needs[(int) caller_save_spill_class]++;
1321
1322 while (*p != LIM_REG_CLASSES)
0aaa6af8 1323 caller_save_needs[(int) *p++] += 1;
a8efe40d
RK
1324 }
1325
8b3e912b 1326 /* Show that this basic block will need a register of
d1c1397e
RS
1327 this class. */
1328
8b3e912b
RK
1329 if (global
1330 && ! (basic_block_needs[(int) caller_save_spill_class]
1331 [this_block]))
1332 {
1333 basic_block_needs[(int) caller_save_spill_class]
1334 [this_block] = 1;
1335 new_basic_block_needs = 1;
1336 }
a8efe40d
RK
1337 }
1338
32131a9c
RK
1339#ifdef SMALL_REGISTER_CLASSES
1340 /* If this insn stores the value of a function call,
1341 and that value is in a register that has been spilled,
1342 and if the insn needs a reload in a class
1343 that might use that register as the reload register,
1344 then add add an extra need in that class.
1345 This makes sure we have a register available that does
1346 not overlap the return value. */
8b3e912b 1347
32131a9c
RK
1348 if (avoid_return_reg)
1349 {
1350 int regno = REGNO (avoid_return_reg);
1351 int nregs
1352 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1353 int r;
546b63fb
RK
1354 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1355
1356 /* First compute the "basic needs", which counts a
1357 need only in the smallest class in which it
1358 is required. */
1359
9b232232
RK
1360 bcopy ((char *) insn_needs.other.regs[0],
1361 (char *) basic_needs, sizeof basic_needs);
1362 bcopy ((char *) insn_needs.other.groups,
1363 (char *) basic_groups, sizeof basic_groups);
546b63fb
RK
1364
1365 for (i = 0; i < N_REG_CLASSES; i++)
1366 {
1367 enum reg_class *p;
1368
1369 if (basic_needs[i] >= 0)
1370 for (p = reg_class_superclasses[i];
1371 *p != LIM_REG_CLASSES; p++)
1372 basic_needs[(int) *p] -= basic_needs[i];
1373
1374 if (basic_groups[i] >= 0)
1375 for (p = reg_class_superclasses[i];
1376 *p != LIM_REG_CLASSES; p++)
1377 basic_groups[(int) *p] -= basic_groups[i];
1378 }
1379
1380 /* Now count extra regs if there might be a conflict with
af432130 1381 the return value register. */
546b63fb 1382
32131a9c
RK
1383 for (r = regno; r < regno + nregs; r++)
1384 if (spill_reg_order[r] >= 0)
1385 for (i = 0; i < N_REG_CLASSES; i++)
1386 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1387 {
af432130 1388 if (basic_needs[i] > 0)
546b63fb
RK
1389 {
1390 enum reg_class *p;
1391
8b3e912b 1392 insn_needs.other.regs[0][i]++;
546b63fb
RK
1393 p = reg_class_superclasses[i];
1394 while (*p != LIM_REG_CLASSES)
8b3e912b 1395 insn_needs.other.regs[0][(int) *p++]++;
546b63fb 1396 }
af432130
RK
1397 if (basic_groups[i] > 0)
1398 {
1399 enum reg_class *p;
1400
1401 insn_needs.other.groups[i]++;
1402 p = reg_class_superclasses[i];
1403 while (*p != LIM_REG_CLASSES)
1404 insn_needs.other.groups[(int) *p++]++;
1405 }
32131a9c 1406 }
32131a9c
RK
1407 }
1408#endif /* SMALL_REGISTER_CLASSES */
1409
1410 /* For each class, collect maximum need of any insn. */
1411
1412 for (i = 0; i < N_REG_CLASSES; i++)
1413 {
8b3e912b 1414 if (max_needs[i] < insn_needs.other.regs[0][i])
5352b11a 1415 {
8b3e912b 1416 max_needs[i] = insn_needs.other.regs[0][i];
5352b11a
RS
1417 max_needs_insn[i] = insn;
1418 }
8b3e912b 1419 if (max_groups[i] < insn_needs.other.groups[i])
5352b11a 1420 {
8b3e912b 1421 max_groups[i] = insn_needs.other.groups[i];
5352b11a
RS
1422 max_groups_insn[i] = insn;
1423 }
8b3e912b 1424 if (max_nongroups[i] < insn_needs.other.regs[1][i])
ce0e109b 1425 {
8b3e912b 1426 max_nongroups[i] = insn_needs.other.regs[1][i];
ce0e109b
RK
1427 max_nongroups_insn[i] = insn;
1428 }
32131a9c
RK
1429 }
1430 }
1431 /* Note that there is a continue statement above. */
1432 }
1433
0dadecf6
RK
1434 /* If we allocated any new memory locations, make another pass
1435 since it might have changed elimination offsets. */
1436 if (starting_frame_size != get_frame_size ())
1437 something_changed = 1;
1438
e404a39a
RK
1439 if (dumpfile)
1440 for (i = 0; i < N_REG_CLASSES; i++)
1441 {
1442 if (max_needs[i] > 0)
1443 fprintf (dumpfile,
1444 ";; Need %d reg%s of class %s (for insn %d).\n",
1445 max_needs[i], max_needs[i] == 1 ? "" : "s",
1446 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1447 if (max_nongroups[i] > 0)
1448 fprintf (dumpfile,
1449 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1450 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1451 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1452 if (max_groups[i] > 0)
1453 fprintf (dumpfile,
1454 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1455 max_groups[i], max_groups[i] == 1 ? "" : "s",
1456 mode_name[(int) group_mode[i]],
1457 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1458 }
1459
d445b551 1460 /* If we have caller-saves, set up the save areas and see if caller-save
a8efe40d 1461 will need a spill register. */
32131a9c 1462
d445b551 1463 if (caller_save_needed
a8efe40d
RK
1464 && ! setup_save_areas (&something_changed)
1465 && caller_save_spill_class == NO_REGS)
32131a9c 1466 {
a8efe40d
RK
1467 /* The class we will need depends on whether the machine
1468 supports the sum of two registers for an address; see
1469 find_address_reloads for details. */
1470
a8fdc208 1471 caller_save_spill_class
a8efe40d
RK
1472 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1473 caller_save_group_size
1474 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1475 something_changed = 1;
32131a9c
RK
1476 }
1477
5c23c401
RK
1478 /* See if anything that happened changes which eliminations are valid.
1479 For example, on the Sparc, whether or not the frame pointer can
1480 be eliminated can depend on what registers have been used. We need
1481 not check some conditions again (such as flag_omit_frame_pointer)
1482 since they can't have changed. */
1483
1484 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3ec2ea3e 1485 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
5c23c401
RK
1486#ifdef ELIMINABLE_REGS
1487 || ! CAN_ELIMINATE (ep->from, ep->to)
1488#endif
1489 )
1490 ep->can_eliminate = 0;
1491
32131a9c
RK
1492 /* Look for the case where we have discovered that we can't replace
1493 register A with register B and that means that we will now be
1494 trying to replace register A with register C. This means we can
1495 no longer replace register C with register B and we need to disable
1496 such an elimination, if it exists. This occurs often with A == ap,
1497 B == sp, and C == fp. */
a8fdc208 1498
32131a9c
RK
1499 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1500 {
1501 struct elim_table *op;
1502 register int new_to = -1;
1503
1504 if (! ep->can_eliminate && ep->can_eliminate_previous)
1505 {
1506 /* Find the current elimination for ep->from, if there is a
1507 new one. */
1508 for (op = reg_eliminate;
1509 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1510 if (op->from == ep->from && op->can_eliminate)
1511 {
1512 new_to = op->to;
1513 break;
1514 }
1515
1516 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1517 disable it. */
1518 for (op = reg_eliminate;
1519 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1520 if (op->from == new_to && op->to == ep->to)
1521 op->can_eliminate = 0;
1522 }
1523 }
1524
1525 /* See if any registers that we thought we could eliminate the previous
1526 time are no longer eliminable. If so, something has changed and we
1527 must spill the register. Also, recompute the number of eliminable
1528 registers and see if the frame pointer is needed; it is if there is
1529 no elimination of the frame pointer that we can perform. */
1530
1531 frame_pointer_needed = 1;
1532 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1533 {
3ec2ea3e
DE
1534 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1535 && ep->to != HARD_FRAME_POINTER_REGNUM)
32131a9c
RK
1536 frame_pointer_needed = 0;
1537
1538 if (! ep->can_eliminate && ep->can_eliminate_previous)
1539 {
1540 ep->can_eliminate_previous = 0;
1541 spill_hard_reg (ep->from, global, dumpfile, 1);
32131a9c
RK
1542 something_changed = 1;
1543 num_eliminable--;
1544 }
1545 }
1546
9ff3516a
RK
1547#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1548 /* If we didn't need a frame pointer last time, but we do now, spill
1549 the hard frame pointer. */
1550 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1551 {
1552 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1553 something_changed = 1;
1554 }
1555#endif
1556
32131a9c
RK
1557 /* If all needs are met, we win. */
1558
1559 for (i = 0; i < N_REG_CLASSES; i++)
1560 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1561 break;
1562 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1563 break;
1564
546b63fb
RK
1565 /* Not all needs are met; must spill some hard regs. */
1566
1567 /* Put all registers spilled so far back in potential_reload_regs, but
1568 put them at the front, since we've already spilled most of the
9faa82d8 1569 pseudos in them (we might have left some pseudos unspilled if they
546b63fb
RK
1570 were in a block that didn't need any spill registers of a conflicting
1571 class. We used to try to mark off the need for those registers,
1572 but doing so properly is very complex and reallocating them is the
1573 simpler approach. First, "pack" potential_reload_regs by pushing
1574 any nonnegative entries towards the end. That will leave room
1575 for the registers we already spilled.
1576
1577 Also, undo the marking of the spill registers from the last time
1578 around in FORBIDDEN_REGS since we will be probably be allocating
1579 them again below.
1580
1581 ??? It is theoretically possible that we might end up not using one
1582 of our previously-spilled registers in this allocation, even though
1583 they are at the head of the list. It's not clear what to do about
1584 this, but it was no better before, when we marked off the needs met
1585 by the previously-spilled registers. With the current code, globals
1586 can be allocated into these registers, but locals cannot. */
1587
1588 if (n_spills)
1589 {
1590 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1591 if (potential_reload_regs[i] != -1)
1592 potential_reload_regs[j--] = potential_reload_regs[i];
32131a9c 1593
546b63fb
RK
1594 for (i = 0; i < n_spills; i++)
1595 {
1596 potential_reload_regs[i] = spill_regs[i];
1597 spill_reg_order[spill_regs[i]] = -1;
1598 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1599 }
32131a9c 1600
546b63fb
RK
1601 n_spills = 0;
1602 }
32131a9c
RK
1603
1604 /* Now find more reload regs to satisfy the remaining need
1605 Do it by ascending class number, since otherwise a reg
1606 might be spilled for a big class and might fail to count
1607 for a smaller class even though it belongs to that class.
1608
1609 Count spilled regs in `spills', and add entries to
1610 `spill_regs' and `spill_reg_order'.
1611
1612 ??? Note there is a problem here.
1613 When there is a need for a group in a high-numbered class,
1614 and also need for non-group regs that come from a lower class,
1615 the non-group regs are chosen first. If there aren't many regs,
1616 they might leave no room for a group.
1617
1618 This was happening on the 386. To fix it, we added the code
1619 that calls possible_group_p, so that the lower class won't
1620 break up the last possible group.
1621
1622 Really fixing the problem would require changes above
1623 in counting the regs already spilled, and in choose_reload_regs.
1624 It might be hard to avoid introducing bugs there. */
1625
546b63fb
RK
1626 CLEAR_HARD_REG_SET (counted_for_groups);
1627 CLEAR_HARD_REG_SET (counted_for_nongroups);
1628
32131a9c
RK
1629 for (class = 0; class < N_REG_CLASSES; class++)
1630 {
1631 /* First get the groups of registers.
1632 If we got single registers first, we might fragment
1633 possible groups. */
1634 while (max_groups[class] > 0)
1635 {
1636 /* If any single spilled regs happen to form groups,
1637 count them now. Maybe we don't really need
1638 to spill another group. */
066aca28
RK
1639 count_possible_groups (group_size, group_mode, max_groups,
1640 class);
32131a9c 1641
93193ab5
RK
1642 if (max_groups[class] <= 0)
1643 break;
1644
32131a9c
RK
1645 /* Groups of size 2 (the only groups used on most machines)
1646 are treated specially. */
1647 if (group_size[class] == 2)
1648 {
1649 /* First, look for a register that will complete a group. */
1650 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1651 {
32131a9c 1652 int other;
546b63fb
RK
1653
1654 j = potential_reload_regs[i];
32131a9c
RK
1655 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1656 &&
1657 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1658 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1659 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1660 && HARD_REGNO_MODE_OK (other, group_mode[class])
1661 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1662 other)
1663 /* We don't want one part of another group.
1664 We could get "two groups" that overlap! */
1665 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1666 ||
1667 (j < FIRST_PSEUDO_REGISTER - 1
1668 && (other = j + 1, spill_reg_order[other] >= 0)
1669 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1670 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1671 && HARD_REGNO_MODE_OK (j, group_mode[class])
1672 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1673 other)
1674 && ! TEST_HARD_REG_BIT (counted_for_groups,
1675 other))))
1676 {
1677 register enum reg_class *p;
1678
1679 /* We have found one that will complete a group,
1680 so count off one group as provided. */
1681 max_groups[class]--;
1682 p = reg_class_superclasses[class];
1683 while (*p != LIM_REG_CLASSES)
d601d5da
JW
1684 {
1685 if (group_size [(int) *p] <= group_size [class])
1686 max_groups[(int) *p]--;
1687 p++;
1688 }
32131a9c
RK
1689
1690 /* Indicate both these regs are part of a group. */
1691 SET_HARD_REG_BIT (counted_for_groups, j);
1692 SET_HARD_REG_BIT (counted_for_groups, other);
1693 break;
1694 }
1695 }
1696 /* We can't complete a group, so start one. */
92b0556d
RS
1697#ifdef SMALL_REGISTER_CLASSES
1698 /* Look for a pair neither of which is explicitly used. */
1699 if (i == FIRST_PSEUDO_REGISTER)
1700 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1701 {
1702 int k;
1703 j = potential_reload_regs[i];
1704 /* Verify that J+1 is a potential reload reg. */
1705 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1706 if (potential_reload_regs[k] == j + 1)
1707 break;
1708 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1709 && k < FIRST_PSEUDO_REGISTER
1710 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1711 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1712 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1713 && HARD_REGNO_MODE_OK (j, group_mode[class])
1714 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1715 j + 1)
1716 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1717 /* Reject J at this stage
1718 if J+1 was explicitly used. */
1719 && ! regs_explicitly_used[j + 1])
1720 break;
1721 }
1722#endif
1723 /* Now try any group at all
1724 whose registers are not in bad_spill_regs. */
32131a9c
RK
1725 if (i == FIRST_PSEUDO_REGISTER)
1726 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1727 {
57697575 1728 int k;
546b63fb 1729 j = potential_reload_regs[i];
57697575
RS
1730 /* Verify that J+1 is a potential reload reg. */
1731 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1732 if (potential_reload_regs[k] == j + 1)
1733 break;
32131a9c 1734 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
57697575 1735 && k < FIRST_PSEUDO_REGISTER
32131a9c
RK
1736 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1737 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1738 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1739 && HARD_REGNO_MODE_OK (j, group_mode[class])
1740 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
ee9f63c6
RS
1741 j + 1)
1742 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
32131a9c
RK
1743 break;
1744 }
1745
1746 /* I should be the index in potential_reload_regs
1747 of the new reload reg we have found. */
1748
5352b11a
RS
1749 if (i >= FIRST_PSEUDO_REGISTER)
1750 {
1751 /* There are no groups left to spill. */
1752 spill_failure (max_groups_insn[class]);
1753 failure = 1;
1754 goto failed;
1755 }
1756 else
1757 something_changed
fb3821f7 1758 |= new_spill_reg (i, class, max_needs, NULL_PTR,
5352b11a 1759 global, dumpfile);
32131a9c
RK
1760 }
1761 else
1762 {
1763 /* For groups of more than 2 registers,
1764 look for a sufficient sequence of unspilled registers,
1765 and spill them all at once. */
1766 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1767 {
32131a9c 1768 int k;
546b63fb
RK
1769
1770 j = potential_reload_regs[i];
9d1a4667
RS
1771 if (j >= 0
1772 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
32131a9c
RK
1773 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1774 {
1775 /* Check each reg in the sequence. */
1776 for (k = 0; k < group_size[class]; k++)
1777 if (! (spill_reg_order[j + k] < 0
1778 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1779 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1780 break;
1781 /* We got a full sequence, so spill them all. */
1782 if (k == group_size[class])
1783 {
1784 register enum reg_class *p;
1785 for (k = 0; k < group_size[class]; k++)
1786 {
1787 int idx;
1788 SET_HARD_REG_BIT (counted_for_groups, j + k);
1789 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1790 if (potential_reload_regs[idx] == j + k)
1791 break;
9d1a4667
RS
1792 something_changed
1793 |= new_spill_reg (idx, class,
1794 max_needs, NULL_PTR,
1795 global, dumpfile);
32131a9c
RK
1796 }
1797
1798 /* We have found one that will complete a group,
1799 so count off one group as provided. */
1800 max_groups[class]--;
1801 p = reg_class_superclasses[class];
1802 while (*p != LIM_REG_CLASSES)
d601d5da
JW
1803 {
1804 if (group_size [(int) *p]
1805 <= group_size [class])
1806 max_groups[(int) *p]--;
1807 p++;
1808 }
32131a9c
RK
1809 break;
1810 }
1811 }
1812 }
fa52261e 1813 /* We couldn't find any registers for this reload.
9d1a4667
RS
1814 Avoid going into an infinite loop. */
1815 if (i >= FIRST_PSEUDO_REGISTER)
1816 {
1817 /* There are no groups left. */
1818 spill_failure (max_groups_insn[class]);
1819 failure = 1;
1820 goto failed;
1821 }
32131a9c
RK
1822 }
1823 }
1824
1825 /* Now similarly satisfy all need for single registers. */
1826
1827 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1828 {
9a6cde3a
RS
1829 /* If we spilled enough regs, but they weren't counted
1830 against the non-group need, see if we can count them now.
1831 If so, we can avoid some actual spilling. */
1832 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1833 for (i = 0; i < n_spills; i++)
1834 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1835 spill_regs[i])
1836 && !TEST_HARD_REG_BIT (counted_for_groups,
1837 spill_regs[i])
1838 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1839 spill_regs[i])
1840 && max_nongroups[class] > 0)
1841 {
1842 register enum reg_class *p;
1843
1844 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1845 max_nongroups[class]--;
1846 p = reg_class_superclasses[class];
1847 while (*p != LIM_REG_CLASSES)
1848 max_nongroups[(int) *p++]--;
1849 }
1850 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1851 break;
9a6cde3a 1852
32131a9c
RK
1853 /* Consider the potential reload regs that aren't
1854 yet in use as reload regs, in order of preference.
1855 Find the most preferred one that's in this class. */
1856
1857 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1858 if (potential_reload_regs[i] >= 0
1859 && TEST_HARD_REG_BIT (reg_class_contents[class],
1860 potential_reload_regs[i])
1861 /* If this reg will not be available for groups,
1862 pick one that does not foreclose possible groups.
1863 This is a kludge, and not very general,
1864 but it should be sufficient to make the 386 work,
1865 and the problem should not occur on machines with
1866 more registers. */
1867 && (max_nongroups[class] == 0
1868 || possible_group_p (potential_reload_regs[i], max_groups)))
1869 break;
1870
e404a39a
RK
1871 /* If we couldn't get a register, try to get one even if we
1872 might foreclose possible groups. This may cause problems
1873 later, but that's better than aborting now, since it is
1874 possible that we will, in fact, be able to form the needed
1875 group even with this allocation. */
1876
1877 if (i >= FIRST_PSEUDO_REGISTER
1878 && (asm_noperands (max_needs[class] > 0
1879 ? max_needs_insn[class]
1880 : max_nongroups_insn[class])
1881 < 0))
1882 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1883 if (potential_reload_regs[i] >= 0
1884 && TEST_HARD_REG_BIT (reg_class_contents[class],
1885 potential_reload_regs[i]))
1886 break;
1887
32131a9c
RK
1888 /* I should be the index in potential_reload_regs
1889 of the new reload reg we have found. */
1890
5352b11a
RS
1891 if (i >= FIRST_PSEUDO_REGISTER)
1892 {
1893 /* There are no possible registers left to spill. */
1894 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1895 : max_nongroups_insn[class]);
1896 failure = 1;
1897 goto failed;
1898 }
1899 else
1900 something_changed
1901 |= new_spill_reg (i, class, max_needs, max_nongroups,
1902 global, dumpfile);
32131a9c
RK
1903 }
1904 }
1905 }
1906
1907 /* If global-alloc was run, notify it of any register eliminations we have
1908 done. */
1909 if (global)
1910 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1911 if (ep->can_eliminate)
1912 mark_elimination (ep->from, ep->to);
1913
32131a9c 1914 /* Insert code to save and restore call-clobbered hard regs
a8efe40d
RK
1915 around calls. Tell if what mode to use so that we will process
1916 those insns in reload_as_needed if we have to. */
32131a9c
RK
1917
1918 if (caller_save_needed)
a8efe40d
RK
1919 save_call_clobbered_regs (num_eliminable ? QImode
1920 : caller_save_spill_class != NO_REGS ? HImode
1921 : VOIDmode);
32131a9c
RK
1922
1923 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1924 If that insn didn't set the register (i.e., it copied the register to
1925 memory), just delete that insn instead of the equivalencing insn plus
1926 anything now dead. If we call delete_dead_insn on that insn, we may
1927 delete the insn that actually sets the register if the register die
1928 there and that is incorrect. */
1929
1930 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1931 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1932 && GET_CODE (reg_equiv_init[i]) != NOTE)
1933 {
1934 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1935 delete_dead_insn (reg_equiv_init[i]);
1936 else
1937 {
1938 PUT_CODE (reg_equiv_init[i], NOTE);
1939 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1940 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1941 }
1942 }
1943
1944 /* Use the reload registers where necessary
1945 by generating move instructions to move the must-be-register
1946 values into or out of the reload registers. */
1947
a8efe40d
RK
1948 if (something_needs_reloads || something_needs_elimination
1949 || (caller_save_needed && num_eliminable)
1950 || caller_save_spill_class != NO_REGS)
32131a9c
RK
1951 reload_as_needed (first, global);
1952
2a1f8b6b 1953 /* If we were able to eliminate the frame pointer, show that it is no
546b63fb 1954 longer live at the start of any basic block. If it ls live by
2a1f8b6b
RK
1955 virtue of being in a pseudo, that pseudo will be marked live
1956 and hence the frame pointer will be known to be live via that
1957 pseudo. */
1958
1959 if (! frame_pointer_needed)
1960 for (i = 0; i < n_basic_blocks; i++)
3ec2ea3e
DE
1961 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1962 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1963 % REGSET_ELT_BITS));
2a1f8b6b 1964
5352b11a
RS
1965 /* Come here (with failure set nonzero) if we can't get enough spill regs
1966 and we decide not to abort about it. */
1967 failed:
1968
a3ec87a8
RS
1969 reload_in_progress = 0;
1970
32131a9c
RK
1971 /* Now eliminate all pseudo regs by modifying them into
1972 their equivalent memory references.
1973 The REG-rtx's for the pseudos are modified in place,
1974 so all insns that used to refer to them now refer to memory.
1975
1976 For a reg that has a reg_equiv_address, all those insns
1977 were changed by reloading so that no insns refer to it any longer;
1978 but the DECL_RTL of a variable decl may refer to it,
1979 and if so this causes the debugging info to mention the variable. */
1980
1981 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1982 {
1983 rtx addr = 0;
ab1fd483 1984 int in_struct = 0;
32131a9c 1985 if (reg_equiv_mem[i])
ab1fd483
RS
1986 {
1987 addr = XEXP (reg_equiv_mem[i], 0);
1988 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1989 }
32131a9c
RK
1990 if (reg_equiv_address[i])
1991 addr = reg_equiv_address[i];
1992 if (addr)
1993 {
1994 if (reg_renumber[i] < 0)
1995 {
1996 rtx reg = regno_reg_rtx[i];
1997 XEXP (reg, 0) = addr;
1998 REG_USERVAR_P (reg) = 0;
ab1fd483 1999 MEM_IN_STRUCT_P (reg) = in_struct;
32131a9c
RK
2000 PUT_CODE (reg, MEM);
2001 }
2002 else if (reg_equiv_mem[i])
2003 XEXP (reg_equiv_mem[i], 0) = addr;
2004 }
2005 }
2006
2007#ifdef PRESERVE_DEATH_INFO_REGNO_P
2008 /* Make a pass over all the insns and remove death notes for things that
2009 are no longer registers or no longer die in the insn (e.g., an input
2010 and output pseudo being tied). */
2011
2012 for (insn = first; insn; insn = NEXT_INSN (insn))
2013 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2014 {
2015 rtx note, next;
2016
2017 for (note = REG_NOTES (insn); note; note = next)
2018 {
2019 next = XEXP (note, 1);
2020 if (REG_NOTE_KIND (note) == REG_DEAD
2021 && (GET_CODE (XEXP (note, 0)) != REG
2022 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2023 remove_note (insn, note);
2024 }
2025 }
2026#endif
2027
2028 /* Indicate that we no longer have known memory locations or constants. */
2029 reg_equiv_constant = 0;
2030 reg_equiv_memory_loc = 0;
5352b11a 2031
c8ab4464
RS
2032 if (scratch_list)
2033 free (scratch_list);
c307c237 2034 scratch_list = 0;
c8ab4464
RS
2035 if (scratch_block)
2036 free (scratch_block);
c307c237
RK
2037 scratch_block = 0;
2038
8b4f9969
JW
2039 CLEAR_HARD_REG_SET (used_spill_regs);
2040 for (i = 0; i < n_spills; i++)
2041 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2042
5352b11a 2043 return failure;
32131a9c
RK
2044}
2045\f
2046/* Nonzero if, after spilling reg REGNO for non-groups,
2047 it will still be possible to find a group if we still need one. */
2048
2049static int
2050possible_group_p (regno, max_groups)
2051 int regno;
2052 int *max_groups;
2053{
2054 int i;
2055 int class = (int) NO_REGS;
2056
2057 for (i = 0; i < (int) N_REG_CLASSES; i++)
2058 if (max_groups[i] > 0)
2059 {
2060 class = i;
2061 break;
2062 }
2063
2064 if (class == (int) NO_REGS)
2065 return 1;
2066
2067 /* Consider each pair of consecutive registers. */
2068 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2069 {
2070 /* Ignore pairs that include reg REGNO. */
2071 if (i == regno || i + 1 == regno)
2072 continue;
2073
2074 /* Ignore pairs that are outside the class that needs the group.
2075 ??? Here we fail to handle the case where two different classes
2076 independently need groups. But this never happens with our
2077 current machine descriptions. */
2078 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2079 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2080 continue;
2081
2082 /* A pair of consecutive regs we can still spill does the trick. */
2083 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2084 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2085 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2086 return 1;
2087
2088 /* A pair of one already spilled and one we can spill does it
2089 provided the one already spilled is not otherwise reserved. */
2090 if (spill_reg_order[i] < 0
2091 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2092 && spill_reg_order[i + 1] >= 0
2093 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2094 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2095 return 1;
2096 if (spill_reg_order[i + 1] < 0
2097 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2098 && spill_reg_order[i] >= 0
2099 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2100 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2101 return 1;
2102 }
2103
2104 return 0;
2105}
2106\f
066aca28
RK
2107/* Count any groups of CLASS that can be formed from the registers recently
2108 spilled. */
32131a9c
RK
2109
2110static void
066aca28 2111count_possible_groups (group_size, group_mode, max_groups, class)
546b63fb 2112 int *group_size;
32131a9c 2113 enum machine_mode *group_mode;
546b63fb 2114 int *max_groups;
066aca28 2115 int class;
32131a9c 2116{
066aca28
RK
2117 HARD_REG_SET new;
2118 int i, j;
2119
32131a9c
RK
2120 /* Now find all consecutive groups of spilled registers
2121 and mark each group off against the need for such groups.
2122 But don't count them against ordinary need, yet. */
2123
066aca28
RK
2124 if (group_size[class] == 0)
2125 return;
2126
2127 CLEAR_HARD_REG_SET (new);
2128
2129 /* Make a mask of all the regs that are spill regs in class I. */
2130 for (i = 0; i < n_spills; i++)
2131 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2132 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2133 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2134 SET_HARD_REG_BIT (new, spill_regs[i]);
2135
2136 /* Find each consecutive group of them. */
2137 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2138 if (TEST_HARD_REG_BIT (new, i)
2139 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2140 && HARD_REGNO_MODE_OK (i, group_mode[class]))
32131a9c 2141 {
066aca28
RK
2142 for (j = 1; j < group_size[class]; j++)
2143 if (! TEST_HARD_REG_BIT (new, i + j))
2144 break;
32131a9c 2145
066aca28
RK
2146 if (j == group_size[class])
2147 {
2148 /* We found a group. Mark it off against this class's need for
2149 groups, and against each superclass too. */
2150 register enum reg_class *p;
2151
2152 max_groups[class]--;
2153 p = reg_class_superclasses[class];
2154 while (*p != LIM_REG_CLASSES)
d601d5da
JW
2155 {
2156 if (group_size [(int) *p] <= group_size [class])
2157 max_groups[(int) *p]--;
2158 p++;
2159 }
066aca28
RK
2160
2161 /* Don't count these registers again. */
46a70e45 2162 for (j = 0; j < group_size[class]; j++)
066aca28
RK
2163 SET_HARD_REG_BIT (counted_for_groups, i + j);
2164 }
2165
2166 /* Skip to the last reg in this group. When i is incremented above,
2167 it will then point to the first reg of the next possible group. */
2168 i += j - 1;
2169 }
32131a9c
RK
2170}
2171\f
2172/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2173 another mode that needs to be reloaded for the same register class CLASS.
2174 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2175 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2176
2177 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2178 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2179 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2180 causes unnecessary failures on machines requiring alignment of register
2181 groups when the two modes are different sizes, because the larger mode has
2182 more strict alignment rules than the smaller mode. */
2183
2184static int
2185modes_equiv_for_class_p (allocate_mode, other_mode, class)
2186 enum machine_mode allocate_mode, other_mode;
2187 enum reg_class class;
2188{
2189 register int regno;
2190 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2191 {
2192 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2193 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2194 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2195 return 0;
2196 }
2197 return 1;
2198}
2199
5352b11a
RS
2200/* Handle the failure to find a register to spill.
2201 INSN should be one of the insns which needed this particular spill reg. */
2202
2203static void
2204spill_failure (insn)
2205 rtx insn;
2206{
2207 if (asm_noperands (PATTERN (insn)) >= 0)
2208 error_for_asm (insn, "`asm' needs too many reloads");
2209 else
a89b2cc4 2210 fatal_insn ("Unable to find a register to spill.", insn);
5352b11a
RS
2211}
2212
32131a9c
RK
2213/* Add a new register to the tables of available spill-registers
2214 (as well as spilling all pseudos allocated to the register).
2215 I is the index of this register in potential_reload_regs.
2216 CLASS is the regclass whose need is being satisfied.
2217 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2218 so that this register can count off against them.
2219 MAX_NONGROUPS is 0 if this register is part of a group.
2220 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2221
2222static int
2223new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2224 int i;
2225 int class;
2226 int *max_needs;
2227 int *max_nongroups;
2228 int global;
2229 FILE *dumpfile;
2230{
2231 register enum reg_class *p;
2232 int val;
2233 int regno = potential_reload_regs[i];
2234
2235 if (i >= FIRST_PSEUDO_REGISTER)
2236 abort (); /* Caller failed to find any register. */
2237
2238 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2239 fatal ("fixed or forbidden register was spilled.\n\
56f58d3a
RK
2240This may be due to a compiler bug or to impossible asm\n\
2241statements or clauses.");
32131a9c
RK
2242
2243 /* Make reg REGNO an additional reload reg. */
2244
2245 potential_reload_regs[i] = -1;
2246 spill_regs[n_spills] = regno;
2247 spill_reg_order[regno] = n_spills;
2248 if (dumpfile)
2249 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2250
2251 /* Clear off the needs we just satisfied. */
2252
2253 max_needs[class]--;
2254 p = reg_class_superclasses[class];
2255 while (*p != LIM_REG_CLASSES)
2256 max_needs[(int) *p++]--;
2257
2258 if (max_nongroups && max_nongroups[class] > 0)
2259 {
2260 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2261 max_nongroups[class]--;
2262 p = reg_class_superclasses[class];
2263 while (*p != LIM_REG_CLASSES)
2264 max_nongroups[(int) *p++]--;
2265 }
2266
2267 /* Spill every pseudo reg that was allocated to this reg
2268 or to something that overlaps this reg. */
2269
2270 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2271
2272 /* If there are some registers still to eliminate and this register
2273 wasn't ever used before, additional stack space may have to be
2274 allocated to store this register. Thus, we may have changed the offset
2275 between the stack and frame pointers, so mark that something has changed.
2276 (If new pseudos were spilled, thus requiring more space, VAL would have
2277 been set non-zero by the call to spill_hard_reg above since additional
2278 reloads may be needed in that case.
2279
2280 One might think that we need only set VAL to 1 if this is a call-used
2281 register. However, the set of registers that must be saved by the
2282 prologue is not identical to the call-used set. For example, the
2283 register used by the call insn for the return PC is a call-used register,
2284 but must be saved by the prologue. */
2285 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2286 val = 1;
2287
2288 regs_ever_live[spill_regs[n_spills]] = 1;
2289 n_spills++;
2290
2291 return val;
2292}
2293\f
2294/* Delete an unneeded INSN and any previous insns who sole purpose is loading
2295 data that is dead in INSN. */
2296
2297static void
2298delete_dead_insn (insn)
2299 rtx insn;
2300{
2301 rtx prev = prev_real_insn (insn);
2302 rtx prev_dest;
2303
2304 /* If the previous insn sets a register that dies in our insn, delete it
2305 too. */
2306 if (prev && GET_CODE (PATTERN (prev)) == SET
2307 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2308 && reg_mentioned_p (prev_dest, PATTERN (insn))
2309 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2310 delete_dead_insn (prev);
2311
2312 PUT_CODE (insn, NOTE);
2313 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2314 NOTE_SOURCE_FILE (insn) = 0;
2315}
2316
2317/* Modify the home of pseudo-reg I.
2318 The new home is present in reg_renumber[I].
2319
2320 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2321 or it may be -1, meaning there is none or it is not relevant.
2322 This is used so that all pseudos spilled from a given hard reg
2323 can share one stack slot. */
2324
2325static void
2326alter_reg (i, from_reg)
2327 register int i;
2328 int from_reg;
2329{
2330 /* When outputting an inline function, this can happen
2331 for a reg that isn't actually used. */
2332 if (regno_reg_rtx[i] == 0)
2333 return;
2334
2335 /* If the reg got changed to a MEM at rtl-generation time,
2336 ignore it. */
2337 if (GET_CODE (regno_reg_rtx[i]) != REG)
2338 return;
2339
2340 /* Modify the reg-rtx to contain the new hard reg
2341 number or else to contain its pseudo reg number. */
2342 REGNO (regno_reg_rtx[i])
2343 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2344
2345 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2346 allocate a stack slot for it. */
2347
2348 if (reg_renumber[i] < 0
2349 && reg_n_refs[i] > 0
2350 && reg_equiv_constant[i] == 0
2351 && reg_equiv_memory_loc[i] == 0)
2352 {
2353 register rtx x;
2354 int inherent_size = PSEUDO_REGNO_BYTES (i);
2355 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2356 int adjust = 0;
2357
2358 /* Each pseudo reg has an inherent size which comes from its own mode,
2359 and a total size which provides room for paradoxical subregs
2360 which refer to the pseudo reg in wider modes.
2361
2362 We can use a slot already allocated if it provides both
2363 enough inherent space and enough total space.
2364 Otherwise, we allocate a new slot, making sure that it has no less
2365 inherent space, and no less total space, then the previous slot. */
2366 if (from_reg == -1)
2367 {
2368 /* No known place to spill from => no slot to reuse. */
2369 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
f76b9db2 2370 if (BYTES_BIG_ENDIAN)
02db8dd0
RK
2371 /* Cancel the big-endian correction done in assign_stack_local.
2372 Get the address of the beginning of the slot.
2373 This is so we can do a big-endian correction unconditionally
2374 below. */
2375 adjust = inherent_size - total_size;
2376
2377 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
32131a9c
RK
2378 }
2379 /* Reuse a stack slot if possible. */
2380 else if (spill_stack_slot[from_reg] != 0
2381 && spill_stack_slot_width[from_reg] >= total_size
2382 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2383 >= inherent_size))
2384 x = spill_stack_slot[from_reg];
2385 /* Allocate a bigger slot. */
2386 else
2387 {
2388 /* Compute maximum size needed, both for inherent size
2389 and for total size. */
2390 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
4f2d3674 2391 rtx stack_slot;
32131a9c
RK
2392 if (spill_stack_slot[from_reg])
2393 {
2394 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2395 > inherent_size)
2396 mode = GET_MODE (spill_stack_slot[from_reg]);
2397 if (spill_stack_slot_width[from_reg] > total_size)
2398 total_size = spill_stack_slot_width[from_reg];
2399 }
2400 /* Make a slot with that size. */
2401 x = assign_stack_local (mode, total_size, -1);
4f2d3674 2402 stack_slot = x;
f76b9db2
ILT
2403 if (BYTES_BIG_ENDIAN)
2404 {
2405 /* Cancel the big-endian correction done in assign_stack_local.
2406 Get the address of the beginning of the slot.
2407 This is so we can do a big-endian correction unconditionally
2408 below. */
2409 adjust = GET_MODE_SIZE (mode) - total_size;
4f2d3674 2410 if (adjust)
02db8dd0
RK
2411 stack_slot = gen_rtx (MEM, mode_for_size (total_size
2412 * BITS_PER_UNIT,
2413 MODE_INT, 1),
2414 plus_constant (XEXP (x, 0), adjust));
f76b9db2 2415 }
4f2d3674 2416 spill_stack_slot[from_reg] = stack_slot;
32131a9c
RK
2417 spill_stack_slot_width[from_reg] = total_size;
2418 }
2419
32131a9c
RK
2420 /* On a big endian machine, the "address" of the slot
2421 is the address of the low part that fits its inherent mode. */
f76b9db2 2422 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
32131a9c 2423 adjust += (total_size - inherent_size);
32131a9c
RK
2424
2425 /* If we have any adjustment to make, or if the stack slot is the
2426 wrong mode, make a new stack slot. */
2427 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2428 {
2429 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2430 plus_constant (XEXP (x, 0), adjust));
2431 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2432 }
2433
2434 /* Save the stack slot for later. */
2435 reg_equiv_memory_loc[i] = x;
2436 }
2437}
2438
2439/* Mark the slots in regs_ever_live for the hard regs
2440 used by pseudo-reg number REGNO. */
2441
2442void
2443mark_home_live (regno)
2444 int regno;
2445{
2446 register int i, lim;
2447 i = reg_renumber[regno];
2448 if (i < 0)
2449 return;
2450 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2451 while (i < lim)
2452 regs_ever_live[i++] = 1;
2453}
c307c237
RK
2454
2455/* Mark the registers used in SCRATCH as being live. */
2456
2457static void
2458mark_scratch_live (scratch)
2459 rtx scratch;
2460{
2461 register int i;
2462 int regno = REGNO (scratch);
2463 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2464
2465 for (i = regno; i < lim; i++)
2466 regs_ever_live[i] = 1;
2467}
32131a9c
RK
2468\f
2469/* This function handles the tracking of elimination offsets around branches.
2470
2471 X is a piece of RTL being scanned.
2472
2473 INSN is the insn that it came from, if any.
2474
2475 INITIAL_P is non-zero if we are to set the offset to be the initial
2476 offset and zero if we are setting the offset of the label to be the
2477 current offset. */
2478
2479static void
2480set_label_offsets (x, insn, initial_p)
2481 rtx x;
2482 rtx insn;
2483 int initial_p;
2484{
2485 enum rtx_code code = GET_CODE (x);
2486 rtx tem;
2487 int i;
2488 struct elim_table *p;
2489
2490 switch (code)
2491 {
2492 case LABEL_REF:
8be386d9
RS
2493 if (LABEL_REF_NONLOCAL_P (x))
2494 return;
2495
32131a9c
RK
2496 x = XEXP (x, 0);
2497
2498 /* ... fall through ... */
2499
2500 case CODE_LABEL:
2501 /* If we know nothing about this label, set the desired offsets. Note
2502 that this sets the offset at a label to be the offset before a label
2503 if we don't know anything about the label. This is not correct for
2504 the label after a BARRIER, but is the best guess we can make. If
2505 we guessed wrong, we will suppress an elimination that might have
2506 been possible had we been able to guess correctly. */
2507
2508 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2509 {
2510 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2511 offsets_at[CODE_LABEL_NUMBER (x)][i]
2512 = (initial_p ? reg_eliminate[i].initial_offset
2513 : reg_eliminate[i].offset);
2514 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2515 }
2516
2517 /* Otherwise, if this is the definition of a label and it is
d45cf215 2518 preceded by a BARRIER, set our offsets to the known offset of
32131a9c
RK
2519 that label. */
2520
2521 else if (x == insn
2522 && (tem = prev_nonnote_insn (insn)) != 0
2523 && GET_CODE (tem) == BARRIER)
2a4b5f3b
RK
2524 {
2525 num_not_at_initial_offset = 0;
2526 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2527 {
2528 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2529 = offsets_at[CODE_LABEL_NUMBER (x)][i];
1d0d98f3
RK
2530 if (reg_eliminate[i].can_eliminate
2531 && (reg_eliminate[i].offset
2532 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
2533 num_not_at_initial_offset++;
2534 }
2535 }
32131a9c
RK
2536
2537 else
2538 /* If neither of the above cases is true, compare each offset
2539 with those previously recorded and suppress any eliminations
2540 where the offsets disagree. */
a8fdc208 2541
32131a9c
RK
2542 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2543 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2544 != (initial_p ? reg_eliminate[i].initial_offset
2545 : reg_eliminate[i].offset))
2546 reg_eliminate[i].can_eliminate = 0;
2547
2548 return;
2549
2550 case JUMP_INSN:
2551 set_label_offsets (PATTERN (insn), insn, initial_p);
2552
2553 /* ... fall through ... */
2554
2555 case INSN:
2556 case CALL_INSN:
2557 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2558 and hence must have all eliminations at their initial offsets. */
2559 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2560 if (REG_NOTE_KIND (tem) == REG_LABEL)
2561 set_label_offsets (XEXP (tem, 0), insn, 1);
2562 return;
2563
2564 case ADDR_VEC:
2565 case ADDR_DIFF_VEC:
2566 /* Each of the labels in the address vector must be at their initial
2567 offsets. We want the first first for ADDR_VEC and the second
2568 field for ADDR_DIFF_VEC. */
2569
2570 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2571 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2572 insn, initial_p);
2573 return;
2574
2575 case SET:
2576 /* We only care about setting PC. If the source is not RETURN,
2577 IF_THEN_ELSE, or a label, disable any eliminations not at
2578 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2579 isn't one of those possibilities. For branches to a label,
2580 call ourselves recursively.
2581
2582 Note that this can disable elimination unnecessarily when we have
2583 a non-local goto since it will look like a non-constant jump to
2584 someplace in the current function. This isn't a significant
2585 problem since such jumps will normally be when all elimination
2586 pairs are back to their initial offsets. */
2587
2588 if (SET_DEST (x) != pc_rtx)
2589 return;
2590
2591 switch (GET_CODE (SET_SRC (x)))
2592 {
2593 case PC:
2594 case RETURN:
2595 return;
2596
2597 case LABEL_REF:
2598 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2599 return;
2600
2601 case IF_THEN_ELSE:
2602 tem = XEXP (SET_SRC (x), 1);
2603 if (GET_CODE (tem) == LABEL_REF)
2604 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2605 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2606 break;
2607
2608 tem = XEXP (SET_SRC (x), 2);
2609 if (GET_CODE (tem) == LABEL_REF)
2610 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2611 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2612 break;
2613 return;
2614 }
2615
2616 /* If we reach here, all eliminations must be at their initial
2617 offset because we are doing a jump to a variable address. */
2618 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2619 if (p->offset != p->initial_offset)
2620 p->can_eliminate = 0;
2621 }
2622}
2623\f
2624/* Used for communication between the next two function to properly share
2625 the vector for an ASM_OPERANDS. */
2626
2627static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2628
a8fdc208 2629/* Scan X and replace any eliminable registers (such as fp) with a
32131a9c
RK
2630 replacement (such as sp), plus an offset.
2631
2632 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2633 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2634 MEM, we are allowed to replace a sum of a register and the constant zero
2635 with the register, which we cannot do outside a MEM. In addition, we need
2636 to record the fact that a register is referenced outside a MEM.
2637
ff32812a 2638 If INSN is an insn, it is the insn containing X. If we replace a REG
32131a9c
RK
2639 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2640 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2641 that the REG is being modified.
2642
ff32812a
RS
2643 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2644 That's used when we eliminate in expressions stored in notes.
2645 This means, do not set ref_outside_mem even if the reference
2646 is outside of MEMs.
2647
32131a9c
RK
2648 If we see a modification to a register we know about, take the
2649 appropriate action (see case SET, below).
2650
2651 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2652 replacements done assuming all offsets are at their initial values. If
2653 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2654 encounter, return the actual location so that find_reloads will do
2655 the proper thing. */
2656
2657rtx
2658eliminate_regs (x, mem_mode, insn)
2659 rtx x;
2660 enum machine_mode mem_mode;
2661 rtx insn;
2662{
2663 enum rtx_code code = GET_CODE (x);
2664 struct elim_table *ep;
2665 int regno;
2666 rtx new;
2667 int i, j;
2668 char *fmt;
2669 int copied = 0;
2670
2671 switch (code)
2672 {
2673 case CONST_INT:
2674 case CONST_DOUBLE:
2675 case CONST:
2676 case SYMBOL_REF:
2677 case CODE_LABEL:
2678 case PC:
2679 case CC0:
2680 case ASM_INPUT:
2681 case ADDR_VEC:
2682 case ADDR_DIFF_VEC:
2683 case RETURN:
2684 return x;
2685
2686 case REG:
2687 regno = REGNO (x);
2688
2689 /* First handle the case where we encounter a bare register that
2690 is eliminable. Replace it with a PLUS. */
2691 if (regno < FIRST_PSEUDO_REGISTER)
2692 {
2693 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2694 ep++)
2695 if (ep->from_rtx == x && ep->can_eliminate)
2696 {
ff32812a
RS
2697 if (! mem_mode
2698 /* Refs inside notes don't count for this purpose. */
fe089a90 2699 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
ff32812a 2700 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2701 ep->ref_outside_mem = 1;
2702 return plus_constant (ep->to_rtx, ep->previous_offset);
2703 }
2704
2705 }
2706 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2707 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2708 {
2709 /* In this case, find_reloads would attempt to either use an
2710 incorrect address (if something is not at its initial offset)
2711 or substitute an replaced address into an insn (which loses
2712 if the offset is changed by some later action). So we simply
2713 return the replaced stack slot (assuming it is changed by
2714 elimination) and ignore the fact that this is actually a
2715 reference to the pseudo. Ensure we make a copy of the
2716 address in case it is shared. */
fb3821f7 2717 new = eliminate_regs (reg_equiv_memory_loc[regno],
e5687447 2718 mem_mode, insn);
32131a9c 2719 if (new != reg_equiv_memory_loc[regno])
208dffa5
RS
2720 {
2721 cannot_omit_stores[regno] = 1;
2722 return copy_rtx (new);
2723 }
32131a9c
RK
2724 }
2725 return x;
2726
2727 case PLUS:
2728 /* If this is the sum of an eliminable register and a constant, rework
2729 the sum. */
2730 if (GET_CODE (XEXP (x, 0)) == REG
2731 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2732 && CONSTANT_P (XEXP (x, 1)))
2733 {
2734 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2735 ep++)
2736 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2737 {
e5687447
JW
2738 if (! mem_mode
2739 /* Refs inside notes don't count for this purpose. */
2740 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2741 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2742 ep->ref_outside_mem = 1;
2743
2744 /* The only time we want to replace a PLUS with a REG (this
2745 occurs when the constant operand of the PLUS is the negative
2746 of the offset) is when we are inside a MEM. We won't want
2747 to do so at other times because that would change the
2748 structure of the insn in a way that reload can't handle.
2749 We special-case the commonest situation in
2750 eliminate_regs_in_insn, so just replace a PLUS with a
2751 PLUS here, unless inside a MEM. */
a23b64d5 2752 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
32131a9c
RK
2753 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2754 return ep->to_rtx;
2755 else
2756 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2757 plus_constant (XEXP (x, 1),
2758 ep->previous_offset));
2759 }
2760
2761 /* If the register is not eliminable, we are done since the other
2762 operand is a constant. */
2763 return x;
2764 }
2765
2766 /* If this is part of an address, we want to bring any constant to the
2767 outermost PLUS. We will do this by doing register replacement in
2768 our operands and seeing if a constant shows up in one of them.
2769
2770 We assume here this is part of an address (or a "load address" insn)
2771 since an eliminable register is not likely to appear in any other
2772 context.
2773
2774 If we have (plus (eliminable) (reg)), we want to produce
2775 (plus (plus (replacement) (reg) (const))). If this was part of a
2776 normal add insn, (plus (replacement) (reg)) will be pushed as a
2777 reload. This is the desired action. */
2778
2779 {
e5687447
JW
2780 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2781 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
32131a9c
RK
2782
2783 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2784 {
2785 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2786 didn't get a hard register but has a reg_equiv_constant,
32131a9c
RK
2787 we must replace the constant here since it may no longer
2788 be in the position of any operand. */
2789 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2790 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2791 && reg_renumber[REGNO (new1)] < 0
2792 && reg_equiv_constant != 0
2793 && reg_equiv_constant[REGNO (new1)] != 0)
2794 new1 = reg_equiv_constant[REGNO (new1)];
2795 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2796 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2797 && reg_renumber[REGNO (new0)] < 0
2798 && reg_equiv_constant[REGNO (new0)] != 0)
2799 new0 = reg_equiv_constant[REGNO (new0)];
2800
2801 new = form_sum (new0, new1);
2802
2803 /* As above, if we are not inside a MEM we do not want to
2804 turn a PLUS into something else. We might try to do so here
2805 for an addition of 0 if we aren't optimizing. */
2806 if (! mem_mode && GET_CODE (new) != PLUS)
2807 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2808 else
2809 return new;
2810 }
2811 }
2812 return x;
2813
981c7390
RK
2814 case MULT:
2815 /* If this is the product of an eliminable register and a
2816 constant, apply the distribute law and move the constant out
2817 so that we have (plus (mult ..) ..). This is needed in order
9faa82d8 2818 to keep load-address insns valid. This case is pathological.
981c7390
RK
2819 We ignore the possibility of overflow here. */
2820 if (GET_CODE (XEXP (x, 0)) == REG
2821 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2822 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2823 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2824 ep++)
2825 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2826 {
2827 if (! mem_mode
2828 /* Refs inside notes don't count for this purpose. */
2829 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2830 || GET_CODE (insn) == INSN_LIST)))
2831 ep->ref_outside_mem = 1;
2832
2833 return
2834 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2835 ep->previous_offset * INTVAL (XEXP (x, 1)));
2836 }
32131a9c
RK
2837
2838 /* ... fall through ... */
2839
32131a9c
RK
2840 case CALL:
2841 case COMPARE:
2842 case MINUS:
32131a9c
RK
2843 case DIV: case UDIV:
2844 case MOD: case UMOD:
2845 case AND: case IOR: case XOR:
45620ed4
RK
2846 case ROTATERT: case ROTATE:
2847 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
32131a9c
RK
2848 case NE: case EQ:
2849 case GE: case GT: case GEU: case GTU:
2850 case LE: case LT: case LEU: case LTU:
2851 {
e5687447 2852 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
fb3821f7 2853 rtx new1
e5687447 2854 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
32131a9c
RK
2855
2856 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2857 return gen_rtx (code, GET_MODE (x), new0, new1);
2858 }
2859 return x;
2860
981c7390
RK
2861 case EXPR_LIST:
2862 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2863 if (XEXP (x, 0))
2864 {
2865 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2866 if (new != XEXP (x, 0))
2867 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2868 }
2869
2870 /* ... fall through ... */
2871
2872 case INSN_LIST:
2873 /* Now do eliminations in the rest of the chain. If this was
2874 an EXPR_LIST, this might result in allocating more memory than is
2875 strictly needed, but it simplifies the code. */
2876 if (XEXP (x, 1))
2877 {
2878 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2879 if (new != XEXP (x, 1))
2880 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2881 }
2882 return x;
2883
32131a9c
RK
2884 case PRE_INC:
2885 case POST_INC:
2886 case PRE_DEC:
2887 case POST_DEC:
2888 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2889 if (ep->to_rtx == XEXP (x, 0))
2890 {
4c05b187
RK
2891 int size = GET_MODE_SIZE (mem_mode);
2892
2893 /* If more bytes than MEM_MODE are pushed, account for them. */
2894#ifdef PUSH_ROUNDING
2895 if (ep->to_rtx == stack_pointer_rtx)
2896 size = PUSH_ROUNDING (size);
2897#endif
32131a9c 2898 if (code == PRE_DEC || code == POST_DEC)
4c05b187 2899 ep->offset += size;
32131a9c 2900 else
4c05b187 2901 ep->offset -= size;
32131a9c
RK
2902 }
2903
2904 /* Fall through to generic unary operation case. */
32131a9c
RK
2905 case STRICT_LOW_PART:
2906 case NEG: case NOT:
2907 case SIGN_EXTEND: case ZERO_EXTEND:
2908 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2909 case FLOAT: case FIX:
2910 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2911 case ABS:
2912 case SQRT:
2913 case FFS:
e5687447 2914 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
32131a9c
RK
2915 if (new != XEXP (x, 0))
2916 return gen_rtx (code, GET_MODE (x), new);
2917 return x;
2918
2919 case SUBREG:
2920 /* Similar to above processing, but preserve SUBREG_WORD.
2921 Convert (subreg (mem)) to (mem) if not paradoxical.
2922 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2923 pseudo didn't get a hard reg, we must replace this with the
2924 eliminated version of the memory location because push_reloads
2925 may do the replacement in certain circumstances. */
2926 if (GET_CODE (SUBREG_REG (x)) == REG
2927 && (GET_MODE_SIZE (GET_MODE (x))
2928 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2929 && reg_equiv_memory_loc != 0
2930 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2931 {
2932 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
e5687447 2933 mem_mode, insn);
32131a9c
RK
2934
2935 /* If we didn't change anything, we must retain the pseudo. */
2936 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
59e2c378 2937 new = SUBREG_REG (x);
32131a9c 2938 else
59e2c378
RK
2939 {
2940 /* Otherwise, ensure NEW isn't shared in case we have to reload
2941 it. */
2942 new = copy_rtx (new);
2943
2944 /* In this case, we must show that the pseudo is used in this
2945 insn so that delete_output_reload will do the right thing. */
2946 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2947 && GET_CODE (insn) != INSN_LIST)
2948 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
2949 insn);
2950 }
32131a9c
RK
2951 }
2952 else
e5687447 2953 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
32131a9c
RK
2954
2955 if (new != XEXP (x, 0))
2956 {
2957 if (GET_CODE (new) == MEM
2958 && (GET_MODE_SIZE (GET_MODE (x))
a3b75c07 2959 <= GET_MODE_SIZE (GET_MODE (new)))
e90d3cbb 2960#ifdef LOAD_EXTEND_OP
a3b75c07
RS
2961 /* On these machines we will be reloading what is
2962 inside the SUBREG if it originally was a pseudo and
2963 the inner and outer modes are both a word or
2964 smaller. So leave the SUBREG then. */
2965 && ! (GET_CODE (SUBREG_REG (x)) == REG
2966 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
fc4a0dca
JW
2967 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD
2968 && (GET_MODE_SIZE (GET_MODE (x))
2969 > GET_MODE_SIZE (GET_MODE (new)))
2970 && INTEGRAL_MODE_P (GET_MODE (new))
2971 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
a3b75c07
RS
2972#endif
2973 )
32131a9c
RK
2974 {
2975 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2976 enum machine_mode mode = GET_MODE (x);
2977
f76b9db2
ILT
2978 if (BYTES_BIG_ENDIAN)
2979 offset += (MIN (UNITS_PER_WORD,
2980 GET_MODE_SIZE (GET_MODE (new)))
2981 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
32131a9c
RK
2982
2983 PUT_MODE (new, mode);
2984 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2985 return new;
2986 }
2987 else
2988 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2989 }
2990
2991 return x;
2992
94714ecc
RK
2993 case USE:
2994 /* If using a register that is the source of an eliminate we still
2995 think can be performed, note it cannot be performed since we don't
2996 know how this register is used. */
2997 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2998 if (ep->from_rtx == XEXP (x, 0))
2999 ep->can_eliminate = 0;
3000
3001 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3002 if (new != XEXP (x, 0))
3003 return gen_rtx (code, GET_MODE (x), new);
3004 return x;
3005
32131a9c
RK
3006 case CLOBBER:
3007 /* If clobbering a register that is the replacement register for an
d45cf215 3008 elimination we still think can be performed, note that it cannot
32131a9c
RK
3009 be performed. Otherwise, we need not be concerned about it. */
3010 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3011 if (ep->to_rtx == XEXP (x, 0))
3012 ep->can_eliminate = 0;
3013
e5687447 3014 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2045084c
JVA
3015 if (new != XEXP (x, 0))
3016 return gen_rtx (code, GET_MODE (x), new);
32131a9c
RK
3017 return x;
3018
3019 case ASM_OPERANDS:
3020 {
3021 rtx *temp_vec;
3022 /* Properly handle sharing input and constraint vectors. */
3023 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3024 {
3025 /* When we come to a new vector not seen before,
3026 scan all its elements; keep the old vector if none
3027 of them changes; otherwise, make a copy. */
3028 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3029 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3030 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3031 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
e5687447 3032 mem_mode, insn);
32131a9c
RK
3033
3034 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3035 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3036 break;
3037
3038 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3039 new_asm_operands_vec = old_asm_operands_vec;
3040 else
3041 new_asm_operands_vec
3042 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3043 }
3044
3045 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3046 if (new_asm_operands_vec == old_asm_operands_vec)
3047 return x;
3048
3049 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3050 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3051 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3052 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3053 ASM_OPERANDS_SOURCE_FILE (x),
3054 ASM_OPERANDS_SOURCE_LINE (x));
3055 new->volatil = x->volatil;
3056 return new;
3057 }
3058
3059 case SET:
3060 /* Check for setting a register that we know about. */
3061 if (GET_CODE (SET_DEST (x)) == REG)
3062 {
3063 /* See if this is setting the replacement register for an
a8fdc208 3064 elimination.
32131a9c 3065
3ec2ea3e
DE
3066 If DEST is the hard frame pointer, we do nothing because we
3067 assume that all assignments to the frame pointer are for
3068 non-local gotos and are being done at a time when they are valid
3069 and do not disturb anything else. Some machines want to
3070 eliminate a fake argument pointer (or even a fake frame pointer)
3071 with either the real frame or the stack pointer. Assignments to
3072 the hard frame pointer must not prevent this elimination. */
32131a9c
RK
3073
3074 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3075 ep++)
3076 if (ep->to_rtx == SET_DEST (x)
3ec2ea3e 3077 && SET_DEST (x) != hard_frame_pointer_rtx)
32131a9c 3078 {
6dc42e49 3079 /* If it is being incremented, adjust the offset. Otherwise,
32131a9c
RK
3080 this elimination can't be done. */
3081 rtx src = SET_SRC (x);
3082
3083 if (GET_CODE (src) == PLUS
3084 && XEXP (src, 0) == SET_DEST (x)
3085 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3086 ep->offset -= INTVAL (XEXP (src, 1));
3087 else
3088 ep->can_eliminate = 0;
3089 }
3090
3091 /* Now check to see we are assigning to a register that can be
3092 eliminated. If so, it must be as part of a PARALLEL, since we
3093 will not have been called if this is a single SET. So indicate
3094 that we can no longer eliminate this reg. */
3095 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3096 ep++)
3097 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3098 ep->can_eliminate = 0;
3099 }
3100
3101 /* Now avoid the loop below in this common case. */
3102 {
e5687447
JW
3103 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3104 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
32131a9c 3105
ff32812a 3106 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
32131a9c
RK
3107 write a CLOBBER insn. */
3108 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
572ca60a
RS
3109 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3110 && GET_CODE (insn) != INSN_LIST)
32131a9c
RK
3111 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3112
3113 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3114 return gen_rtx (SET, VOIDmode, new0, new1);
3115 }
3116
3117 return x;
3118
3119 case MEM:
3120 /* Our only special processing is to pass the mode of the MEM to our
3121 recursive call and copy the flags. While we are here, handle this
3122 case more efficiently. */
e5687447 3123 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
32131a9c
RK
3124 if (new != XEXP (x, 0))
3125 {
3126 new = gen_rtx (MEM, GET_MODE (x), new);
3127 new->volatil = x->volatil;
3128 new->unchanging = x->unchanging;
3129 new->in_struct = x->in_struct;
3130 return new;
3131 }
3132 else
3133 return x;
3134 }
3135
3136 /* Process each of our operands recursively. If any have changed, make a
3137 copy of the rtx. */
3138 fmt = GET_RTX_FORMAT (code);
3139 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3140 {
3141 if (*fmt == 'e')
3142 {
e5687447 3143 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
32131a9c
RK
3144 if (new != XEXP (x, i) && ! copied)
3145 {
3146 rtx new_x = rtx_alloc (code);
4c9a05bc
RK
3147 bcopy ((char *) x, (char *) new_x,
3148 (sizeof (*new_x) - sizeof (new_x->fld)
3149 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
32131a9c
RK
3150 x = new_x;
3151 copied = 1;
3152 }
3153 XEXP (x, i) = new;
3154 }
3155 else if (*fmt == 'E')
3156 {
3157 int copied_vec = 0;
3158 for (j = 0; j < XVECLEN (x, i); j++)
3159 {
3160 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3161 if (new != XVECEXP (x, i, j) && ! copied_vec)
3162 {
3163 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3164 &XVECEXP (x, i, 0));
3165 if (! copied)
3166 {
3167 rtx new_x = rtx_alloc (code);
4c9a05bc
RK
3168 bcopy ((char *) x, (char *) new_x,
3169 (sizeof (*new_x) - sizeof (new_x->fld)
3170 + (sizeof (new_x->fld[0])
3171 * GET_RTX_LENGTH (code))));
32131a9c
RK
3172 x = new_x;
3173 copied = 1;
3174 }
3175 XVEC (x, i) = new_v;
3176 copied_vec = 1;
3177 }
3178 XVECEXP (x, i, j) = new;
3179 }
3180 }
3181 }
3182
3183 return x;
3184}
3185\f
3186/* Scan INSN and eliminate all eliminable registers in it.
3187
3188 If REPLACE is nonzero, do the replacement destructively. Also
3189 delete the insn as dead it if it is setting an eliminable register.
3190
3191 If REPLACE is zero, do all our allocations in reload_obstack.
3192
3193 If no eliminations were done and this insn doesn't require any elimination
3194 processing (these are not identical conditions: it might be updating sp,
3195 but not referencing fp; this needs to be seen during reload_as_needed so
3196 that the offset between fp and sp can be taken into consideration), zero
3197 is returned. Otherwise, 1 is returned. */
3198
3199static int
3200eliminate_regs_in_insn (insn, replace)
3201 rtx insn;
3202 int replace;
3203{
3204 rtx old_body = PATTERN (insn);
774672d2 3205 rtx old_set = single_set (insn);
32131a9c
RK
3206 rtx new_body;
3207 int val = 0;
3208 struct elim_table *ep;
3209
3210 if (! replace)
3211 push_obstacks (&reload_obstack, &reload_obstack);
3212
774672d2
RK
3213 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3214 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
32131a9c
RK
3215 {
3216 /* Check for setting an eliminable register. */
3217 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
774672d2 3218 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
32131a9c 3219 {
dd1eab0a
RK
3220#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3221 /* If this is setting the frame pointer register to the
3222 hardware frame pointer register and this is an elimination
3223 that will be done (tested above), this insn is really
3224 adjusting the frame pointer downward to compensate for
3225 the adjustment done before a nonlocal goto. */
3226 if (ep->from == FRAME_POINTER_REGNUM
3227 && ep->to == HARD_FRAME_POINTER_REGNUM)
3228 {
3229 rtx src = SET_SRC (old_set);
3230 int offset, ok = 0;
3231
3232 if (src == ep->to_rtx)
3233 offset = 0, ok = 1;
3234 else if (GET_CODE (src) == PLUS
3235 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3236 offset = INTVAL (XEXP (src, 0)), ok = 1;
3237
3238 if (ok)
3239 {
3240 if (replace)
3241 {
3242 rtx src
3243 = plus_constant (ep->to_rtx, offset - ep->offset);
3244
3245 /* First see if this insn remains valid when we
3246 make the change. If not, keep the INSN_CODE
3247 the same and let reload fit it up. */
3248 validate_change (insn, &SET_SRC (old_set), src, 1);
3249 validate_change (insn, &SET_DEST (old_set),
3250 ep->to_rtx, 1);
3251 if (! apply_change_group ())
3252 {
3253 SET_SRC (old_set) = src;
3254 SET_DEST (old_set) = ep->to_rtx;
3255 }
3256 }
3257
3258 val = 1;
3259 goto done;
3260 }
3261 }
3262#endif
3263
32131a9c
RK
3264 /* In this case this insn isn't serving a useful purpose. We
3265 will delete it in reload_as_needed once we know that this
3266 elimination is, in fact, being done.
3267
abc95ed3 3268 If REPLACE isn't set, we can't delete this insn, but needn't
32131a9c
RK
3269 process it since it won't be used unless something changes. */
3270 if (replace)
3271 delete_dead_insn (insn);
3272 val = 1;
3273 goto done;
3274 }
3275
3276 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3277 in the insn is the negative of the offset in FROM. Substitute
3278 (set (reg) (reg to)) for the insn and change its code.
3279
3280 We have to do this here, rather than in eliminate_regs, do that we can
3281 change the insn code. */
3282
774672d2
RK
3283 if (GET_CODE (SET_SRC (old_set)) == PLUS
3284 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3285 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
32131a9c
RK
3286 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3287 ep++)
774672d2 3288 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
922d9d40 3289 && ep->can_eliminate)
32131a9c 3290 {
922d9d40
RK
3291 /* We must stop at the first elimination that will be used.
3292 If this one would replace the PLUS with a REG, do it
3293 now. Otherwise, quit the loop and let eliminate_regs
3294 do its normal replacement. */
774672d2 3295 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
922d9d40 3296 {
774672d2
RK
3297 /* We assume here that we don't need a PARALLEL of
3298 any CLOBBERs for this assignment. There's not
3299 much we can do if we do need it. */
922d9d40 3300 PATTERN (insn) = gen_rtx (SET, VOIDmode,
774672d2 3301 SET_DEST (old_set), ep->to_rtx);
922d9d40
RK
3302 INSN_CODE (insn) = -1;
3303 val = 1;
3304 goto done;
3305 }
3306
3307 break;
32131a9c
RK
3308 }
3309 }
3310
3311 old_asm_operands_vec = 0;
3312
3313 /* Replace the body of this insn with a substituted form. If we changed
05b4c365 3314 something, return non-zero.
32131a9c
RK
3315
3316 If we are replacing a body that was a (set X (plus Y Z)), try to
3317 re-recognize the insn. We do this in case we had a simple addition
3318 but now can do this as a load-address. This saves an insn in this
3319 common case. */
3320
fb3821f7 3321 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
32131a9c
RK
3322 if (new_body != old_body)
3323 {
7c791b13
RK
3324 /* If we aren't replacing things permanently and we changed something,
3325 make another copy to ensure that all the RTL is new. Otherwise
3326 things can go wrong if find_reload swaps commutative operands
3327 and one is inside RTL that has been copied while the other is not. */
3328
4d411872
RS
3329 /* Don't copy an asm_operands because (1) there's no need and (2)
3330 copy_rtx can't do it properly when there are multiple outputs. */
b84f9d9c 3331 if (! replace && asm_noperands (old_body) < 0)
7c791b13
RK
3332 new_body = copy_rtx (new_body);
3333
774672d2
RK
3334 /* If we had a move insn but now we don't, rerecognize it. This will
3335 cause spurious re-recognition if the old move had a PARALLEL since
3336 the new one still will, but we can't call single_set without
3337 having put NEW_BODY into the insn and the re-recognition won't
3338 hurt in this rare case. */
3339 if (old_set != 0
3340 && ((GET_CODE (SET_SRC (old_set)) == REG
3341 && (GET_CODE (new_body) != SET
3342 || GET_CODE (SET_SRC (new_body)) != REG))
3343 /* If this was a load from or store to memory, compare
3344 the MEM in recog_operand to the one in the insn. If they
3345 are not equal, then rerecognize the insn. */
3346 || (old_set != 0
3347 && ((GET_CODE (SET_SRC (old_set)) == MEM
3348 && SET_SRC (old_set) != recog_operand[1])
3349 || (GET_CODE (SET_DEST (old_set)) == MEM
3350 && SET_DEST (old_set) != recog_operand[0])))
3351 /* If this was an add insn before, rerecognize. */
3352 || GET_CODE (SET_SRC (old_set)) == PLUS))
4a5d0fb5
RS
3353 {
3354 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
0ba846c7
RS
3355 /* If recognition fails, store the new body anyway.
3356 It's normal to have recognition failures here
3357 due to bizarre memory addresses; reloading will fix them. */
3358 PATTERN (insn) = new_body;
4a5d0fb5 3359 }
0ba846c7 3360 else
32131a9c
RK
3361 PATTERN (insn) = new_body;
3362
32131a9c
RK
3363 val = 1;
3364 }
a8fdc208 3365
32131a9c
RK
3366 /* Loop through all elimination pairs. See if any have changed and
3367 recalculate the number not at initial offset.
3368
a8efe40d
RK
3369 Compute the maximum offset (minimum offset if the stack does not
3370 grow downward) for each elimination pair.
3371
32131a9c
RK
3372 We also detect a cases where register elimination cannot be done,
3373 namely, if a register would be both changed and referenced outside a MEM
3374 in the resulting insn since such an insn is often undefined and, even if
3375 not, we cannot know what meaning will be given to it. Note that it is
3376 valid to have a register used in an address in an insn that changes it
3377 (presumably with a pre- or post-increment or decrement).
3378
3379 If anything changes, return nonzero. */
3380
3381 num_not_at_initial_offset = 0;
3382 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3383 {
3384 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3385 ep->can_eliminate = 0;
3386
3387 ep->ref_outside_mem = 0;
3388
3389 if (ep->previous_offset != ep->offset)
3390 val = 1;
3391
3392 ep->previous_offset = ep->offset;
3393 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3394 num_not_at_initial_offset++;
a8efe40d
RK
3395
3396#ifdef STACK_GROWS_DOWNWARD
3397 ep->max_offset = MAX (ep->max_offset, ep->offset);
3398#else
3399 ep->max_offset = MIN (ep->max_offset, ep->offset);
3400#endif
32131a9c
RK
3401 }
3402
3403 done:
9faa82d8 3404 /* If we changed something, perform elimination in REG_NOTES. This is
05b4c365
RK
3405 needed even when REPLACE is zero because a REG_DEAD note might refer
3406 to a register that we eliminate and could cause a different number
3407 of spill registers to be needed in the final reload pass than in
3408 the pre-passes. */
20748cab 3409 if (val && REG_NOTES (insn) != 0)
ff32812a 3410 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
05b4c365 3411
32131a9c
RK
3412 if (! replace)
3413 pop_obstacks ();
3414
3415 return val;
3416}
3417
3418/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3419 replacement we currently believe is valid, mark it as not eliminable if X
3420 modifies DEST in any way other than by adding a constant integer to it.
3421
3422 If DEST is the frame pointer, we do nothing because we assume that
3ec2ea3e
DE
3423 all assignments to the hard frame pointer are nonlocal gotos and are being
3424 done at a time when they are valid and do not disturb anything else.
32131a9c 3425 Some machines want to eliminate a fake argument pointer with either the
3ec2ea3e
DE
3426 frame or stack pointer. Assignments to the hard frame pointer must not
3427 prevent this elimination.
32131a9c
RK
3428
3429 Called via note_stores from reload before starting its passes to scan
3430 the insns of the function. */
3431
3432static void
3433mark_not_eliminable (dest, x)
3434 rtx dest;
3435 rtx x;
3436{
3437 register int i;
3438
3439 /* A SUBREG of a hard register here is just changing its mode. We should
3440 not see a SUBREG of an eliminable hard register, but check just in
3441 case. */
3442 if (GET_CODE (dest) == SUBREG)
3443 dest = SUBREG_REG (dest);
3444
3ec2ea3e 3445 if (dest == hard_frame_pointer_rtx)
32131a9c
RK
3446 return;
3447
3448 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3449 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3450 && (GET_CODE (x) != SET
3451 || GET_CODE (SET_SRC (x)) != PLUS
3452 || XEXP (SET_SRC (x), 0) != dest
3453 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3454 {
3455 reg_eliminate[i].can_eliminate_previous
3456 = reg_eliminate[i].can_eliminate = 0;
3457 num_eliminable--;
3458 }
3459}
3460\f
3461/* Kick all pseudos out of hard register REGNO.
3462 If GLOBAL is nonzero, try to find someplace else to put them.
3463 If DUMPFILE is nonzero, log actions taken on that file.
3464
3465 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3466 because we found we can't eliminate some register. In the case, no pseudos
3467 are allowed to be in the register, even if they are only in a block that
3468 doesn't require spill registers, unlike the case when we are spilling this
3469 hard reg to produce another spill register.
3470
3471 Return nonzero if any pseudos needed to be kicked out. */
3472
3473static int
3474spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3475 register int regno;
3476 int global;
3477 FILE *dumpfile;
3478 int cant_eliminate;
3479{
c307c237 3480 enum reg_class class = REGNO_REG_CLASS (regno);
32131a9c
RK
3481 int something_changed = 0;
3482 register int i;
3483
3484 SET_HARD_REG_BIT (forbidden_regs, regno);
3485
9ff3516a
RK
3486 if (cant_eliminate)
3487 regs_ever_live[regno] = 1;
3488
32131a9c
RK
3489 /* Spill every pseudo reg that was allocated to this reg
3490 or to something that overlaps this reg. */
3491
3492 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3493 if (reg_renumber[i] >= 0
3494 && reg_renumber[i] <= regno
a8fdc208 3495 && (reg_renumber[i]
32131a9c
RK
3496 + HARD_REGNO_NREGS (reg_renumber[i],
3497 PSEUDO_REGNO_MODE (i))
3498 > regno))
3499 {
32131a9c
RK
3500 /* If this register belongs solely to a basic block which needed no
3501 spilling of any class that this register is contained in,
3502 leave it be, unless we are spilling this register because
3503 it was a hard register that can't be eliminated. */
3504
3505 if (! cant_eliminate
3506 && basic_block_needs[0]
3507 && reg_basic_block[i] >= 0
3508 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3509 {
3510 enum reg_class *p;
3511
3512 for (p = reg_class_superclasses[(int) class];
3513 *p != LIM_REG_CLASSES; p++)
3514 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3515 break;
a8fdc208 3516
32131a9c
RK
3517 if (*p == LIM_REG_CLASSES)
3518 continue;
3519 }
3520
3521 /* Mark it as no longer having a hard register home. */
3522 reg_renumber[i] = -1;
3523 /* We will need to scan everything again. */
3524 something_changed = 1;
3525 if (global)
2c5d9e37 3526 retry_global_alloc (i, forbidden_regs);
32131a9c
RK
3527
3528 alter_reg (i, regno);
3529 if (dumpfile)
3530 {
3531 if (reg_renumber[i] == -1)
3532 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3533 else
3534 fprintf (dumpfile, " Register %d now in %d.\n\n",
3535 i, reg_renumber[i]);
3536 }
3537 }
c307c237
RK
3538 for (i = 0; i < scratch_list_length; i++)
3539 {
3540 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3541 {
3542 if (! cant_eliminate && basic_block_needs[0]
3543 && ! basic_block_needs[(int) class][scratch_block[i]])
3544 {
3545 enum reg_class *p;
3546
3547 for (p = reg_class_superclasses[(int) class];
3548 *p != LIM_REG_CLASSES; p++)
3549 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3550 break;
3551
3552 if (*p == LIM_REG_CLASSES)
3553 continue;
3554 }
3555 PUT_CODE (scratch_list[i], SCRATCH);
3556 scratch_list[i] = 0;
3557 something_changed = 1;
3558 continue;
3559 }
3560 }
32131a9c
RK
3561
3562 return something_changed;
3563}
3564\f
56f58d3a
RK
3565/* Find all paradoxical subregs within X and update reg_max_ref_width.
3566 Also mark any hard registers used to store user variables as
3567 forbidden from being used for spill registers. */
32131a9c
RK
3568
3569static void
3570scan_paradoxical_subregs (x)
3571 register rtx x;
3572{
3573 register int i;
3574 register char *fmt;
3575 register enum rtx_code code = GET_CODE (x);
3576
3577 switch (code)
3578 {
56f58d3a
RK
3579 case REG:
3580#ifdef SMALL_REGISTER_CLASSES
3581 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3582 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3583#endif
3584 return;
3585
32131a9c
RK
3586 case CONST_INT:
3587 case CONST:
3588 case SYMBOL_REF:
3589 case LABEL_REF:
3590 case CONST_DOUBLE:
3591 case CC0:
3592 case PC:
32131a9c
RK
3593 case USE:
3594 case CLOBBER:
3595 return;
3596
3597 case SUBREG:
3598 if (GET_CODE (SUBREG_REG (x)) == REG
3599 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3600 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3601 = GET_MODE_SIZE (GET_MODE (x));
3602 return;
3603 }
3604
3605 fmt = GET_RTX_FORMAT (code);
3606 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3607 {
3608 if (fmt[i] == 'e')
3609 scan_paradoxical_subregs (XEXP (x, i));
3610 else if (fmt[i] == 'E')
3611 {
3612 register int j;
3613 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3614 scan_paradoxical_subregs (XVECEXP (x, i, j));
3615 }
3616 }
3617}
3618\f
32131a9c
RK
3619static int
3620hard_reg_use_compare (p1, p2)
3621 struct hard_reg_n_uses *p1, *p2;
3622{
3623 int tem = p1->uses - p2->uses;
3624 if (tem != 0) return tem;
3625 /* If regs are equally good, sort by regno,
3626 so that the results of qsort leave nothing to chance. */
3627 return p1->regno - p2->regno;
3628}
3629
3630/* Choose the order to consider regs for use as reload registers
3631 based on how much trouble would be caused by spilling one.
3632 Store them in order of decreasing preference in potential_reload_regs. */
3633
3634static void
2c5d9e37
RK
3635order_regs_for_reload (global)
3636 int global;
32131a9c
RK
3637{
3638 register int i;
3639 register int o = 0;
3640 int large = 0;
3641
3642 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3643
3644 CLEAR_HARD_REG_SET (bad_spill_regs);
3645
3646 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3647 potential_reload_regs[i] = -1;
3648
3649 /* Count number of uses of each hard reg by pseudo regs allocated to it
3650 and then order them by decreasing use. */
3651
3652 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3653 {
3654 hard_reg_n_uses[i].uses = 0;
3655 hard_reg_n_uses[i].regno = i;
3656 }
3657
3658 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3659 {
3660 int regno = reg_renumber[i];
3661 if (regno >= 0)
3662 {
3663 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3664 while (regno < lim)
2c5d9e37
RK
3665 {
3666 /* If allocated by local-alloc, show more uses since
3667 we're not going to be able to reallocate it, but
3668 we might if allocated by global alloc. */
3669 if (global && reg_allocno[i] < 0)
3670 hard_reg_n_uses[regno].uses += (reg_n_refs[i] + 1) / 2;
3671
3672 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3673 }
32131a9c
RK
3674 }
3675 large += reg_n_refs[i];
3676 }
3677
3678 /* Now fixed registers (which cannot safely be used for reloading)
3679 get a very high use count so they will be considered least desirable.
3680 Registers used explicitly in the rtl code are almost as bad. */
3681
3682 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3683 {
3684 if (fixed_regs[i])
3685 {
3686 hard_reg_n_uses[i].uses += 2 * large + 2;
3687 SET_HARD_REG_BIT (bad_spill_regs, i);
3688 }
3689 else if (regs_explicitly_used[i])
3690 {
3691 hard_reg_n_uses[i].uses += large + 1;
546b63fb 3692#ifndef SMALL_REGISTER_CLASSES
32131a9c
RK
3693 /* ??? We are doing this here because of the potential that
3694 bad code may be generated if a register explicitly used in
3695 an insn was used as a spill register for that insn. But
3696 not using these are spill registers may lose on some machine.
3697 We'll have to see how this works out. */
3698 SET_HARD_REG_BIT (bad_spill_regs, i);
546b63fb 3699#endif
32131a9c
RK
3700 }
3701 }
3ec2ea3e
DE
3702 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3703 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
32131a9c
RK
3704
3705#ifdef ELIMINABLE_REGS
3706 /* If registers other than the frame pointer are eliminable, mark them as
3707 poor choices. */
3708 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3709 {
3710 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3711 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3712 }
3713#endif
3714
3715 /* Prefer registers not so far used, for use in temporary loading.
3716 Among them, if REG_ALLOC_ORDER is defined, use that order.
3717 Otherwise, prefer registers not preserved by calls. */
3718
3719#ifdef REG_ALLOC_ORDER
3720 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3721 {
3722 int regno = reg_alloc_order[i];
3723
3724 if (hard_reg_n_uses[regno].uses == 0)
3725 potential_reload_regs[o++] = regno;
3726 }
3727#else
3728 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3729 {
3730 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3731 potential_reload_regs[o++] = i;
3732 }
3733 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3734 {
3735 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3736 potential_reload_regs[o++] = i;
3737 }
3738#endif
3739
3740 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3741 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3742
3743 /* Now add the regs that are already used,
3744 preferring those used less often. The fixed and otherwise forbidden
3745 registers will be at the end of this list. */
3746
3747 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3748 if (hard_reg_n_uses[i].uses != 0)
3749 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3750}
3751\f
a5339699 3752/* Used in reload_as_needed to sort the spilled regs. */
2f23a46d 3753
a5339699
RK
3754static int
3755compare_spill_regs (r1, r2)
3756 short *r1, *r2;
3757{
2f23a46d 3758 return *r1 - *r2;
a5339699
RK
3759}
3760
32131a9c
RK
3761/* Reload pseudo-registers into hard regs around each insn as needed.
3762 Additional register load insns are output before the insn that needs it
3763 and perhaps store insns after insns that modify the reloaded pseudo reg.
3764
3765 reg_last_reload_reg and reg_reloaded_contents keep track of
d08ea79f 3766 which registers are already available in reload registers.
32131a9c
RK
3767 We update these for the reloads that we perform,
3768 as the insns are scanned. */
3769
3770static void
3771reload_as_needed (first, live_known)
3772 rtx first;
3773 int live_known;
3774{
3775 register rtx insn;
3776 register int i;
3777 int this_block = 0;
3778 rtx x;
3779 rtx after_call = 0;
3780
4c9a05bc
RK
3781 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3782 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
32131a9c 3783 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
4c9a05bc 3784 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
32131a9c
RK
3785 reg_has_output_reload = (char *) alloca (max_regno);
3786 for (i = 0; i < n_spills; i++)
3787 {
3788 reg_reloaded_contents[i] = -1;
3789 reg_reloaded_insn[i] = 0;
3790 }
3791
3792 /* Reset all offsets on eliminable registers to their initial values. */
3793#ifdef ELIMINABLE_REGS
3794 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3795 {
3796 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
510dd77e 3797 reg_eliminate[i].initial_offset);
32131a9c
RK
3798 reg_eliminate[i].previous_offset
3799 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3800 }
3801#else
3802 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3803 reg_eliminate[0].previous_offset
3804 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3805#endif
3806
3807 num_not_at_initial_offset = 0;
3808
a5339699
RK
3809 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
3810 pack registers with group needs. */
3811 if (n_spills > 1)
5f40cc2d
RK
3812 {
3813 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3814 for (i = 0; i < n_spills; i++)
3815 spill_reg_order[spill_regs[i]] = i;
3816 }
a5339699 3817
32131a9c
RK
3818 for (insn = first; insn;)
3819 {
3820 register rtx next = NEXT_INSN (insn);
3821
3822 /* Notice when we move to a new basic block. */
aa2c50d6 3823 if (live_known && this_block + 1 < n_basic_blocks
32131a9c
RK
3824 && insn == basic_block_head[this_block+1])
3825 ++this_block;
3826
3827 /* If we pass a label, copy the offsets from the label information
3828 into the current offsets of each elimination. */
3829 if (GET_CODE (insn) == CODE_LABEL)
2a4b5f3b
RK
3830 {
3831 num_not_at_initial_offset = 0;
3832 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3833 {
3834 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3835 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
1d0d98f3
RK
3836 if (reg_eliminate[i].can_eliminate
3837 && (reg_eliminate[i].offset
3838 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
3839 num_not_at_initial_offset++;
3840 }
3841 }
32131a9c
RK
3842
3843 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3844 {
3845 rtx avoid_return_reg = 0;
0639444f 3846 rtx oldpat = PATTERN (insn);
32131a9c
RK
3847
3848#ifdef SMALL_REGISTER_CLASSES
3849 /* Set avoid_return_reg if this is an insn
3850 that might use the value of a function call. */
3851 if (GET_CODE (insn) == CALL_INSN)
3852 {
3853 if (GET_CODE (PATTERN (insn)) == SET)
3854 after_call = SET_DEST (PATTERN (insn));
3855 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3856 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3857 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3858 else
3859 after_call = 0;
3860 }
3861 else if (after_call != 0
3862 && !(GET_CODE (PATTERN (insn)) == SET
3863 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3864 {
2b979c57 3865 if (reg_referenced_p (after_call, PATTERN (insn)))
32131a9c
RK
3866 avoid_return_reg = after_call;
3867 after_call = 0;
3868 }
3869#endif /* SMALL_REGISTER_CLASSES */
3870
2758481d
RS
3871 /* If this is a USE and CLOBBER of a MEM, ensure that any
3872 references to eliminable registers have been removed. */
3873
3874 if ((GET_CODE (PATTERN (insn)) == USE
3875 || GET_CODE (PATTERN (insn)) == CLOBBER)
3876 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3877 XEXP (XEXP (PATTERN (insn), 0), 0)
3878 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
fb3821f7 3879 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
2758481d 3880
32131a9c
RK
3881 /* If we need to do register elimination processing, do so.
3882 This might delete the insn, in which case we are done. */
3883 if (num_eliminable && GET_MODE (insn) == QImode)
3884 {
3885 eliminate_regs_in_insn (insn, 1);
3886 if (GET_CODE (insn) == NOTE)
3887 {
3888 insn = next;
3889 continue;
3890 }
3891 }
3892
3893 if (GET_MODE (insn) == VOIDmode)
3894 n_reloads = 0;
3895 /* First find the pseudo regs that must be reloaded for this insn.
3896 This info is returned in the tables reload_... (see reload.h).
3897 Also modify the body of INSN by substituting RELOAD
3898 rtx's for those pseudo regs. */
3899 else
3900 {
3901 bzero (reg_has_output_reload, max_regno);
3902 CLEAR_HARD_REG_SET (reg_is_output_reload);
3903
3904 find_reloads (insn, 1, spill_indirect_levels, live_known,
3905 spill_reg_order);
3906 }
3907
3908 if (n_reloads > 0)
3909 {
3c3eeea6
RK
3910 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3911 rtx p;
32131a9c
RK
3912 int class;
3913
3914 /* If this block has not had spilling done for a
546b63fb
RK
3915 particular clas and we have any non-optionals that need a
3916 spill reg in that class, abort. */
32131a9c
RK
3917
3918 for (class = 0; class < N_REG_CLASSES; class++)
3919 if (basic_block_needs[class] != 0
3920 && basic_block_needs[class][this_block] == 0)
3921 for (i = 0; i < n_reloads; i++)
546b63fb
RK
3922 if (class == (int) reload_reg_class[i]
3923 && reload_reg_rtx[i] == 0
3924 && ! reload_optional[i]
3925 && (reload_in[i] != 0 || reload_out[i] != 0
3926 || reload_secondary_p[i] != 0))
a89b2cc4 3927 fatal_insn ("Non-optional registers need a spill register", insn);
32131a9c
RK
3928
3929 /* Now compute which reload regs to reload them into. Perhaps
3930 reusing reload regs from previous insns, or else output
3931 load insns to reload them. Maybe output store insns too.
3932 Record the choices of reload reg in reload_reg_rtx. */
3933 choose_reload_regs (insn, avoid_return_reg);
3934
546b63fb
RK
3935#ifdef SMALL_REGISTER_CLASSES
3936 /* Merge any reloads that we didn't combine for fear of
3937 increasing the number of spill registers needed but now
3938 discover can be safely merged. */
3939 merge_assigned_reloads (insn);
3940#endif
3941
32131a9c
RK
3942 /* Generate the insns to reload operands into or out of
3943 their reload regs. */
3944 emit_reload_insns (insn);
3945
3946 /* Substitute the chosen reload regs from reload_reg_rtx
3947 into the insn's body (or perhaps into the bodies of other
3948 load and store insn that we just made for reloading
3949 and that we moved the structure into). */
3950 subst_reloads ();
3c3eeea6
RK
3951
3952 /* If this was an ASM, make sure that all the reload insns
3953 we have generated are valid. If not, give an error
3954 and delete them. */
3955
3956 if (asm_noperands (PATTERN (insn)) >= 0)
3957 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3958 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3959 && (recog_memoized (p) < 0
3960 || (insn_extract (p),
3961 ! constrain_operands (INSN_CODE (p), 1))))
3962 {
3963 error_for_asm (insn,
3964 "`asm' operand requires impossible reload");
3965 PUT_CODE (p, NOTE);
3966 NOTE_SOURCE_FILE (p) = 0;
3967 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3968 }
32131a9c
RK
3969 }
3970 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3971 is no longer validly lying around to save a future reload.
3972 Note that this does not detect pseudos that were reloaded
3973 for this insn in order to be stored in
3974 (obeying register constraints). That is correct; such reload
3975 registers ARE still valid. */
0639444f 3976 note_stores (oldpat, forget_old_reloads_1);
32131a9c
RK
3977
3978 /* There may have been CLOBBER insns placed after INSN. So scan
3979 between INSN and NEXT and use them to forget old reloads. */
3980 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3981 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3982 note_stores (PATTERN (x), forget_old_reloads_1);
3983
3984#ifdef AUTO_INC_DEC
3985 /* Likewise for regs altered by auto-increment in this insn.
3986 But note that the reg-notes are not changed by reloading:
3987 they still contain the pseudo-regs, not the spill regs. */
3988 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3989 if (REG_NOTE_KIND (x) == REG_INC)
3990 {
3991 /* See if this pseudo reg was reloaded in this insn.
3992 If so, its last-reload info is still valid
3993 because it is based on this insn's reload. */
3994 for (i = 0; i < n_reloads; i++)
3995 if (reload_out[i] == XEXP (x, 0))
3996 break;
3997
08fb99fa 3998 if (i == n_reloads)
9a881562 3999 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
32131a9c
RK
4000 }
4001#endif
4002 }
4003 /* A reload reg's contents are unknown after a label. */
4004 if (GET_CODE (insn) == CODE_LABEL)
4005 for (i = 0; i < n_spills; i++)
4006 {
4007 reg_reloaded_contents[i] = -1;
4008 reg_reloaded_insn[i] = 0;
4009 }
4010
4011 /* Don't assume a reload reg is still good after a call insn
4012 if it is a call-used reg. */
546b63fb 4013 else if (GET_CODE (insn) == CALL_INSN)
32131a9c
RK
4014 for (i = 0; i < n_spills; i++)
4015 if (call_used_regs[spill_regs[i]])
4016 {
4017 reg_reloaded_contents[i] = -1;
4018 reg_reloaded_insn[i] = 0;
4019 }
4020
4021 /* In case registers overlap, allow certain insns to invalidate
4022 particular hard registers. */
4023
4024#ifdef INSN_CLOBBERS_REGNO_P
4025 for (i = 0 ; i < n_spills ; i++)
4026 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
4027 {
4028 reg_reloaded_contents[i] = -1;
4029 reg_reloaded_insn[i] = 0;
4030 }
4031#endif
4032
4033 insn = next;
4034
4035#ifdef USE_C_ALLOCA
4036 alloca (0);
4037#endif
4038 }
4039}
4040
4041/* Discard all record of any value reloaded from X,
4042 or reloaded in X from someplace else;
4043 unless X is an output reload reg of the current insn.
4044
4045 X may be a hard reg (the reload reg)
4046 or it may be a pseudo reg that was reloaded from. */
4047
4048static void
9a881562 4049forget_old_reloads_1 (x, ignored)
32131a9c 4050 rtx x;
9a881562 4051 rtx ignored;
32131a9c
RK
4052{
4053 register int regno;
4054 int nr;
0a2e51a9
RS
4055 int offset = 0;
4056
4057 /* note_stores does give us subregs of hard regs. */
4058 while (GET_CODE (x) == SUBREG)
4059 {
4060 offset += SUBREG_WORD (x);
4061 x = SUBREG_REG (x);
4062 }
32131a9c
RK
4063
4064 if (GET_CODE (x) != REG)
4065 return;
4066
0a2e51a9 4067 regno = REGNO (x) + offset;
32131a9c
RK
4068
4069 if (regno >= FIRST_PSEUDO_REGISTER)
4070 nr = 1;
4071 else
4072 {
4073 int i;
4074 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4075 /* Storing into a spilled-reg invalidates its contents.
4076 This can happen if a block-local pseudo is allocated to that reg
4077 and it wasn't spilled because this block's total need is 0.
4078 Then some insn might have an optional reload and use this reg. */
4079 for (i = 0; i < nr; i++)
4080 if (spill_reg_order[regno + i] >= 0
4081 /* But don't do this if the reg actually serves as an output
4082 reload reg in the current instruction. */
4083 && (n_reloads == 0
4084 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4085 {
4086 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4087 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4088 }
4089 }
4090
4091 /* Since value of X has changed,
4092 forget any value previously copied from it. */
4093
4094 while (nr-- > 0)
4095 /* But don't forget a copy if this is the output reload
4096 that establishes the copy's validity. */
4097 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4098 reg_last_reload_reg[regno + nr] = 0;
4099}
4100\f
4101/* For each reload, the mode of the reload register. */
4102static enum machine_mode reload_mode[MAX_RELOADS];
4103
4104/* For each reload, the largest number of registers it will require. */
4105static int reload_nregs[MAX_RELOADS];
4106
4107/* Comparison function for qsort to decide which of two reloads
4108 should be handled first. *P1 and *P2 are the reload numbers. */
4109
4110static int
4111reload_reg_class_lower (p1, p2)
4112 short *p1, *p2;
4113{
4114 register int r1 = *p1, r2 = *p2;
4115 register int t;
a8fdc208 4116
32131a9c
RK
4117 /* Consider required reloads before optional ones. */
4118 t = reload_optional[r1] - reload_optional[r2];
4119 if (t != 0)
4120 return t;
4121
4122 /* Count all solitary classes before non-solitary ones. */
4123 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4124 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4125 if (t != 0)
4126 return t;
4127
4128 /* Aside from solitaires, consider all multi-reg groups first. */
4129 t = reload_nregs[r2] - reload_nregs[r1];
4130 if (t != 0)
4131 return t;
4132
4133 /* Consider reloads in order of increasing reg-class number. */
4134 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4135 if (t != 0)
4136 return t;
4137
4138 /* If reloads are equally urgent, sort by reload number,
4139 so that the results of qsort leave nothing to chance. */
4140 return r1 - r2;
4141}
4142\f
4143/* The following HARD_REG_SETs indicate when each hard register is
4144 used for a reload of various parts of the current insn. */
4145
4146/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4147static HARD_REG_SET reload_reg_used;
546b63fb
RK
4148/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4149static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4150/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4151static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4152/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4153static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4154/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4155static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c
RK
4156/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4157static HARD_REG_SET reload_reg_used_in_op_addr;
893bc853
RK
4158/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4159static HARD_REG_SET reload_reg_used_in_op_addr_reload;
546b63fb
RK
4160/* If reg is in use for a RELOAD_FOR_INSN reload. */
4161static HARD_REG_SET reload_reg_used_in_insn;
4162/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4163static HARD_REG_SET reload_reg_used_in_other_addr;
32131a9c
RK
4164
4165/* If reg is in use as a reload reg for any sort of reload. */
4166static HARD_REG_SET reload_reg_used_at_all;
4167
be7ae2a4
RK
4168/* If reg is use as an inherited reload. We just mark the first register
4169 in the group. */
4170static HARD_REG_SET reload_reg_used_for_inherit;
4171
546b63fb
RK
4172/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4173 TYPE. MODE is used to indicate how many consecutive regs are
4174 actually used. */
32131a9c
RK
4175
4176static void
546b63fb 4177mark_reload_reg_in_use (regno, opnum, type, mode)
32131a9c 4178 int regno;
546b63fb
RK
4179 int opnum;
4180 enum reload_type type;
32131a9c
RK
4181 enum machine_mode mode;
4182{
4183 int nregs = HARD_REGNO_NREGS (regno, mode);
4184 int i;
4185
4186 for (i = regno; i < nregs + regno; i++)
4187 {
546b63fb 4188 switch (type)
32131a9c
RK
4189 {
4190 case RELOAD_OTHER:
4191 SET_HARD_REG_BIT (reload_reg_used, i);
4192 break;
4193
546b63fb
RK
4194 case RELOAD_FOR_INPUT_ADDRESS:
4195 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
32131a9c
RK
4196 break;
4197
546b63fb
RK
4198 case RELOAD_FOR_OUTPUT_ADDRESS:
4199 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
32131a9c
RK
4200 break;
4201
4202 case RELOAD_FOR_OPERAND_ADDRESS:
4203 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4204 break;
4205
893bc853
RK
4206 case RELOAD_FOR_OPADDR_ADDR:
4207 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4208 break;
4209
546b63fb
RK
4210 case RELOAD_FOR_OTHER_ADDRESS:
4211 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4212 break;
4213
32131a9c 4214 case RELOAD_FOR_INPUT:
546b63fb 4215 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
32131a9c
RK
4216 break;
4217
4218 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4219 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4220 break;
4221
4222 case RELOAD_FOR_INSN:
4223 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
32131a9c
RK
4224 break;
4225 }
4226
4227 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4228 }
4229}
4230
be7ae2a4
RK
4231/* Similarly, but show REGNO is no longer in use for a reload. */
4232
4233static void
4234clear_reload_reg_in_use (regno, opnum, type, mode)
4235 int regno;
4236 int opnum;
4237 enum reload_type type;
4238 enum machine_mode mode;
4239{
4240 int nregs = HARD_REGNO_NREGS (regno, mode);
4241 int i;
4242
4243 for (i = regno; i < nregs + regno; i++)
4244 {
4245 switch (type)
4246 {
4247 case RELOAD_OTHER:
4248 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4249 break;
4250
4251 case RELOAD_FOR_INPUT_ADDRESS:
4252 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4253 break;
4254
4255 case RELOAD_FOR_OUTPUT_ADDRESS:
4256 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4257 break;
4258
4259 case RELOAD_FOR_OPERAND_ADDRESS:
4260 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4261 break;
4262
893bc853
RK
4263 case RELOAD_FOR_OPADDR_ADDR:
4264 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4265 break;
4266
be7ae2a4
RK
4267 case RELOAD_FOR_OTHER_ADDRESS:
4268 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4269 break;
4270
4271 case RELOAD_FOR_INPUT:
4272 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4273 break;
4274
4275 case RELOAD_FOR_OUTPUT:
4276 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4277 break;
4278
4279 case RELOAD_FOR_INSN:
4280 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4281 break;
4282 }
4283 }
4284}
4285
32131a9c 4286/* 1 if reg REGNO is free as a reload reg for a reload of the sort
546b63fb 4287 specified by OPNUM and TYPE. */
32131a9c
RK
4288
4289static int
546b63fb 4290reload_reg_free_p (regno, opnum, type)
32131a9c 4291 int regno;
546b63fb
RK
4292 int opnum;
4293 enum reload_type type;
32131a9c 4294{
546b63fb
RK
4295 int i;
4296
4297 /* In use for a RELOAD_OTHER means it's not available for anything except
4298 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4299 to be used only for inputs. */
4300
4301 if (type != RELOAD_FOR_OTHER_ADDRESS
4302 && TEST_HARD_REG_BIT (reload_reg_used, regno))
32131a9c 4303 return 0;
546b63fb
RK
4304
4305 switch (type)
32131a9c
RK
4306 {
4307 case RELOAD_OTHER:
224f1d71
RK
4308 /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
4309 we can't use it for RELOAD_OTHER. */
4310 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4311 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4312 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4313 return 0;
4314
4315 for (i = 0; i < reload_n_operands; i++)
4316 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4317 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4318 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4319 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4320 return 0;
4321
4322 return 1;
32131a9c 4323
32131a9c 4324 case RELOAD_FOR_INPUT:
546b63fb
RK
4325 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4326 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4327 return 0;
4328
893bc853
RK
4329 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4330 return 0;
4331
546b63fb
RK
4332 /* If it is used for some other input, can't use it. */
4333 for (i = 0; i < reload_n_operands; i++)
4334 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4335 return 0;
4336
4337 /* If it is used in a later operand's address, can't use it. */
4338 for (i = opnum + 1; i < reload_n_operands; i++)
4339 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4340 return 0;
4341
4342 return 1;
4343
4344 case RELOAD_FOR_INPUT_ADDRESS:
4345 /* Can't use a register if it is used for an input address for this
4346 operand or used as an input in an earlier one. */
4347 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4348 return 0;
4349
4350 for (i = 0; i < opnum; i++)
4351 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4352 return 0;
4353
4354 return 1;
4355
4356 case RELOAD_FOR_OUTPUT_ADDRESS:
4357 /* Can't use a register if it is used for an output address for this
4358 operand or used as an output in this or a later operand. */
4359 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4360 return 0;
4361
4362 for (i = opnum; i < reload_n_operands; i++)
4363 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4364 return 0;
4365
4366 return 1;
4367
32131a9c 4368 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4369 for (i = 0; i < reload_n_operands; i++)
4370 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4371 return 0;
4372
4373 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4374 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4375
893bc853
RK
4376 case RELOAD_FOR_OPADDR_ADDR:
4377 for (i = 0; i < reload_n_operands; i++)
4378 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4379 return 0;
4380
4381 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4382
32131a9c 4383 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4384 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4385 outputs, or an operand address for this or an earlier output. */
4386 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4387 return 0;
4388
4389 for (i = 0; i < reload_n_operands; i++)
4390 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4391 return 0;
4392
4393 for (i = 0; i <= opnum; i++)
4394 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4395 return 0;
4396
4397 return 1;
4398
4399 case RELOAD_FOR_INSN:
4400 for (i = 0; i < reload_n_operands; i++)
4401 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4402 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4403 return 0;
4404
4405 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4406 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4407
4408 case RELOAD_FOR_OTHER_ADDRESS:
4409 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4410 }
4411 abort ();
4412}
4413
4414/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4415 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4416 is not in use for a reload in any prior part of the insn.
4417
4418 We can assume that the reload reg was already tested for availability
4419 at the time it is needed, and we should not check this again,
4420 in case the reg has already been marked in use. */
4421
4422static int
546b63fb 4423reload_reg_free_before_p (regno, opnum, type)
32131a9c 4424 int regno;
546b63fb
RK
4425 int opnum;
4426 enum reload_type type;
32131a9c 4427{
546b63fb
RK
4428 int i;
4429
4430 switch (type)
32131a9c 4431 {
546b63fb
RK
4432 case RELOAD_FOR_OTHER_ADDRESS:
4433 /* These always come first. */
32131a9c
RK
4434 return 1;
4435
546b63fb
RK
4436 case RELOAD_OTHER:
4437 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4438
32131a9c 4439 /* If this use is for part of the insn,
546b63fb
RK
4440 check the reg is not in use for any prior part. It is tempting
4441 to try to do this by falling through from objecs that occur
4442 later in the insn to ones that occur earlier, but that will not
4443 correctly take into account the fact that here we MUST ignore
4444 things that would prevent the register from being allocated in
4445 the first place, since we know that it was allocated. */
4446
4447 case RELOAD_FOR_OUTPUT_ADDRESS:
4448 /* Earlier reloads are for earlier outputs or their addresses,
4449 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4450 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4451 RELOAD_OTHER).. */
4452 for (i = 0; i < opnum; i++)
4453 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4454 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4455 return 0;
4456
4457 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
32131a9c 4458 return 0;
546b63fb
RK
4459
4460 for (i = 0; i < reload_n_operands; i++)
4461 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4462 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4463 return 0;
4464
4465 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4466 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4467 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4468
32131a9c 4469 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4470 /* This can't be used in the output address for this operand and
4471 anything that can't be used for it, except that we've already
4472 tested for RELOAD_FOR_INSN objects. */
4473
4474 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
32131a9c 4475 return 0;
546b63fb
RK
4476
4477 for (i = 0; i < opnum; i++)
4478 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4479 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4480 return 0;
4481
4482 for (i = 0; i < reload_n_operands; i++)
4483 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4484 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4485 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4486 return 0;
4487
4488 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4489
32131a9c 4490 case RELOAD_FOR_OPERAND_ADDRESS:
893bc853 4491 case RELOAD_FOR_OPADDR_ADDR:
546b63fb
RK
4492 case RELOAD_FOR_INSN:
4493 /* These can't conflict with inputs, or each other, so all we have to
4494 test is input addresses and the addresses of OTHER items. */
4495
4496 for (i = 0; i < reload_n_operands; i++)
4497 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4498 return 0;
4499
4500 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4501
32131a9c 4502 case RELOAD_FOR_INPUT:
546b63fb
RK
4503 /* The only things earlier are the address for this and
4504 earlier inputs, other inputs (which we know we don't conflict
4505 with), and addresses of RELOAD_OTHER objects. */
4506
4507 for (i = 0; i <= opnum; i++)
4508 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4509 return 0;
4510
4511 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4512
4513 case RELOAD_FOR_INPUT_ADDRESS:
4514 /* Similarly, all we have to check is for use in earlier inputs'
4515 addresses. */
4516 for (i = 0; i < opnum; i++)
4517 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4518 return 0;
4519
4520 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4521 }
4522 abort ();
4523}
4524
4525/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4526 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4527 is still available in REGNO at the end of the insn.
4528
4529 We can assume that the reload reg was already tested for availability
4530 at the time it is needed, and we should not check this again,
4531 in case the reg has already been marked in use. */
4532
4533static int
546b63fb 4534reload_reg_reaches_end_p (regno, opnum, type)
32131a9c 4535 int regno;
546b63fb
RK
4536 int opnum;
4537 enum reload_type type;
32131a9c 4538{
546b63fb
RK
4539 int i;
4540
4541 switch (type)
32131a9c
RK
4542 {
4543 case RELOAD_OTHER:
4544 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4545 its value must reach the end. */
4546 return 1;
4547
4548 /* If this use is for part of the insn,
546b63fb
RK
4549 its value reaches if no subsequent part uses the same register.
4550 Just like the above function, don't try to do this with lots
4551 of fallthroughs. */
4552
4553 case RELOAD_FOR_OTHER_ADDRESS:
4554 /* Here we check for everything else, since these don't conflict
4555 with anything else and everything comes later. */
4556
4557 for (i = 0; i < reload_n_operands; i++)
4558 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4559 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4560 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4561 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4562 return 0;
4563
4564 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4565 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4566 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4567
4568 case RELOAD_FOR_INPUT_ADDRESS:
4569 /* Similar, except that we check only for this and subsequent inputs
4570 and the address of only subsequent inputs and we do not need
4571 to check for RELOAD_OTHER objects since they are known not to
4572 conflict. */
4573
4574 for (i = opnum; i < reload_n_operands; i++)
4575 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4576 return 0;
4577
4578 for (i = opnum + 1; i < reload_n_operands; i++)
4579 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4580 return 0;
4581
4582 for (i = 0; i < reload_n_operands; i++)
4583 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4584 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4585 return 0;
4586
893bc853
RK
4587 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4588 return 0;
4589
546b63fb
RK
4590 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4591 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4592
32131a9c 4593 case RELOAD_FOR_INPUT:
546b63fb
RK
4594 /* Similar to input address, except we start at the next operand for
4595 both input and input address and we do not check for
4596 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4597 would conflict. */
4598
4599 for (i = opnum + 1; i < reload_n_operands; i++)
4600 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4601 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4602 return 0;
4603
4604 /* ... fall through ... */
4605
32131a9c 4606 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4607 /* Check outputs and their addresses. */
4608
4609 for (i = 0; i < reload_n_operands; i++)
4610 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4611 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4612 return 0;
4613
4614 return 1;
4615
893bc853
RK
4616 case RELOAD_FOR_OPADDR_ADDR:
4617 for (i = 0; i < reload_n_operands; i++)
4618 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4619 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4620 return 0;
4621
4622 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4623 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4624
546b63fb 4625 case RELOAD_FOR_INSN:
893bc853 4626 /* These conflict with other outputs with RELOAD_OTHER. So
546b63fb
RK
4627 we need only check for output addresses. */
4628
4629 opnum = -1;
4630
4631 /* ... fall through ... */
4632
32131a9c 4633 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4634 case RELOAD_FOR_OUTPUT_ADDRESS:
4635 /* We already know these can't conflict with a later output. So the
4636 only thing to check are later output addresses. */
4637 for (i = opnum + 1; i < reload_n_operands; i++)
4638 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4639 return 0;
4640
32131a9c
RK
4641 return 1;
4642 }
546b63fb 4643
32131a9c
RK
4644 abort ();
4645}
4646\f
351aa1c1
RK
4647/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4648 Return 0 otherwise.
4649
4650 This function uses the same algorithm as reload_reg_free_p above. */
4651
4652static int
4653reloads_conflict (r1, r2)
4654 int r1, r2;
4655{
4656 enum reload_type r1_type = reload_when_needed[r1];
4657 enum reload_type r2_type = reload_when_needed[r2];
4658 int r1_opnum = reload_opnum[r1];
4659 int r2_opnum = reload_opnum[r2];
4660
adab4fc5 4661 /* RELOAD_OTHER conflicts with everything except RELOAD_FOR_OTHER_ADDRESS. */
351aa1c1 4662
adab4fc5 4663 if (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS)
351aa1c1
RK
4664 return 1;
4665
4666 /* Otherwise, check conflicts differently for each type. */
4667
4668 switch (r1_type)
4669 {
4670 case RELOAD_FOR_INPUT:
4671 return (r2_type == RELOAD_FOR_INSN
4672 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
893bc853 4673 || r2_type == RELOAD_FOR_OPADDR_ADDR
351aa1c1
RK
4674 || r2_type == RELOAD_FOR_INPUT
4675 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4676
4677 case RELOAD_FOR_INPUT_ADDRESS:
4678 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4679 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4680
4681 case RELOAD_FOR_OUTPUT_ADDRESS:
4682 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4683 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4684
4685 case RELOAD_FOR_OPERAND_ADDRESS:
4686 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4687 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4688
893bc853
RK
4689 case RELOAD_FOR_OPADDR_ADDR:
4690 return (r2_type == RELOAD_FOR_INPUT
4691 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4692
351aa1c1
RK
4693 case RELOAD_FOR_OUTPUT:
4694 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
0aaf4a58 4695 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
351aa1c1
RK
4696 && r2_opnum >= r1_opnum));
4697
4698 case RELOAD_FOR_INSN:
4699 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4700 || r2_type == RELOAD_FOR_INSN
4701 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4702
4703 case RELOAD_FOR_OTHER_ADDRESS:
4704 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4705
adab4fc5
RK
4706 case RELOAD_OTHER:
4707 return r2_type != RELOAD_FOR_OTHER_ADDRESS;
4708
351aa1c1
RK
4709 default:
4710 abort ();
4711 }
4712}
4713\f
32131a9c
RK
4714/* Vector of reload-numbers showing the order in which the reloads should
4715 be processed. */
4716short reload_order[MAX_RELOADS];
4717
4718/* Indexed by reload number, 1 if incoming value
4719 inherited from previous insns. */
4720char reload_inherited[MAX_RELOADS];
4721
4722/* For an inherited reload, this is the insn the reload was inherited from,
4723 if we know it. Otherwise, this is 0. */
4724rtx reload_inheritance_insn[MAX_RELOADS];
4725
4726/* If non-zero, this is a place to get the value of the reload,
4727 rather than using reload_in. */
4728rtx reload_override_in[MAX_RELOADS];
4729
4730/* For each reload, the index in spill_regs of the spill register used,
4731 or -1 if we did not need one of the spill registers for this reload. */
4732int reload_spill_index[MAX_RELOADS];
4733
32131a9c
RK
4734/* Find a spill register to use as a reload register for reload R.
4735 LAST_RELOAD is non-zero if this is the last reload for the insn being
4736 processed.
4737
4738 Set reload_reg_rtx[R] to the register allocated.
4739
4740 If NOERROR is nonzero, we return 1 if successful,
4741 or 0 if we couldn't find a spill reg and we didn't change anything. */
4742
4743static int
4744allocate_reload_reg (r, insn, last_reload, noerror)
4745 int r;
4746 rtx insn;
4747 int last_reload;
4748 int noerror;
4749{
4750 int i;
4751 int pass;
4752 int count;
4753 rtx new;
4754 int regno;
4755
4756 /* If we put this reload ahead, thinking it is a group,
4757 then insist on finding a group. Otherwise we can grab a
a8fdc208 4758 reg that some other reload needs.
32131a9c
RK
4759 (That can happen when we have a 68000 DATA_OR_FP_REG
4760 which is a group of data regs or one fp reg.)
4761 We need not be so restrictive if there are no more reloads
4762 for this insn.
4763
4764 ??? Really it would be nicer to have smarter handling
4765 for that kind of reg class, where a problem like this is normal.
4766 Perhaps those classes should be avoided for reloading
4767 by use of more alternatives. */
4768
4769 int force_group = reload_nregs[r] > 1 && ! last_reload;
4770
4771 /* If we want a single register and haven't yet found one,
4772 take any reg in the right class and not in use.
4773 If we want a consecutive group, here is where we look for it.
4774
4775 We use two passes so we can first look for reload regs to
4776 reuse, which are already in use for other reloads in this insn,
4777 and only then use additional registers.
4778 I think that maximizing reuse is needed to make sure we don't
4779 run out of reload regs. Suppose we have three reloads, and
4780 reloads A and B can share regs. These need two regs.
4781 Suppose A and B are given different regs.
4782 That leaves none for C. */
4783 for (pass = 0; pass < 2; pass++)
4784 {
4785 /* I is the index in spill_regs.
4786 We advance it round-robin between insns to use all spill regs
4787 equally, so that inherited reloads have a chance
a5339699
RK
4788 of leapfrogging each other. Don't do this, however, when we have
4789 group needs and failure would be fatal; if we only have a relatively
4790 small number of spill registers, and more than one of them has
4791 group needs, then by starting in the middle, we may end up
4792 allocating the first one in such a way that we are not left with
4793 sufficient groups to handle the rest. */
4794
4795 if (noerror || ! force_group)
4796 i = last_spill_reg;
4797 else
4798 i = -1;
4799
4800 for (count = 0; count < n_spills; count++)
32131a9c
RK
4801 {
4802 int class = (int) reload_reg_class[r];
4803
4804 i = (i + 1) % n_spills;
4805
546b63fb
RK
4806 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4807 reload_when_needed[r])
32131a9c
RK
4808 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4809 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
be7ae2a4
RK
4810 /* Look first for regs to share, then for unshared. But
4811 don't share regs used for inherited reloads; they are
4812 the ones we want to preserve. */
4813 && (pass
4814 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4815 spill_regs[i])
4816 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4817 spill_regs[i]))))
32131a9c
RK
4818 {
4819 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4820 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4821 (on 68000) got us two FP regs. If NR is 1,
4822 we would reject both of them. */
4823 if (force_group)
4824 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4825 /* If we need only one reg, we have already won. */
4826 if (nr == 1)
4827 {
4828 /* But reject a single reg if we demand a group. */
4829 if (force_group)
4830 continue;
4831 break;
4832 }
4833 /* Otherwise check that as many consecutive regs as we need
4834 are available here.
4835 Also, don't use for a group registers that are
4836 needed for nongroups. */
4837 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4838 while (nr > 1)
4839 {
4840 regno = spill_regs[i] + nr - 1;
4841 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4842 && spill_reg_order[regno] >= 0
546b63fb
RK
4843 && reload_reg_free_p (regno, reload_opnum[r],
4844 reload_when_needed[r])
32131a9c
RK
4845 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4846 regno)))
4847 break;
4848 nr--;
4849 }
4850 if (nr == 1)
4851 break;
4852 }
4853 }
4854
4855 /* If we found something on pass 1, omit pass 2. */
4856 if (count < n_spills)
4857 break;
4858 }
4859
4860 /* We should have found a spill register by now. */
4861 if (count == n_spills)
4862 {
4863 if (noerror)
4864 return 0;
139fc12e 4865 goto failure;
32131a9c
RK
4866 }
4867
be7ae2a4
RK
4868 /* I is the index in SPILL_REG_RTX of the reload register we are to
4869 allocate. Get an rtx for it and find its register number. */
32131a9c
RK
4870
4871 new = spill_reg_rtx[i];
4872
4873 if (new == 0 || GET_MODE (new) != reload_mode[r])
be7ae2a4
RK
4874 spill_reg_rtx[i] = new
4875 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4876
32131a9c
RK
4877 regno = true_regnum (new);
4878
4879 /* Detect when the reload reg can't hold the reload mode.
4880 This used to be one `if', but Sequent compiler can't handle that. */
4881 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4882 {
4883 enum machine_mode test_mode = VOIDmode;
4884 if (reload_in[r])
4885 test_mode = GET_MODE (reload_in[r]);
4886 /* If reload_in[r] has VOIDmode, it means we will load it
4887 in whatever mode the reload reg has: to wit, reload_mode[r].
4888 We have already tested that for validity. */
4889 /* Aside from that, we need to test that the expressions
4890 to reload from or into have modes which are valid for this
4891 reload register. Otherwise the reload insns would be invalid. */
4892 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4893 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4894 if (! (reload_out[r] != 0
4895 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
be7ae2a4
RK
4896 {
4897 /* The reg is OK. */
4898 last_spill_reg = i;
4899
4900 /* Mark as in use for this insn the reload regs we use
4901 for this. */
4902 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4903 reload_when_needed[r], reload_mode[r]);
4904
4905 reload_reg_rtx[r] = new;
4906 reload_spill_index[r] = i;
4907 return 1;
4908 }
32131a9c
RK
4909 }
4910
4911 /* The reg is not OK. */
4912 if (noerror)
4913 return 0;
4914
139fc12e 4915 failure:
32131a9c
RK
4916 if (asm_noperands (PATTERN (insn)) < 0)
4917 /* It's the compiler's fault. */
a89b2cc4 4918 fatal_insn ("Could not find a spill register", insn);
32131a9c
RK
4919
4920 /* It's the user's fault; the operand's mode and constraint
4921 don't match. Disable this reload so we don't crash in final. */
4922 error_for_asm (insn,
4923 "`asm' operand constraint incompatible with operand size");
4924 reload_in[r] = 0;
4925 reload_out[r] = 0;
4926 reload_reg_rtx[r] = 0;
4927 reload_optional[r] = 1;
4928 reload_secondary_p[r] = 1;
4929
4930 return 1;
4931}
4932\f
4933/* Assign hard reg targets for the pseudo-registers we must reload
4934 into hard regs for this insn.
4935 Also output the instructions to copy them in and out of the hard regs.
4936
4937 For machines with register classes, we are responsible for
4938 finding a reload reg in the proper class. */
4939
4940static void
4941choose_reload_regs (insn, avoid_return_reg)
4942 rtx insn;
32131a9c
RK
4943 rtx avoid_return_reg;
4944{
4945 register int i, j;
4946 int max_group_size = 1;
4947 enum reg_class group_class = NO_REGS;
4948 int inheritance;
4949
4950 rtx save_reload_reg_rtx[MAX_RELOADS];
4951 char save_reload_inherited[MAX_RELOADS];
4952 rtx save_reload_inheritance_insn[MAX_RELOADS];
4953 rtx save_reload_override_in[MAX_RELOADS];
4954 int save_reload_spill_index[MAX_RELOADS];
4955 HARD_REG_SET save_reload_reg_used;
546b63fb
RK
4956 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4957 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4958 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4959 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c 4960 HARD_REG_SET save_reload_reg_used_in_op_addr;
893bc853 4961 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
546b63fb
RK
4962 HARD_REG_SET save_reload_reg_used_in_insn;
4963 HARD_REG_SET save_reload_reg_used_in_other_addr;
32131a9c
RK
4964 HARD_REG_SET save_reload_reg_used_at_all;
4965
4966 bzero (reload_inherited, MAX_RELOADS);
4c9a05bc
RK
4967 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4968 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
32131a9c
RK
4969
4970 CLEAR_HARD_REG_SET (reload_reg_used);
4971 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
32131a9c 4972 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
893bc853 4973 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
546b63fb
RK
4974 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4975 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
32131a9c 4976
546b63fb
RK
4977 for (i = 0; i < reload_n_operands; i++)
4978 {
4979 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4980 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4981 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4982 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4983 }
32131a9c
RK
4984
4985#ifdef SMALL_REGISTER_CLASSES
4986 /* Don't bother with avoiding the return reg
4987 if we have no mandatory reload that could use it. */
4988 if (avoid_return_reg)
4989 {
4990 int do_avoid = 0;
4991 int regno = REGNO (avoid_return_reg);
4992 int nregs
4993 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4994 int r;
4995
4996 for (r = regno; r < regno + nregs; r++)
4997 if (spill_reg_order[r] >= 0)
4998 for (j = 0; j < n_reloads; j++)
4999 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5000 && (reload_in[j] != 0 || reload_out[j] != 0
5001 || reload_secondary_p[j])
5002 &&
5003 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5004 do_avoid = 1;
5005 if (!do_avoid)
5006 avoid_return_reg = 0;
5007 }
5008#endif /* SMALL_REGISTER_CLASSES */
5009
5010#if 0 /* Not needed, now that we can always retry without inheritance. */
5011 /* See if we have more mandatory reloads than spill regs.
5012 If so, then we cannot risk optimizations that could prevent
a8fdc208 5013 reloads from sharing one spill register.
32131a9c
RK
5014
5015 Since we will try finding a better register than reload_reg_rtx
5016 unless it is equal to reload_in or reload_out, count such reloads. */
5017
5018 {
5019 int tem = 0;
5020#ifdef SMALL_REGISTER_CLASSES
5021 int tem = (avoid_return_reg != 0);
a8fdc208 5022#endif
32131a9c
RK
5023 for (j = 0; j < n_reloads; j++)
5024 if (! reload_optional[j]
5025 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5026 && (reload_reg_rtx[j] == 0
5027 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5028 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5029 tem++;
5030 if (tem > n_spills)
5031 must_reuse = 1;
5032 }
5033#endif
5034
5035#ifdef SMALL_REGISTER_CLASSES
5036 /* Don't use the subroutine call return reg for a reload
5037 if we are supposed to avoid it. */
5038 if (avoid_return_reg)
5039 {
5040 int regno = REGNO (avoid_return_reg);
5041 int nregs
5042 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5043 int r;
5044
5045 for (r = regno; r < regno + nregs; r++)
5046 if (spill_reg_order[r] >= 0)
5047 SET_HARD_REG_BIT (reload_reg_used, r);
5048 }
5049#endif /* SMALL_REGISTER_CLASSES */
5050
5051 /* In order to be certain of getting the registers we need,
5052 we must sort the reloads into order of increasing register class.
5053 Then our grabbing of reload registers will parallel the process
a8fdc208 5054 that provided the reload registers.
32131a9c
RK
5055
5056 Also note whether any of the reloads wants a consecutive group of regs.
5057 If so, record the maximum size of the group desired and what
5058 register class contains all the groups needed by this insn. */
5059
5060 for (j = 0; j < n_reloads; j++)
5061 {
5062 reload_order[j] = j;
5063 reload_spill_index[j] = -1;
5064
5065 reload_mode[j]
546b63fb
RK
5066 = (reload_inmode[j] == VOIDmode
5067 || (GET_MODE_SIZE (reload_outmode[j])
5068 > GET_MODE_SIZE (reload_inmode[j])))
5069 ? reload_outmode[j] : reload_inmode[j];
32131a9c
RK
5070
5071 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5072
5073 if (reload_nregs[j] > 1)
5074 {
5075 max_group_size = MAX (reload_nregs[j], max_group_size);
5076 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5077 }
5078
5079 /* If we have already decided to use a certain register,
5080 don't use it in another way. */
5081 if (reload_reg_rtx[j])
546b63fb 5082 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
32131a9c
RK
5083 reload_when_needed[j], reload_mode[j]);
5084 }
5085
5086 if (n_reloads > 1)
5087 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5088
4c9a05bc
RK
5089 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5090 sizeof reload_reg_rtx);
32131a9c 5091 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4c9a05bc
RK
5092 bcopy ((char *) reload_inheritance_insn,
5093 (char *) save_reload_inheritance_insn,
32131a9c 5094 sizeof reload_inheritance_insn);
4c9a05bc 5095 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
32131a9c 5096 sizeof reload_override_in);
4c9a05bc 5097 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
32131a9c
RK
5098 sizeof reload_spill_index);
5099 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5100 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
32131a9c
RK
5101 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5102 reload_reg_used_in_op_addr);
893bc853
RK
5103
5104 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5105 reload_reg_used_in_op_addr_reload);
5106
546b63fb
RK
5107 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5108 reload_reg_used_in_insn);
5109 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5110 reload_reg_used_in_other_addr);
5111
5112 for (i = 0; i < reload_n_operands; i++)
5113 {
5114 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5115 reload_reg_used_in_output[i]);
5116 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5117 reload_reg_used_in_input[i]);
5118 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5119 reload_reg_used_in_input_addr[i]);
5120 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5121 reload_reg_used_in_output_addr[i]);
5122 }
32131a9c 5123
58b1581b
RS
5124 /* If -O, try first with inheritance, then turning it off.
5125 If not -O, don't do inheritance.
5126 Using inheritance when not optimizing leads to paradoxes
5127 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5128 because one side of the comparison might be inherited. */
32131a9c 5129
58b1581b 5130 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
32131a9c
RK
5131 {
5132 /* Process the reloads in order of preference just found.
5133 Beyond this point, subregs can be found in reload_reg_rtx.
5134
5135 This used to look for an existing reloaded home for all
5136 of the reloads, and only then perform any new reloads.
5137 But that could lose if the reloads were done out of reg-class order
5138 because a later reload with a looser constraint might have an old
5139 home in a register needed by an earlier reload with a tighter constraint.
5140
5141 To solve this, we make two passes over the reloads, in the order
5142 described above. In the first pass we try to inherit a reload
5143 from a previous insn. If there is a later reload that needs a
5144 class that is a proper subset of the class being processed, we must
5145 also allocate a spill register during the first pass.
5146
5147 Then make a second pass over the reloads to allocate any reloads
5148 that haven't been given registers yet. */
5149
be7ae2a4
RK
5150 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5151
32131a9c
RK
5152 for (j = 0; j < n_reloads; j++)
5153 {
5154 register int r = reload_order[j];
5155
5156 /* Ignore reloads that got marked inoperative. */
5157 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5158 continue;
5159
5160 /* If find_reloads chose a to use reload_in or reload_out as a reload
5161 register, we don't need to chose one. Otherwise, try even if it found
5162 one since we might save an insn if we find the value lying around. */
5163 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5164 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5165 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5166 continue;
5167
5168#if 0 /* No longer needed for correct operation.
5169 It might give better code, or might not; worth an experiment? */
5170 /* If this is an optional reload, we can't inherit from earlier insns
5171 until we are sure that any non-optional reloads have been allocated.
5172 The following code takes advantage of the fact that optional reloads
5173 are at the end of reload_order. */
5174 if (reload_optional[r] != 0)
5175 for (i = 0; i < j; i++)
5176 if ((reload_out[reload_order[i]] != 0
5177 || reload_in[reload_order[i]] != 0
5178 || reload_secondary_p[reload_order[i]])
5179 && ! reload_optional[reload_order[i]]
5180 && reload_reg_rtx[reload_order[i]] == 0)
5181 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5182#endif
5183
5184 /* First see if this pseudo is already available as reloaded
5185 for a previous insn. We cannot try to inherit for reloads
5186 that are smaller than the maximum number of registers needed
5187 for groups unless the register we would allocate cannot be used
5188 for the groups.
5189
5190 We could check here to see if this is a secondary reload for
5191 an object that is already in a register of the desired class.
5192 This would avoid the need for the secondary reload register.
5193 But this is complex because we can't easily determine what
5194 objects might want to be loaded via this reload. So let a register
5195 be allocated here. In `emit_reload_insns' we suppress one of the
5196 loads in the case described above. */
5197
5198 if (inheritance)
5199 {
5200 register int regno = -1;
db660765 5201 enum machine_mode mode;
32131a9c
RK
5202
5203 if (reload_in[r] == 0)
5204 ;
5205 else if (GET_CODE (reload_in[r]) == REG)
db660765
TW
5206 {
5207 regno = REGNO (reload_in[r]);
5208 mode = GET_MODE (reload_in[r]);
5209 }
32131a9c 5210 else if (GET_CODE (reload_in_reg[r]) == REG)
db660765
TW
5211 {
5212 regno = REGNO (reload_in_reg[r]);
5213 mode = GET_MODE (reload_in_reg[r]);
5214 }
32131a9c
RK
5215#if 0
5216 /* This won't work, since REGNO can be a pseudo reg number.
5217 Also, it takes much more hair to keep track of all the things
5218 that can invalidate an inherited reload of part of a pseudoreg. */
5219 else if (GET_CODE (reload_in[r]) == SUBREG
5220 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5221 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5222#endif
5223
5224 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5225 {
5226 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5227
5228 if (reg_reloaded_contents[i] == regno
db660765
TW
5229 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5230 >= GET_MODE_SIZE (mode))
32131a9c
RK
5231 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5232 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5233 spill_regs[i])
5234 && (reload_nregs[r] == max_group_size
5235 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5236 spill_regs[i]))
546b63fb
RK
5237 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5238 reload_when_needed[r])
32131a9c 5239 && reload_reg_free_before_p (spill_regs[i],
546b63fb 5240 reload_opnum[r],
32131a9c
RK
5241 reload_when_needed[r]))
5242 {
5243 /* If a group is needed, verify that all the subsequent
5244 registers still have their values intact. */
5245 int nr
5246 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5247 int k;
5248
5249 for (k = 1; k < nr; k++)
5250 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5251 != regno)
5252 break;
5253
5254 if (k == nr)
5255 {
c74fa651
RS
5256 int i1;
5257
5258 /* We found a register that contains the
5259 value we need. If this register is the
5260 same as an `earlyclobber' operand of the
5261 current insn, just mark it as a place to
5262 reload from since we can't use it as the
5263 reload register itself. */
5264
5265 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5266 if (reg_overlap_mentioned_for_reload_p
5267 (reg_last_reload_reg[regno],
5268 reload_earlyclobbers[i1]))
5269 break;
5270
8908158d
RS
5271 if (i1 != n_earlyclobbers
5272 /* Don't really use the inherited spill reg
5273 if we need it wider than we've got it. */
5274 || (GET_MODE_SIZE (reload_mode[r])
5275 > GET_MODE_SIZE (mode)))
c74fa651
RS
5276 reload_override_in[r] = reg_last_reload_reg[regno];
5277 else
5278 {
54c40e68 5279 int k;
c74fa651
RS
5280 /* We can use this as a reload reg. */
5281 /* Mark the register as in use for this part of
5282 the insn. */
5283 mark_reload_reg_in_use (spill_regs[i],
5284 reload_opnum[r],
5285 reload_when_needed[r],
5286 reload_mode[r]);
5287 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5288 reload_inherited[r] = 1;
5289 reload_inheritance_insn[r]
5290 = reg_reloaded_insn[i];
5291 reload_spill_index[r] = i;
54c40e68
RS
5292 for (k = 0; k < nr; k++)
5293 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5294 spill_regs[i + k]);
c74fa651 5295 }
32131a9c
RK
5296 }
5297 }
5298 }
5299 }
5300
5301 /* Here's another way to see if the value is already lying around. */
5302 if (inheritance
5303 && reload_in[r] != 0
5304 && ! reload_inherited[r]
5305 && reload_out[r] == 0
5306 && (CONSTANT_P (reload_in[r])
5307 || GET_CODE (reload_in[r]) == PLUS
5308 || GET_CODE (reload_in[r]) == REG
5309 || GET_CODE (reload_in[r]) == MEM)
5310 && (reload_nregs[r] == max_group_size
5311 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5312 {
5313 register rtx equiv
5314 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
fb3821f7 5315 -1, NULL_PTR, 0, reload_mode[r]);
32131a9c
RK
5316 int regno;
5317
5318 if (equiv != 0)
5319 {
5320 if (GET_CODE (equiv) == REG)
5321 regno = REGNO (equiv);
5322 else if (GET_CODE (equiv) == SUBREG)
5323 {
f8a9e02b
RK
5324 /* This must be a SUBREG of a hard register.
5325 Make a new REG since this might be used in an
5326 address and not all machines support SUBREGs
5327 there. */
5328 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5329 equiv = gen_rtx (REG, reload_mode[r], regno);
32131a9c
RK
5330 }
5331 else
5332 abort ();
5333 }
5334
5335 /* If we found a spill reg, reject it unless it is free
5336 and of the desired class. */
5337 if (equiv != 0
5338 && ((spill_reg_order[regno] >= 0
546b63fb 5339 && ! reload_reg_free_before_p (regno, reload_opnum[r],
32131a9c
RK
5340 reload_when_needed[r]))
5341 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5342 regno)))
5343 equiv = 0;
5344
5345 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5346 equiv = 0;
5347
5348 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5349 equiv = 0;
5350
5351 /* We found a register that contains the value we need.
5352 If this register is the same as an `earlyclobber' operand
5353 of the current insn, just mark it as a place to reload from
5354 since we can't use it as the reload register itself. */
5355
5356 if (equiv != 0)
5357 for (i = 0; i < n_earlyclobbers; i++)
bfa30b22
RK
5358 if (reg_overlap_mentioned_for_reload_p (equiv,
5359 reload_earlyclobbers[i]))
32131a9c
RK
5360 {
5361 reload_override_in[r] = equiv;
5362 equiv = 0;
5363 break;
5364 }
5365
5366 /* JRV: If the equiv register we have found is explicitly
5367 clobbered in the current insn, mark but don't use, as above. */
5368
5369 if (equiv != 0 && regno_clobbered_p (regno, insn))
5370 {
5371 reload_override_in[r] = equiv;
5372 equiv = 0;
5373 }
5374
5375 /* If we found an equivalent reg, say no code need be generated
5376 to load it, and use it as our reload reg. */
3ec2ea3e 5377 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
32131a9c 5378 {
100338df
JL
5379 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5380 int k;
32131a9c
RK
5381 reload_reg_rtx[r] = equiv;
5382 reload_inherited[r] = 1;
100338df
JL
5383
5384 /* If any of the hard registers in EQUIV are spill
5385 registers, mark them as in use for this insn. */
5386 for (k = 0; k < nr; k++)
be7ae2a4 5387 {
100338df
JL
5388 i = spill_reg_order[regno + k];
5389 if (i >= 0)
5390 {
5391 mark_reload_reg_in_use (regno, reload_opnum[r],
5392 reload_when_needed[r],
5393 reload_mode[r]);
5394 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5395 regno + k);
5396 }
be7ae2a4 5397 }
32131a9c
RK
5398 }
5399 }
5400
5401 /* If we found a register to use already, or if this is an optional
5402 reload, we are done. */
5403 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5404 continue;
5405
5406#if 0 /* No longer needed for correct operation. Might or might not
5407 give better code on the average. Want to experiment? */
5408
5409 /* See if there is a later reload that has a class different from our
5410 class that intersects our class or that requires less register
5411 than our reload. If so, we must allocate a register to this
5412 reload now, since that reload might inherit a previous reload
5413 and take the only available register in our class. Don't do this
5414 for optional reloads since they will force all previous reloads
5415 to be allocated. Also don't do this for reloads that have been
5416 turned off. */
5417
5418 for (i = j + 1; i < n_reloads; i++)
5419 {
5420 int s = reload_order[i];
5421
d45cf215
RS
5422 if ((reload_in[s] == 0 && reload_out[s] == 0
5423 && ! reload_secondary_p[s])
32131a9c
RK
5424 || reload_optional[s])
5425 continue;
5426
5427 if ((reload_reg_class[s] != reload_reg_class[r]
5428 && reg_classes_intersect_p (reload_reg_class[r],
5429 reload_reg_class[s]))
5430 || reload_nregs[s] < reload_nregs[r])
5431 break;
5432 }
5433
5434 if (i == n_reloads)
5435 continue;
5436
5437 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5438#endif
5439 }
5440
5441 /* Now allocate reload registers for anything non-optional that
5442 didn't get one yet. */
5443 for (j = 0; j < n_reloads; j++)
5444 {
5445 register int r = reload_order[j];
5446
5447 /* Ignore reloads that got marked inoperative. */
5448 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5449 continue;
5450
5451 /* Skip reloads that already have a register allocated or are
5452 optional. */
5453 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5454 continue;
5455
5456 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5457 break;
5458 }
5459
5460 /* If that loop got all the way, we have won. */
5461 if (j == n_reloads)
5462 break;
5463
5464 fail:
5465 /* Loop around and try without any inheritance. */
5466 /* First undo everything done by the failed attempt
5467 to allocate with inheritance. */
4c9a05bc
RK
5468 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5469 sizeof reload_reg_rtx);
5470 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5471 sizeof reload_inherited);
5472 bcopy ((char *) save_reload_inheritance_insn,
5473 (char *) reload_inheritance_insn,
32131a9c 5474 sizeof reload_inheritance_insn);
4c9a05bc 5475 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
32131a9c 5476 sizeof reload_override_in);
4c9a05bc 5477 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
32131a9c
RK
5478 sizeof reload_spill_index);
5479 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5480 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
32131a9c
RK
5481 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5482 save_reload_reg_used_in_op_addr);
893bc853
RK
5483 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5484 save_reload_reg_used_in_op_addr_reload);
546b63fb
RK
5485 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5486 save_reload_reg_used_in_insn);
5487 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5488 save_reload_reg_used_in_other_addr);
5489
5490 for (i = 0; i < reload_n_operands; i++)
5491 {
5492 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5493 save_reload_reg_used_in_input[i]);
5494 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5495 save_reload_reg_used_in_output[i]);
5496 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5497 save_reload_reg_used_in_input_addr[i]);
5498 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5499 save_reload_reg_used_in_output_addr[i]);
5500 }
32131a9c
RK
5501 }
5502
5503 /* If we thought we could inherit a reload, because it seemed that
5504 nothing else wanted the same reload register earlier in the insn,
5505 verify that assumption, now that all reloads have been assigned. */
5506
5507 for (j = 0; j < n_reloads; j++)
5508 {
5509 register int r = reload_order[j];
5510
5511 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5512 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
546b63fb 5513 reload_opnum[r],
32131a9c
RK
5514 reload_when_needed[r]))
5515 reload_inherited[r] = 0;
5516
5517 /* If we found a better place to reload from,
5518 validate it in the same fashion, if it is a reload reg. */
5519 if (reload_override_in[r]
5520 && (GET_CODE (reload_override_in[r]) == REG
5521 || GET_CODE (reload_override_in[r]) == SUBREG))
5522 {
5523 int regno = true_regnum (reload_override_in[r]);
5524 if (spill_reg_order[regno] >= 0
546b63fb
RK
5525 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5526 reload_when_needed[r]))
32131a9c
RK
5527 reload_override_in[r] = 0;
5528 }
5529 }
5530
5531 /* Now that reload_override_in is known valid,
5532 actually override reload_in. */
5533 for (j = 0; j < n_reloads; j++)
5534 if (reload_override_in[j])
5535 reload_in[j] = reload_override_in[j];
5536
5537 /* If this reload won't be done because it has been cancelled or is
5538 optional and not inherited, clear reload_reg_rtx so other
5539 routines (such as subst_reloads) don't get confused. */
5540 for (j = 0; j < n_reloads; j++)
be7ae2a4
RK
5541 if (reload_reg_rtx[j] != 0
5542 && ((reload_optional[j] && ! reload_inherited[j])
5543 || (reload_in[j] == 0 && reload_out[j] == 0
5544 && ! reload_secondary_p[j])))
5545 {
5546 int regno = true_regnum (reload_reg_rtx[j]);
5547
5548 if (spill_reg_order[regno] >= 0)
5549 clear_reload_reg_in_use (regno, reload_opnum[j],
5550 reload_when_needed[j], reload_mode[j]);
5551 reload_reg_rtx[j] = 0;
5552 }
32131a9c
RK
5553
5554 /* Record which pseudos and which spill regs have output reloads. */
5555 for (j = 0; j < n_reloads; j++)
5556 {
5557 register int r = reload_order[j];
5558
5559 i = reload_spill_index[r];
5560
5561 /* I is nonneg if this reload used one of the spill regs.
5562 If reload_reg_rtx[r] is 0, this is an optional reload
5563 that we opted to ignore. */
5564 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5565 && reload_reg_rtx[r] != 0)
5566 {
5567 register int nregno = REGNO (reload_out[r]);
372e033b
RS
5568 int nr = 1;
5569
5570 if (nregno < FIRST_PSEUDO_REGISTER)
5571 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
32131a9c
RK
5572
5573 while (--nr >= 0)
372e033b
RS
5574 reg_has_output_reload[nregno + nr] = 1;
5575
5576 if (i >= 0)
32131a9c 5577 {
372e033b
RS
5578 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5579 while (--nr >= 0)
32131a9c
RK
5580 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5581 }
5582
5583 if (reload_when_needed[r] != RELOAD_OTHER
546b63fb
RK
5584 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5585 && reload_when_needed[r] != RELOAD_FOR_INSN)
32131a9c
RK
5586 abort ();
5587 }
5588 }
5589}
5590\f
546b63fb
RK
5591/* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5592 reloads of the same item for fear that we might not have enough reload
5593 registers. However, normally they will get the same reload register
5594 and hence actually need not be loaded twice.
5595
5596 Here we check for the most common case of this phenomenon: when we have
5597 a number of reloads for the same object, each of which were allocated
5598 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5599 reload, and is not modified in the insn itself. If we find such,
5600 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5601 This will not increase the number of spill registers needed and will
5602 prevent redundant code. */
5603
5604#ifdef SMALL_REGISTER_CLASSES
5605
5606static void
5607merge_assigned_reloads (insn)
5608 rtx insn;
5609{
5610 int i, j;
5611
5612 /* Scan all the reloads looking for ones that only load values and
5613 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5614 assigned and not modified by INSN. */
5615
5616 for (i = 0; i < n_reloads; i++)
5617 {
5618 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5619 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5620 || reg_set_p (reload_reg_rtx[i], insn))
5621 continue;
5622
5623 /* Look at all other reloads. Ensure that the only use of this
5624 reload_reg_rtx is in a reload that just loads the same value
5625 as we do. Note that any secondary reloads must be of the identical
5626 class since the values, modes, and result registers are the
5627 same, so we need not do anything with any secondary reloads. */
5628
5629 for (j = 0; j < n_reloads; j++)
5630 {
5631 if (i == j || reload_reg_rtx[j] == 0
5632 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5633 reload_reg_rtx[i]))
5634 continue;
5635
5636 /* If the reload regs aren't exactly the same (e.g, different modes)
5637 or if the values are different, we can't merge anything with this
5638 reload register. */
5639
5640 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5641 || reload_out[j] != 0 || reload_in[j] == 0
5642 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5643 break;
5644 }
5645
5646 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5647 we, in fact, found any matching reloads. */
5648
5649 if (j == n_reloads)
5650 {
5651 for (j = 0; j < n_reloads; j++)
5652 if (i != j && reload_reg_rtx[j] != 0
5653 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5654 {
5655 reload_when_needed[i] = RELOAD_OTHER;
5656 reload_in[j] = 0;
5657 transfer_replacements (i, j);
5658 }
5659
5660 /* If this is now RELOAD_OTHER, look for any reloads that load
5661 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5662 if they were for inputs, RELOAD_OTHER for outputs. Note that
5663 this test is equivalent to looking for reloads for this operand
5664 number. */
5665
5666 if (reload_when_needed[i] == RELOAD_OTHER)
5667 for (j = 0; j < n_reloads; j++)
5668 if (reload_in[j] != 0
5669 && reload_when_needed[i] != RELOAD_OTHER
5670 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5671 reload_in[i]))
5672 reload_when_needed[j]
5673 = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5674 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5675 }
5676 }
5677}
5678#endif /* SMALL_RELOAD_CLASSES */
5679\f
32131a9c
RK
5680/* Output insns to reload values in and out of the chosen reload regs. */
5681
5682static void
5683emit_reload_insns (insn)
5684 rtx insn;
5685{
5686 register int j;
546b63fb
RK
5687 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5688 rtx other_input_address_reload_insns = 0;
5689 rtx other_input_reload_insns = 0;
5690 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5691 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5692 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5693 rtx operand_reload_insns = 0;
893bc853 5694 rtx other_operand_reload_insns = 0;
d7e0324f 5695 rtx other_output_reload_insns = 0;
32131a9c 5696 rtx following_insn = NEXT_INSN (insn);
a8efe40d 5697 rtx before_insn = insn;
32131a9c
RK
5698 int special;
5699 /* Values to be put in spill_reg_store are put here first. */
5700 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5701
546b63fb
RK
5702 for (j = 0; j < reload_n_operands; j++)
5703 input_reload_insns[j] = input_address_reload_insns[j]
5704 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5705
32131a9c
RK
5706 /* Now output the instructions to copy the data into and out of the
5707 reload registers. Do these in the order that the reloads were reported,
5708 since reloads of base and index registers precede reloads of operands
5709 and the operands may need the base and index registers reloaded. */
5710
5711 for (j = 0; j < n_reloads; j++)
5712 {
5713 register rtx old;
5714 rtx oldequiv_reg = 0;
73b2ad9e
RK
5715
5716 if (reload_spill_index[j] >= 0)
5717 new_spill_reg_store[reload_spill_index[j]] = 0;
32131a9c
RK
5718
5719 old = reload_in[j];
5720 if (old != 0 && ! reload_inherited[j]
5721 && ! rtx_equal_p (reload_reg_rtx[j], old)
5722 && reload_reg_rtx[j] != 0)
5723 {
5724 register rtx reloadreg = reload_reg_rtx[j];
5725 rtx oldequiv = 0;
5726 enum machine_mode mode;
546b63fb 5727 rtx *where;
32131a9c
RK
5728
5729 /* Determine the mode to reload in.
5730 This is very tricky because we have three to choose from.
5731 There is the mode the insn operand wants (reload_inmode[J]).
5732 There is the mode of the reload register RELOADREG.
5733 There is the intrinsic mode of the operand, which we could find
5734 by stripping some SUBREGs.
5735 It turns out that RELOADREG's mode is irrelevant:
5736 we can change that arbitrarily.
5737
5738 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5739 then the reload reg may not support QImode moves, so use SImode.
5740 If foo is in memory due to spilling a pseudo reg, this is safe,
5741 because the QImode value is in the least significant part of a
5742 slot big enough for a SImode. If foo is some other sort of
5743 memory reference, then it is impossible to reload this case,
5744 so previous passes had better make sure this never happens.
5745
5746 Then consider a one-word union which has SImode and one of its
5747 members is a float, being fetched as (SUBREG:SF union:SI).
5748 We must fetch that as SFmode because we could be loading into
5749 a float-only register. In this case OLD's mode is correct.
5750
5751 Consider an immediate integer: it has VOIDmode. Here we need
5752 to get a mode from something else.
5753
5754 In some cases, there is a fourth mode, the operand's
5755 containing mode. If the insn specifies a containing mode for
5756 this operand, it overrides all others.
5757
5758 I am not sure whether the algorithm here is always right,
5759 but it does the right things in those cases. */
5760
5761 mode = GET_MODE (old);
5762 if (mode == VOIDmode)
5763 mode = reload_inmode[j];
32131a9c
RK
5764
5765#ifdef SECONDARY_INPUT_RELOAD_CLASS
5766 /* If we need a secondary register for this operation, see if
5767 the value is already in a register in that class. Don't
5768 do this if the secondary register will be used as a scratch
5769 register. */
5770
b80bba27
RK
5771 if (reload_secondary_in_reload[j] >= 0
5772 && reload_secondary_in_icode[j] == CODE_FOR_nothing
58b1581b 5773 && optimize)
32131a9c
RK
5774 oldequiv
5775 = find_equiv_reg (old, insn,
b80bba27 5776 reload_reg_class[reload_secondary_in_reload[j]],
fb3821f7 5777 -1, NULL_PTR, 0, mode);
32131a9c
RK
5778#endif
5779
5780 /* If reloading from memory, see if there is a register
5781 that already holds the same value. If so, reload from there.
5782 We can pass 0 as the reload_reg_p argument because
5783 any other reload has either already been emitted,
5784 in which case find_equiv_reg will see the reload-insn,
5785 or has yet to be emitted, in which case it doesn't matter
5786 because we will use this equiv reg right away. */
5787
58b1581b 5788 if (oldequiv == 0 && optimize
32131a9c
RK
5789 && (GET_CODE (old) == MEM
5790 || (GET_CODE (old) == REG
5791 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5792 && reg_renumber[REGNO (old)] < 0)))
546b63fb 5793 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
fb3821f7 5794 -1, NULL_PTR, 0, mode);
32131a9c
RK
5795
5796 if (oldequiv)
5797 {
5798 int regno = true_regnum (oldequiv);
5799
5800 /* If OLDEQUIV is a spill register, don't use it for this
5801 if any other reload needs it at an earlier stage of this insn
a8fdc208 5802 or at this stage. */
32131a9c 5803 if (spill_reg_order[regno] >= 0
546b63fb
RK
5804 && (! reload_reg_free_p (regno, reload_opnum[j],
5805 reload_when_needed[j])
5806 || ! reload_reg_free_before_p (regno, reload_opnum[j],
32131a9c
RK
5807 reload_when_needed[j])))
5808 oldequiv = 0;
5809
5810 /* If OLDEQUIV is not a spill register,
5811 don't use it if any other reload wants it. */
5812 if (spill_reg_order[regno] < 0)
5813 {
5814 int k;
5815 for (k = 0; k < n_reloads; k++)
5816 if (reload_reg_rtx[k] != 0 && k != j
bfa30b22
RK
5817 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5818 oldequiv))
32131a9c
RK
5819 {
5820 oldequiv = 0;
5821 break;
5822 }
5823 }
546b63fb
RK
5824
5825 /* If it is no cheaper to copy from OLDEQUIV into the
5826 reload register than it would be to move from memory,
5827 don't use it. Likewise, if we need a secondary register
5828 or memory. */
5829
5830 if (oldequiv != 0
5831 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5832 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5833 reload_reg_class[j])
5834 >= MEMORY_MOVE_COST (mode)))
5835#ifdef SECONDARY_INPUT_RELOAD_CLASS
5836 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5837 mode, oldequiv)
5838 != NO_REGS)
5839#endif
5840#ifdef SECONDARY_MEMORY_NEEDED
5841 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5842 REGNO_REG_CLASS (regno),
5843 mode)
5844#endif
5845 ))
5846 oldequiv = 0;
32131a9c
RK
5847 }
5848
5849 if (oldequiv == 0)
5850 oldequiv = old;
5851 else if (GET_CODE (oldequiv) == REG)
5852 oldequiv_reg = oldequiv;
5853 else if (GET_CODE (oldequiv) == SUBREG)
5854 oldequiv_reg = SUBREG_REG (oldequiv);
5855
76182796
RK
5856 /* If we are reloading from a register that was recently stored in
5857 with an output-reload, see if we can prove there was
5858 actually no need to store the old value in it. */
5859
5860 if (optimize && GET_CODE (oldequiv) == REG
5861 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5862 && spill_reg_order[REGNO (oldequiv)] >= 0
c95c0732 5863 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
8aea655f 5864 && find_reg_note (insn, REG_DEAD, reload_in[j])
76182796 5865 /* This is unsafe if operand occurs more than once in current
b87b7ecd 5866 insn. Perhaps some occurrences weren't reloaded. */
c95c0732 5867 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
76182796
RK
5868 delete_output_reload
5869 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5870
32131a9c 5871 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
3abe6f90
RK
5872 then load RELOADREG from OLDEQUIV. Note that we cannot use
5873 gen_lowpart_common since it can do the wrong thing when
5874 RELOADREG has a multi-word mode. Note that RELOADREG
5875 must always be a REG here. */
32131a9c
RK
5876
5877 if (GET_MODE (reloadreg) != mode)
3abe6f90 5878 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
32131a9c
RK
5879 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5880 oldequiv = SUBREG_REG (oldequiv);
5881 if (GET_MODE (oldequiv) != VOIDmode
5882 && mode != GET_MODE (oldequiv))
5883 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5884
546b63fb 5885 /* Switch to the right place to emit the reload insns. */
32131a9c
RK
5886 switch (reload_when_needed[j])
5887 {
32131a9c 5888 case RELOAD_OTHER:
546b63fb
RK
5889 where = &other_input_reload_insns;
5890 break;
5891 case RELOAD_FOR_INPUT:
5892 where = &input_reload_insns[reload_opnum[j]];
32131a9c 5893 break;
546b63fb
RK
5894 case RELOAD_FOR_INPUT_ADDRESS:
5895 where = &input_address_reload_insns[reload_opnum[j]];
32131a9c 5896 break;
546b63fb
RK
5897 case RELOAD_FOR_OUTPUT_ADDRESS:
5898 where = &output_address_reload_insns[reload_opnum[j]];
32131a9c
RK
5899 break;
5900 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
5901 where = &operand_reload_insns;
5902 break;
893bc853
RK
5903 case RELOAD_FOR_OPADDR_ADDR:
5904 where = &other_operand_reload_insns;
5905 break;
546b63fb
RK
5906 case RELOAD_FOR_OTHER_ADDRESS:
5907 where = &other_input_address_reload_insns;
5908 break;
5909 default:
5910 abort ();
32131a9c
RK
5911 }
5912
546b63fb 5913 push_to_sequence (*where);
32131a9c
RK
5914 special = 0;
5915
5916 /* Auto-increment addresses must be reloaded in a special way. */
5917 if (GET_CODE (oldequiv) == POST_INC
5918 || GET_CODE (oldequiv) == POST_DEC
5919 || GET_CODE (oldequiv) == PRE_INC
5920 || GET_CODE (oldequiv) == PRE_DEC)
5921 {
5922 /* We are not going to bother supporting the case where a
5923 incremented register can't be copied directly from
5924 OLDEQUIV since this seems highly unlikely. */
b80bba27 5925 if (reload_secondary_in_reload[j] >= 0)
32131a9c
RK
5926 abort ();
5927 /* Prevent normal processing of this reload. */
5928 special = 1;
5929 /* Output a special code sequence for this case. */
546b63fb 5930 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
32131a9c
RK
5931 }
5932
5933 /* If we are reloading a pseudo-register that was set by the previous
5934 insn, see if we can get rid of that pseudo-register entirely
5935 by redirecting the previous insn into our reload register. */
5936
5937 else if (optimize && GET_CODE (old) == REG
5938 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5939 && dead_or_set_p (insn, old)
5940 /* This is unsafe if some other reload
5941 uses the same reg first. */
546b63fb
RK
5942 && reload_reg_free_before_p (REGNO (reloadreg),
5943 reload_opnum[j],
5944 reload_when_needed[j]))
32131a9c
RK
5945 {
5946 rtx temp = PREV_INSN (insn);
5947 while (temp && GET_CODE (temp) == NOTE)
5948 temp = PREV_INSN (temp);
5949 if (temp
5950 && GET_CODE (temp) == INSN
5951 && GET_CODE (PATTERN (temp)) == SET
5952 && SET_DEST (PATTERN (temp)) == old
5953 /* Make sure we can access insn_operand_constraint. */
5954 && asm_noperands (PATTERN (temp)) < 0
5955 /* This is unsafe if prev insn rejects our reload reg. */
5956 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5957 reloadreg)
5958 /* This is unsafe if operand occurs more than once in current
5959 insn. Perhaps some occurrences aren't reloaded. */
5960 && count_occurrences (PATTERN (insn), old) == 1
5961 /* Don't risk splitting a matching pair of operands. */
5962 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5963 {
5964 /* Store into the reload register instead of the pseudo. */
5965 SET_DEST (PATTERN (temp)) = reloadreg;
5966 /* If these are the only uses of the pseudo reg,
5967 pretend for GDB it lives in the reload reg we used. */
5968 if (reg_n_deaths[REGNO (old)] == 1
5969 && reg_n_sets[REGNO (old)] == 1)
5970 {
5971 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5972 alter_reg (REGNO (old), -1);
5973 }
5974 special = 1;
5975 }
5976 }
5977
546b63fb
RK
5978 /* We can't do that, so output an insn to load RELOADREG. */
5979
32131a9c
RK
5980 if (! special)
5981 {
5982#ifdef SECONDARY_INPUT_RELOAD_CLASS
5983 rtx second_reload_reg = 0;
5984 enum insn_code icode;
5985
5986 /* If we have a secondary reload, pick up the secondary register
d445b551
RK
5987 and icode, if any. If OLDEQUIV and OLD are different or
5988 if this is an in-out reload, recompute whether or not we
5989 still need a secondary register and what the icode should
5990 be. If we still need a secondary register and the class or
5991 icode is different, go back to reloading from OLD if using
5992 OLDEQUIV means that we got the wrong type of register. We
5993 cannot have different class or icode due to an in-out reload
5994 because we don't make such reloads when both the input and
5995 output need secondary reload registers. */
32131a9c 5996
b80bba27 5997 if (reload_secondary_in_reload[j] >= 0)
32131a9c 5998 {
b80bba27 5999 int secondary_reload = reload_secondary_in_reload[j];
1554c2c6
RK
6000 rtx real_oldequiv = oldequiv;
6001 rtx real_old = old;
6002
6003 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6004 and similarly for OLD.
b80bba27 6005 See comments in get_secondary_reload in reload.c. */
1554c2c6
RK
6006 if (GET_CODE (oldequiv) == REG
6007 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6008 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6009 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6010
6011 if (GET_CODE (old) == REG
6012 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6013 && reg_equiv_mem[REGNO (old)] != 0)
6014 real_old = reg_equiv_mem[REGNO (old)];
6015
32131a9c 6016 second_reload_reg = reload_reg_rtx[secondary_reload];
b80bba27 6017 icode = reload_secondary_in_icode[j];
32131a9c 6018
d445b551
RK
6019 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6020 || (reload_in[j] != 0 && reload_out[j] != 0))
32131a9c
RK
6021 {
6022 enum reg_class new_class
6023 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
1554c2c6 6024 mode, real_oldequiv);
32131a9c
RK
6025
6026 if (new_class == NO_REGS)
6027 second_reload_reg = 0;
6028 else
6029 {
6030 enum insn_code new_icode;
6031 enum machine_mode new_mode;
6032
6033 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6034 REGNO (second_reload_reg)))
1554c2c6 6035 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
6036 else
6037 {
6038 new_icode = reload_in_optab[(int) mode];
6039 if (new_icode != CODE_FOR_nothing
6040 && ((insn_operand_predicate[(int) new_icode][0]
a8fdc208 6041 && ! ((*insn_operand_predicate[(int) new_icode][0])
32131a9c 6042 (reloadreg, mode)))
a8fdc208
RS
6043 || (insn_operand_predicate[(int) new_icode][1]
6044 && ! ((*insn_operand_predicate[(int) new_icode][1])
1554c2c6 6045 (real_oldequiv, mode)))))
32131a9c
RK
6046 new_icode = CODE_FOR_nothing;
6047
6048 if (new_icode == CODE_FOR_nothing)
6049 new_mode = mode;
6050 else
196ddf8a 6051 new_mode = insn_operand_mode[(int) new_icode][2];
32131a9c
RK
6052
6053 if (GET_MODE (second_reload_reg) != new_mode)
6054 {
6055 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6056 new_mode))
1554c2c6 6057 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
6058 else
6059 second_reload_reg
3aaa90c7
MM
6060 = gen_rtx (REG, new_mode,
6061 REGNO (second_reload_reg));
32131a9c
RK
6062 }
6063 }
6064 }
6065 }
6066
6067 /* If we still need a secondary reload register, check
6068 to see if it is being used as a scratch or intermediate
1554c2c6
RK
6069 register and generate code appropriately. If we need
6070 a scratch register, use REAL_OLDEQUIV since the form of
6071 the insn may depend on the actual address if it is
6072 a MEM. */
32131a9c
RK
6073
6074 if (second_reload_reg)
6075 {
6076 if (icode != CODE_FOR_nothing)
6077 {
5e03c156
RK
6078 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6079 second_reload_reg));
32131a9c
RK
6080 special = 1;
6081 }
6082 else
6083 {
6084 /* See if we need a scratch register to load the
6085 intermediate register (a tertiary reload). */
6086 enum insn_code tertiary_icode
b80bba27 6087 = reload_secondary_in_icode[secondary_reload];
32131a9c
RK
6088
6089 if (tertiary_icode != CODE_FOR_nothing)
6090 {
6091 rtx third_reload_reg
b80bba27 6092 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
32131a9c 6093
546b63fb
RK
6094 emit_insn ((GEN_FCN (tertiary_icode)
6095 (second_reload_reg, real_oldequiv,
6096 third_reload_reg)));
32131a9c
RK
6097 }
6098 else
5e03c156
RK
6099 gen_reload (second_reload_reg, oldequiv,
6100 reload_opnum[j],
6101 reload_when_needed[j]);
546b63fb
RK
6102
6103 oldequiv = second_reload_reg;
32131a9c
RK
6104 }
6105 }
6106 }
6107#endif
6108
2d182c6f 6109 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
5e03c156
RK
6110 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6111 reload_when_needed[j]);
32131a9c
RK
6112
6113#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6114 /* We may have to make a REG_DEAD note for the secondary reload
6115 register in the insns we just made. Find the last insn that
6116 mentioned the register. */
6117 if (! special && second_reload_reg
6118 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6119 {
6120 rtx prev;
6121
546b63fb 6122 for (prev = get_last_insn (); prev;
32131a9c
RK
6123 prev = PREV_INSN (prev))
6124 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
bfa30b22
RK
6125 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6126 PATTERN (prev)))
32131a9c
RK
6127 {
6128 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6129 second_reload_reg,
6130 REG_NOTES (prev));
6131 break;
6132 }
6133 }
6134#endif
6135 }
6136
546b63fb
RK
6137 /* End this sequence. */
6138 *where = get_insns ();
6139 end_sequence ();
32131a9c
RK
6140 }
6141
6142 /* Add a note saying the input reload reg
6143 dies in this insn, if anyone cares. */
6144#ifdef PRESERVE_DEATH_INFO_REGNO_P
6145 if (old != 0
6146 && reload_reg_rtx[j] != old
6147 && reload_reg_rtx[j] != 0
6148 && reload_out[j] == 0
6149 && ! reload_inherited[j]
6150 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6151 {
6152 register rtx reloadreg = reload_reg_rtx[j];
6153
a8fdc208 6154#if 0
32131a9c
RK
6155 /* We can't abort here because we need to support this for sched.c.
6156 It's not terrible to miss a REG_DEAD note, but we should try
6157 to figure out how to do this correctly. */
6158 /* The code below is incorrect for address-only reloads. */
6159 if (reload_when_needed[j] != RELOAD_OTHER
6160 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6161 abort ();
6162#endif
6163
6164 /* Add a death note to this insn, for an input reload. */
6165
6166 if ((reload_when_needed[j] == RELOAD_OTHER
6167 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6168 && ! dead_or_set_p (insn, reloadreg))
6169 REG_NOTES (insn)
6170 = gen_rtx (EXPR_LIST, REG_DEAD,
6171 reloadreg, REG_NOTES (insn));
6172 }
6173
6174 /* When we inherit a reload, the last marked death of the reload reg
6175 may no longer really be a death. */
6176 if (reload_reg_rtx[j] != 0
6177 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6178 && reload_inherited[j])
6179 {
6180 /* Handle inheriting an output reload.
6181 Remove the death note from the output reload insn. */
6182 if (reload_spill_index[j] >= 0
6183 && GET_CODE (reload_in[j]) == REG
6184 && spill_reg_store[reload_spill_index[j]] != 0
6185 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6186 REG_DEAD, REGNO (reload_reg_rtx[j])))
6187 remove_death (REGNO (reload_reg_rtx[j]),
6188 spill_reg_store[reload_spill_index[j]]);
6189 /* Likewise for input reloads that were inherited. */
6190 else if (reload_spill_index[j] >= 0
6191 && GET_CODE (reload_in[j]) == REG
6192 && spill_reg_store[reload_spill_index[j]] == 0
6193 && reload_inheritance_insn[j] != 0
a8fdc208 6194 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
32131a9c
RK
6195 REGNO (reload_reg_rtx[j])))
6196 remove_death (REGNO (reload_reg_rtx[j]),
6197 reload_inheritance_insn[j]);
6198 else
6199 {
6200 rtx prev;
6201
6202 /* We got this register from find_equiv_reg.
6203 Search back for its last death note and get rid of it.
6204 But don't search back too far.
6205 Don't go past a place where this reg is set,
6206 since a death note before that remains valid. */
6207 for (prev = PREV_INSN (insn);
6208 prev && GET_CODE (prev) != CODE_LABEL;
6209 prev = PREV_INSN (prev))
6210 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6211 && dead_or_set_p (prev, reload_reg_rtx[j]))
6212 {
6213 if (find_regno_note (prev, REG_DEAD,
6214 REGNO (reload_reg_rtx[j])))
6215 remove_death (REGNO (reload_reg_rtx[j]), prev);
6216 break;
6217 }
6218 }
6219 }
6220
6221 /* We might have used find_equiv_reg above to choose an alternate
6222 place from which to reload. If so, and it died, we need to remove
6223 that death and move it to one of the insns we just made. */
6224
6225 if (oldequiv_reg != 0
6226 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6227 {
6228 rtx prev, prev1;
6229
6230 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6231 prev = PREV_INSN (prev))
6232 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6233 && dead_or_set_p (prev, oldequiv_reg))
6234 {
6235 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6236 {
6237 for (prev1 = this_reload_insn;
6238 prev1; prev1 = PREV_INSN (prev1))
6239 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
bfa30b22
RK
6240 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6241 PATTERN (prev1)))
32131a9c
RK
6242 {
6243 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6244 oldequiv_reg,
6245 REG_NOTES (prev1));
6246 break;
6247 }
6248 remove_death (REGNO (oldequiv_reg), prev);
6249 }
6250 break;
6251 }
6252 }
6253#endif
6254
6255 /* If we are reloading a register that was recently stored in with an
6256 output-reload, see if we can prove there was
6257 actually no need to store the old value in it. */
6258
6259 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
546b63fb 6260 && reload_in[j] != 0
32131a9c
RK
6261 && GET_CODE (reload_in[j]) == REG
6262#if 0
6263 /* There doesn't seem to be any reason to restrict this to pseudos
6264 and doing so loses in the case where we are copying from a
6265 register of the wrong class. */
6266 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6267#endif
6268 && spill_reg_store[reload_spill_index[j]] != 0
546b63fb
RK
6269 /* This is unsafe if some other reload uses the same reg first. */
6270 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6271 reload_opnum[j], reload_when_needed[j])
32131a9c
RK
6272 && dead_or_set_p (insn, reload_in[j])
6273 /* This is unsafe if operand occurs more than once in current
6274 insn. Perhaps some occurrences weren't reloaded. */
6275 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6276 delete_output_reload (insn, j,
6277 spill_reg_store[reload_spill_index[j]]);
6278
6279 /* Input-reloading is done. Now do output-reloading,
6280 storing the value from the reload-register after the main insn
6281 if reload_out[j] is nonzero.
6282
6283 ??? At some point we need to support handling output reloads of
6284 JUMP_INSNs or insns that set cc0. */
6285 old = reload_out[j];
6286 if (old != 0
6287 && reload_reg_rtx[j] != old
6288 && reload_reg_rtx[j] != 0)
6289 {
6290 register rtx reloadreg = reload_reg_rtx[j];
6291 register rtx second_reloadreg = 0;
32131a9c
RK
6292 rtx note, p;
6293 enum machine_mode mode;
6294 int special = 0;
6295
6296 /* An output operand that dies right away does need a reload,
6297 but need not be copied from it. Show the new location in the
6298 REG_UNUSED note. */
6299 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6300 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6301 {
6302 XEXP (note, 0) = reload_reg_rtx[j];
6303 continue;
6304 }
a7911cd2
RK
6305 /* Likewise for a SUBREG of an operand that dies. */
6306 else if (GET_CODE (old) == SUBREG
6307 && GET_CODE (SUBREG_REG (old)) == REG
6308 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6309 SUBREG_REG (old))))
6310 {
6311 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6312 reload_reg_rtx[j]);
6313 continue;
6314 }
32131a9c
RK
6315 else if (GET_CODE (old) == SCRATCH)
6316 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6317 but we don't want to make an output reload. */
6318 continue;
6319
6320#if 0
6321 /* Strip off of OLD any size-increasing SUBREGs such as
6322 (SUBREG:SI foo:QI 0). */
6323
6324 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6325 && (GET_MODE_SIZE (GET_MODE (old))
6326 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6327 old = SUBREG_REG (old);
6328#endif
6329
6330 /* If is a JUMP_INSN, we can't support output reloads yet. */
6331 if (GET_CODE (insn) == JUMP_INSN)
6332 abort ();
6333
d7e0324f 6334 if (reload_when_needed[j] == RELOAD_OTHER)
5ca582cf 6335 start_sequence ();
d7e0324f
RK
6336 else
6337 push_to_sequence (output_reload_insns[reload_opnum[j]]);
546b63fb 6338
32131a9c
RK
6339 /* Determine the mode to reload in.
6340 See comments above (for input reloading). */
6341
6342 mode = GET_MODE (old);
6343 if (mode == VOIDmode)
79a365a7
RS
6344 {
6345 /* VOIDmode should never happen for an output. */
6346 if (asm_noperands (PATTERN (insn)) < 0)
6347 /* It's the compiler's fault. */
a89b2cc4 6348 fatal_insn ("VOIDmode on an output", insn);
79a365a7
RS
6349 error_for_asm (insn, "output operand is constant in `asm'");
6350 /* Prevent crash--use something we know is valid. */
6351 mode = word_mode;
6352 old = gen_rtx (REG, mode, REGNO (reloadreg));
6353 }
32131a9c 6354
32131a9c 6355 if (GET_MODE (reloadreg) != mode)
3abe6f90 6356 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
32131a9c
RK
6357
6358#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6359
6360 /* If we need two reload regs, set RELOADREG to the intermediate
5e03c156 6361 one, since it will be stored into OLD. We might need a secondary
32131a9c
RK
6362 register only for an input reload, so check again here. */
6363
b80bba27 6364 if (reload_secondary_out_reload[j] >= 0)
32131a9c 6365 {
1554c2c6 6366 rtx real_old = old;
32131a9c 6367
1554c2c6
RK
6368 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6369 && reg_equiv_mem[REGNO (old)] != 0)
6370 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 6371
1554c2c6
RK
6372 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6373 mode, real_old)
6374 != NO_REGS))
6375 {
6376 second_reloadreg = reloadreg;
b80bba27 6377 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
32131a9c 6378
1554c2c6
RK
6379 /* See if RELOADREG is to be used as a scratch register
6380 or as an intermediate register. */
b80bba27 6381 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
32131a9c 6382 {
b80bba27 6383 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
546b63fb 6384 (real_old, second_reloadreg, reloadreg)));
1554c2c6 6385 special = 1;
32131a9c
RK
6386 }
6387 else
1554c2c6
RK
6388 {
6389 /* See if we need both a scratch and intermediate reload
6390 register. */
5e03c156 6391
b80bba27 6392 int secondary_reload = reload_secondary_out_reload[j];
1554c2c6 6393 enum insn_code tertiary_icode
b80bba27 6394 = reload_secondary_out_icode[secondary_reload];
32131a9c 6395
1554c2c6
RK
6396 if (GET_MODE (reloadreg) != mode)
6397 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6398
6399 if (tertiary_icode != CODE_FOR_nothing)
6400 {
6401 rtx third_reloadreg
b80bba27 6402 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
a7911cd2 6403 rtx tem;
5e03c156
RK
6404
6405 /* Copy primary reload reg to secondary reload reg.
6406 (Note that these have been swapped above, then
6407 secondary reload reg to OLD using our insn. */
6408
a7911cd2
RK
6409 /* If REAL_OLD is a paradoxical SUBREG, remove it
6410 and try to put the opposite SUBREG on
6411 RELOADREG. */
6412 if (GET_CODE (real_old) == SUBREG
6413 && (GET_MODE_SIZE (GET_MODE (real_old))
6414 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6415 && 0 != (tem = gen_lowpart_common
6416 (GET_MODE (SUBREG_REG (real_old)),
6417 reloadreg)))
6418 real_old = SUBREG_REG (real_old), reloadreg = tem;
6419
5e03c156
RK
6420 gen_reload (reloadreg, second_reloadreg,
6421 reload_opnum[j], reload_when_needed[j]);
6422 emit_insn ((GEN_FCN (tertiary_icode)
6423 (real_old, reloadreg, third_reloadreg)));
6424 special = 1;
9ad5f9f6 6425 }
5e03c156 6426
1554c2c6 6427 else
5e03c156
RK
6428 /* Copy between the reload regs here and then to
6429 OUT later. */
1554c2c6 6430
5e03c156
RK
6431 gen_reload (reloadreg, second_reloadreg,
6432 reload_opnum[j], reload_when_needed[j]);
1554c2c6 6433 }
32131a9c
RK
6434 }
6435 }
6436#endif
6437
6438 /* Output the last reload insn. */
6439 if (! special)
5e03c156
RK
6440 gen_reload (old, reloadreg, reload_opnum[j],
6441 reload_when_needed[j]);
32131a9c
RK
6442
6443#ifdef PRESERVE_DEATH_INFO_REGNO_P
6444 /* If final will look at death notes for this reg,
6445 put one on the last output-reload insn to use it. Similarly
6446 for any secondary register. */
6447 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
546b63fb 6448 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6449 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
6450 && reg_overlap_mentioned_for_reload_p (reloadreg,
6451 PATTERN (p)))
32131a9c
RK
6452 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6453 reloadreg, REG_NOTES (p));
6454
6455#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6456 if (! special
6457 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
546b63fb 6458 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6459 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
6460 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6461 PATTERN (p)))
32131a9c
RK
6462 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6463 second_reloadreg, REG_NOTES (p));
6464#endif
6465#endif
6466 /* Look at all insns we emitted, just to be safe. */
546b63fb 6467 for (p = get_insns (); p; p = NEXT_INSN (p))
32131a9c
RK
6468 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6469 {
6470 /* If this output reload doesn't come from a spill reg,
6471 clear any memory of reloaded copies of the pseudo reg.
6472 If this output reload comes from a spill reg,
6473 reg_has_output_reload will make this do nothing. */
6474 note_stores (PATTERN (p), forget_old_reloads_1);
6475
73b2ad9e
RK
6476 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6477 && reload_spill_index[j] >= 0)
6478 new_spill_reg_store[reload_spill_index[j]] = p;
32131a9c
RK
6479 }
6480
d7e0324f 6481 if (reload_when_needed[j] == RELOAD_OTHER)
5ca582cf
RK
6482 {
6483 if (other_output_reload_insns)
6484 emit_insns (other_output_reload_insns);
6485 other_output_reload_insns = get_insns ();
6486 }
d7e0324f
RK
6487 else
6488 output_reload_insns[reload_opnum[j]] = get_insns ();
6489
546b63fb 6490 end_sequence ();
32131a9c 6491 }
32131a9c
RK
6492 }
6493
546b63fb
RK
6494 /* Now write all the insns we made for reloads in the order expected by
6495 the allocation functions. Prior to the insn being reloaded, we write
6496 the following reloads:
6497
6498 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6499
5ca582cf 6500 RELOAD_OTHER reloads, output in ascending order by reload number.
546b63fb
RK
6501
6502 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6503 the RELOAD_FOR_INPUT reload for the operand.
6504
893bc853
RK
6505 RELOAD_FOR_OPADDR_ADDRS reloads.
6506
546b63fb
RK
6507 RELOAD_FOR_OPERAND_ADDRESS reloads.
6508
6509 After the insn being reloaded, we write the following:
6510
6511 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
d7e0324f
RK
6512 the RELOAD_FOR_OUTPUT reload for that operand.
6513
5ca582cf
RK
6514 Any RELOAD_OTHER output reloads, output in descending order by
6515 reload number. */
546b63fb
RK
6516
6517 emit_insns_before (other_input_address_reload_insns, before_insn);
6518 emit_insns_before (other_input_reload_insns, before_insn);
6519
6520 for (j = 0; j < reload_n_operands; j++)
6521 {
6522 emit_insns_before (input_address_reload_insns[j], before_insn);
6523 emit_insns_before (input_reload_insns[j], before_insn);
6524 }
6525
893bc853 6526 emit_insns_before (other_operand_reload_insns, before_insn);
546b63fb
RK
6527 emit_insns_before (operand_reload_insns, before_insn);
6528
6529 for (j = 0; j < reload_n_operands; j++)
6530 {
6531 emit_insns_before (output_address_reload_insns[j], following_insn);
6532 emit_insns_before (output_reload_insns[j], following_insn);
6533 }
6534
d7e0324f
RK
6535 emit_insns_before (other_output_reload_insns, following_insn);
6536
32131a9c
RK
6537 /* Move death notes from INSN
6538 to output-operand-address and output reload insns. */
6539#ifdef PRESERVE_DEATH_INFO_REGNO_P
6540 {
6541 rtx insn1;
6542 /* Loop over those insns, last ones first. */
6543 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6544 insn1 = PREV_INSN (insn1))
6545 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6546 {
6547 rtx source = SET_SRC (PATTERN (insn1));
6548 rtx dest = SET_DEST (PATTERN (insn1));
6549
6550 /* The note we will examine next. */
6551 rtx reg_notes = REG_NOTES (insn);
6552 /* The place that pointed to this note. */
6553 rtx *prev_reg_note = &REG_NOTES (insn);
6554
6555 /* If the note is for something used in the source of this
6556 reload insn, or in the output address, move the note. */
6557 while (reg_notes)
6558 {
6559 rtx next_reg_notes = XEXP (reg_notes, 1);
6560 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6561 && GET_CODE (XEXP (reg_notes, 0)) == REG
6562 && ((GET_CODE (dest) != REG
bfa30b22
RK
6563 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6564 dest))
6565 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6566 source)))
32131a9c
RK
6567 {
6568 *prev_reg_note = next_reg_notes;
6569 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6570 REG_NOTES (insn1) = reg_notes;
6571 }
6572 else
6573 prev_reg_note = &XEXP (reg_notes, 1);
6574
6575 reg_notes = next_reg_notes;
6576 }
6577 }
6578 }
6579#endif
6580
6581 /* For all the spill regs newly reloaded in this instruction,
6582 record what they were reloaded from, so subsequent instructions
d445b551
RK
6583 can inherit the reloads.
6584
6585 Update spill_reg_store for the reloads of this insn.
e9e79d69 6586 Copy the elements that were updated in the loop above. */
32131a9c
RK
6587
6588 for (j = 0; j < n_reloads; j++)
6589 {
6590 register int r = reload_order[j];
6591 register int i = reload_spill_index[r];
6592
6593 /* I is nonneg if this reload used one of the spill regs.
6594 If reload_reg_rtx[r] is 0, this is an optional reload
51f0c3b7 6595 that we opted to ignore. */
d445b551 6596
51f0c3b7 6597 if (i >= 0 && reload_reg_rtx[r] != 0)
32131a9c 6598 {
32131a9c
RK
6599 int nr
6600 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6601 int k;
51f0c3b7
JW
6602 int part_reaches_end = 0;
6603 int all_reaches_end = 1;
32131a9c 6604
51f0c3b7
JW
6605 /* For a multi register reload, we need to check if all or part
6606 of the value lives to the end. */
32131a9c
RK
6607 for (k = 0; k < nr; k++)
6608 {
51f0c3b7
JW
6609 if (reload_reg_reaches_end_p (spill_regs[i] + k, reload_opnum[r],
6610 reload_when_needed[r]))
6611 part_reaches_end = 1;
6612 else
6613 all_reaches_end = 0;
32131a9c
RK
6614 }
6615
51f0c3b7
JW
6616 /* Ignore reloads that don't reach the end of the insn in
6617 entirety. */
6618 if (all_reaches_end)
32131a9c 6619 {
51f0c3b7
JW
6620 /* First, clear out memory of what used to be in this spill reg.
6621 If consecutive registers are used, clear them all. */
d08ea79f 6622
32131a9c
RK
6623 for (k = 0; k < nr; k++)
6624 {
51f0c3b7
JW
6625 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6626 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
32131a9c 6627 }
d08ea79f 6628
51f0c3b7
JW
6629 /* Maybe the spill reg contains a copy of reload_out. */
6630 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6631 {
6632 register int nregno = REGNO (reload_out[r]);
6633 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6634 : HARD_REGNO_NREGS (nregno,
6635 GET_MODE (reload_reg_rtx[r])));
6636
6637 spill_reg_store[i] = new_spill_reg_store[i];
6638 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6639
6640 /* If NREGNO is a hard register, it may occupy more than
6641 one register. If it does, say what is in the
6642 rest of the registers assuming that both registers
6643 agree on how many words the object takes. If not,
6644 invalidate the subsequent registers. */
6645
6646 if (nregno < FIRST_PSEUDO_REGISTER)
6647 for (k = 1; k < nnr; k++)
6648 reg_last_reload_reg[nregno + k]
6649 = (nr == nnr
6650 ? gen_rtx (REG,
6651 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6652 REGNO (reload_reg_rtx[r]) + k)
6653 : 0);
6654
6655 /* Now do the inverse operation. */
6656 for (k = 0; k < nr; k++)
6657 {
6658 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6659 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6660 ? nregno
6661 : nregno + k);
6662 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6663 }
6664 }
d08ea79f 6665
51f0c3b7
JW
6666 /* Maybe the spill reg contains a copy of reload_in. Only do
6667 something if there will not be an output reload for
6668 the register being reloaded. */
6669 else if (reload_out[r] == 0
6670 && reload_in[r] != 0
6671 && ((GET_CODE (reload_in[r]) == REG
6672 && ! reg_has_output_reload[REGNO (reload_in[r])])
6673 || (GET_CODE (reload_in_reg[r]) == REG
6674 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
6675 {
6676 register int nregno;
6677 int nnr;
d445b551 6678
51f0c3b7
JW
6679 if (GET_CODE (reload_in[r]) == REG)
6680 nregno = REGNO (reload_in[r]);
6681 else
6682 nregno = REGNO (reload_in_reg[r]);
d08ea79f 6683
51f0c3b7
JW
6684 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6685 : HARD_REGNO_NREGS (nregno,
6686 GET_MODE (reload_reg_rtx[r])));
6687
6688 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6689
6690 if (nregno < FIRST_PSEUDO_REGISTER)
6691 for (k = 1; k < nnr; k++)
6692 reg_last_reload_reg[nregno + k]
6693 = (nr == nnr
6694 ? gen_rtx (REG,
6695 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6696 REGNO (reload_reg_rtx[r]) + k)
6697 : 0);
6698
6699 /* Unless we inherited this reload, show we haven't
6700 recently done a store. */
6701 if (! reload_inherited[r])
6702 spill_reg_store[i] = 0;
6703
6704 for (k = 0; k < nr; k++)
6705 {
6706 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6707 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6708 ? nregno
6709 : nregno + k);
6710 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6711 = insn;
6712 }
6713 }
6714 }
d445b551 6715
51f0c3b7
JW
6716 /* However, if part of the reload reaches the end, then we must
6717 invalidate the old info for the part that survives to the end. */
6718 else if (part_reaches_end)
6719 {
546b63fb 6720 for (k = 0; k < nr; k++)
51f0c3b7
JW
6721 if (reload_reg_reaches_end_p (spill_regs[i] + k,
6722 reload_opnum[r],
6723 reload_when_needed[r]))
6724 {
6725 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6726 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6727 }
32131a9c
RK
6728 }
6729 }
6730
6731 /* The following if-statement was #if 0'd in 1.34 (or before...).
6732 It's reenabled in 1.35 because supposedly nothing else
6733 deals with this problem. */
6734
6735 /* If a register gets output-reloaded from a non-spill register,
6736 that invalidates any previous reloaded copy of it.
6737 But forget_old_reloads_1 won't get to see it, because
6738 it thinks only about the original insn. So invalidate it here. */
6739 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6740 {
6741 register int nregno = REGNO (reload_out[r]);
c7093272
RK
6742 if (nregno >= FIRST_PSEUDO_REGISTER)
6743 reg_last_reload_reg[nregno] = 0;
6744 else
6745 {
6746 int num_regs = HARD_REGNO_NREGS (nregno,GET_MODE (reload_out[r]));
36281332 6747
c7093272
RK
6748 while (num_regs-- > 0)
6749 reg_last_reload_reg[nregno + num_regs] = 0;
6750 }
32131a9c
RK
6751 }
6752 }
6753}
6754\f
5e03c156
RK
6755/* Emit code to perform a reload from IN (which may be a reload register) to
6756 OUT (which may also be a reload register). IN or OUT is from operand
6757 OPNUM with reload type TYPE.
546b63fb 6758
3c3eeea6 6759 Returns first insn emitted. */
32131a9c
RK
6760
6761rtx
5e03c156
RK
6762gen_reload (out, in, opnum, type)
6763 rtx out;
32131a9c 6764 rtx in;
546b63fb
RK
6765 int opnum;
6766 enum reload_type type;
32131a9c 6767{
546b63fb 6768 rtx last = get_last_insn ();
7a5b18b0
RK
6769 rtx tem;
6770
6771 /* If IN is a paradoxical SUBREG, remove it and try to put the
6772 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
6773 if (GET_CODE (in) == SUBREG
6774 && (GET_MODE_SIZE (GET_MODE (in))
6775 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
6776 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
6777 in = SUBREG_REG (in), out = tem;
6778 else if (GET_CODE (out) == SUBREG
6779 && (GET_MODE_SIZE (GET_MODE (out))
6780 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
6781 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
6782 out = SUBREG_REG (out), in = tem;
32131a9c 6783
a8fdc208 6784 /* How to do this reload can get quite tricky. Normally, we are being
32131a9c
RK
6785 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6786 register that didn't get a hard register. In that case we can just
6787 call emit_move_insn.
6788
a7fd196c
JW
6789 We can also be asked to reload a PLUS that adds a register or a MEM to
6790 another register, constant or MEM. This can occur during frame pointer
6791 elimination and while reloading addresses. This case is handled by
6792 trying to emit a single insn to perform the add. If it is not valid,
6793 we use a two insn sequence.
32131a9c
RK
6794
6795 Finally, we could be called to handle an 'o' constraint by putting
6796 an address into a register. In that case, we first try to do this
6797 with a named pattern of "reload_load_address". If no such pattern
6798 exists, we just emit a SET insn and hope for the best (it will normally
6799 be valid on machines that use 'o').
6800
6801 This entire process is made complex because reload will never
6802 process the insns we generate here and so we must ensure that
6803 they will fit their constraints and also by the fact that parts of
6804 IN might be being reloaded separately and replaced with spill registers.
6805 Because of this, we are, in some sense, just guessing the right approach
6806 here. The one listed above seems to work.
6807
6808 ??? At some point, this whole thing needs to be rethought. */
6809
6810 if (GET_CODE (in) == PLUS
a7fd196c
JW
6811 && (GET_CODE (XEXP (in, 0)) == REG
6812 || GET_CODE (XEXP (in, 0)) == MEM)
6813 && (GET_CODE (XEXP (in, 1)) == REG
6814 || CONSTANT_P (XEXP (in, 1))
6815 || GET_CODE (XEXP (in, 1)) == MEM))
32131a9c 6816 {
a7fd196c
JW
6817 /* We need to compute the sum of a register or a MEM and another
6818 register, constant, or MEM, and put it into the reload
3002e160
JW
6819 register. The best possible way of doing this is if the machine
6820 has a three-operand ADD insn that accepts the required operands.
32131a9c
RK
6821
6822 The simplest approach is to try to generate such an insn and see if it
6823 is recognized and matches its constraints. If so, it can be used.
6824
6825 It might be better not to actually emit the insn unless it is valid,
0009eff2 6826 but we need to pass the insn as an operand to `recog' and
b36d7dd7 6827 `insn_extract' and it is simpler to emit and then delete the insn if
0009eff2 6828 not valid than to dummy things up. */
a8fdc208 6829
af929c62 6830 rtx op0, op1, tem, insn;
32131a9c 6831 int code;
a8fdc208 6832
af929c62
RK
6833 op0 = find_replacement (&XEXP (in, 0));
6834 op1 = find_replacement (&XEXP (in, 1));
6835
32131a9c
RK
6836 /* Since constraint checking is strict, commutativity won't be
6837 checked, so we need to do that here to avoid spurious failure
6838 if the add instruction is two-address and the second operand
6839 of the add is the same as the reload reg, which is frequently
6840 the case. If the insn would be A = B + A, rearrange it so
6841 it will be A = A + B as constrain_operands expects. */
a8fdc208 6842
32131a9c 6843 if (GET_CODE (XEXP (in, 1)) == REG
5e03c156 6844 && REGNO (out) == REGNO (XEXP (in, 1)))
af929c62
RK
6845 tem = op0, op0 = op1, op1 = tem;
6846
6847 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6848 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
32131a9c 6849
5e03c156 6850 insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
32131a9c
RK
6851 code = recog_memoized (insn);
6852
6853 if (code >= 0)
6854 {
6855 insn_extract (insn);
6856 /* We want constrain operands to treat this insn strictly in
6857 its validity determination, i.e., the way it would after reload
6858 has completed. */
6859 if (constrain_operands (code, 1))
6860 return insn;
6861 }
6862
546b63fb 6863 delete_insns_since (last);
32131a9c
RK
6864
6865 /* If that failed, we must use a conservative two-insn sequence.
6866 use move to copy constant, MEM, or pseudo register to the reload
af929c62
RK
6867 register since "move" will be able to handle an arbitrary operand,
6868 unlike add which can't, in general. Then add the registers.
32131a9c
RK
6869
6870 If there is another way to do this for a specific machine, a
6871 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6872 we emit below. */
6873
af929c62
RK
6874 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6875 || (GET_CODE (op1) == REG
6876 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6877 tem = op0, op0 = op1, op1 = tem;
32131a9c 6878
5e03c156 6879 emit_insn (gen_move_insn (out, op0));
39b56c2a 6880
5e03c156 6881 /* If OP0 and OP1 are the same, we can use OUT for OP1.
39b56c2a
RK
6882 This fixes a problem on the 32K where the stack pointer cannot
6883 be used as an operand of an add insn. */
6884
6885 if (rtx_equal_p (op0, op1))
5e03c156 6886 op1 = out;
39b56c2a 6887
5e03c156 6888 insn = emit_insn (gen_add2_insn (out, op1));
c77c9766
RK
6889
6890 /* If that failed, copy the address register to the reload register.
6891 Then add the constant to the reload register. */
6892
6893 code = recog_memoized (insn);
6894
6895 if (code >= 0)
6896 {
6897 insn_extract (insn);
6898 /* We want constrain operands to treat this insn strictly in
6899 its validity determination, i.e., the way it would after reload
6900 has completed. */
6901 if (constrain_operands (code, 1))
6902 return insn;
6903 }
6904
6905 delete_insns_since (last);
6906
5e03c156
RK
6907 emit_insn (gen_move_insn (out, op1));
6908 emit_insn (gen_add2_insn (out, op0));
32131a9c
RK
6909 }
6910
0dadecf6
RK
6911#ifdef SECONDARY_MEMORY_NEEDED
6912 /* If we need a memory location to do the move, do it that way. */
6913 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
5e03c156 6914 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
0dadecf6 6915 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
5e03c156
RK
6916 REGNO_REG_CLASS (REGNO (out)),
6917 GET_MODE (out)))
0dadecf6
RK
6918 {
6919 /* Get the memory to use and rewrite both registers to its mode. */
5e03c156 6920 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
0dadecf6 6921
5e03c156
RK
6922 if (GET_MODE (loc) != GET_MODE (out))
6923 out = gen_rtx (REG, GET_MODE (loc), REGNO (out));
0dadecf6
RK
6924
6925 if (GET_MODE (loc) != GET_MODE (in))
6926 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6927
546b63fb 6928 emit_insn (gen_move_insn (loc, in));
5e03c156 6929 emit_insn (gen_move_insn (out, loc));
0dadecf6
RK
6930 }
6931#endif
6932
32131a9c
RK
6933 /* If IN is a simple operand, use gen_move_insn. */
6934 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
5e03c156 6935 emit_insn (gen_move_insn (out, in));
32131a9c
RK
6936
6937#ifdef HAVE_reload_load_address
6938 else if (HAVE_reload_load_address)
5e03c156 6939 emit_insn (gen_reload_load_address (out, in));
32131a9c
RK
6940#endif
6941
5e03c156 6942 /* Otherwise, just write (set OUT IN) and hope for the best. */
32131a9c 6943 else
5e03c156 6944 emit_insn (gen_rtx (SET, VOIDmode, out, in));
32131a9c
RK
6945
6946 /* Return the first insn emitted.
546b63fb 6947 We can not just return get_last_insn, because there may have
32131a9c
RK
6948 been multiple instructions emitted. Also note that gen_move_insn may
6949 emit more than one insn itself, so we can not assume that there is one
6950 insn emitted per emit_insn_before call. */
6951
546b63fb 6952 return last ? NEXT_INSN (last) : get_insns ();
32131a9c
RK
6953}
6954\f
6955/* Delete a previously made output-reload
6956 whose result we now believe is not needed.
6957 First we double-check.
6958
6959 INSN is the insn now being processed.
6960 OUTPUT_RELOAD_INSN is the insn of the output reload.
6961 J is the reload-number for this insn. */
6962
6963static void
6964delete_output_reload (insn, j, output_reload_insn)
6965 rtx insn;
6966 int j;
6967 rtx output_reload_insn;
6968{
6969 register rtx i1;
6970
6971 /* Get the raw pseudo-register referred to. */
6972
6973 rtx reg = reload_in[j];
6974 while (GET_CODE (reg) == SUBREG)
6975 reg = SUBREG_REG (reg);
6976
6977 /* If the pseudo-reg we are reloading is no longer referenced
6978 anywhere between the store into it and here,
6979 and no jumps or labels intervene, then the value can get
6980 here through the reload reg alone.
6981 Otherwise, give up--return. */
6982 for (i1 = NEXT_INSN (output_reload_insn);
6983 i1 != insn; i1 = NEXT_INSN (i1))
6984 {
6985 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6986 return;
6987 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6988 && reg_mentioned_p (reg, PATTERN (i1)))
6989 return;
6990 }
6991
208dffa5
RS
6992 if (cannot_omit_stores[REGNO (reg)])
6993 return;
6994
32131a9c
RK
6995 /* If this insn will store in the pseudo again,
6996 the previous store can be removed. */
6997 if (reload_out[j] == reload_in[j])
6998 delete_insn (output_reload_insn);
6999
7000 /* See if the pseudo reg has been completely replaced
7001 with reload regs. If so, delete the store insn
7002 and forget we had a stack slot for the pseudo. */
7003 else if (reg_n_deaths[REGNO (reg)] == 1
7004 && reg_basic_block[REGNO (reg)] >= 0
7005 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7006 {
7007 rtx i2;
7008
7009 /* We know that it was used only between here
7010 and the beginning of the current basic block.
7011 (We also know that the last use before INSN was
7012 the output reload we are thinking of deleting, but never mind that.)
7013 Search that range; see if any ref remains. */
7014 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7015 {
d445b551
RK
7016 rtx set = single_set (i2);
7017
32131a9c
RK
7018 /* Uses which just store in the pseudo don't count,
7019 since if they are the only uses, they are dead. */
d445b551 7020 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
7021 continue;
7022 if (GET_CODE (i2) == CODE_LABEL
7023 || GET_CODE (i2) == JUMP_INSN)
7024 break;
7025 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7026 && reg_mentioned_p (reg, PATTERN (i2)))
7027 /* Some other ref remains;
7028 we can't do anything. */
7029 return;
7030 }
7031
7032 /* Delete the now-dead stores into this pseudo. */
7033 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7034 {
d445b551
RK
7035 rtx set = single_set (i2);
7036
7037 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
7038 delete_insn (i2);
7039 if (GET_CODE (i2) == CODE_LABEL
7040 || GET_CODE (i2) == JUMP_INSN)
7041 break;
7042 }
7043
7044 /* For the debugging info,
7045 say the pseudo lives in this reload reg. */
7046 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
7047 alter_reg (REGNO (reg), -1);
7048 }
7049}
32131a9c 7050\f
a8fdc208 7051/* Output reload-insns to reload VALUE into RELOADREG.
858a47b1 7052 VALUE is an autoincrement or autodecrement RTX whose operand
32131a9c
RK
7053 is a register or memory location;
7054 so reloading involves incrementing that location.
7055
7056 INC_AMOUNT is the number to increment or decrement by (always positive).
546b63fb 7057 This cannot be deduced from VALUE. */
32131a9c 7058
546b63fb
RK
7059static void
7060inc_for_reload (reloadreg, value, inc_amount)
32131a9c
RK
7061 rtx reloadreg;
7062 rtx value;
7063 int inc_amount;
32131a9c
RK
7064{
7065 /* REG or MEM to be copied and incremented. */
7066 rtx incloc = XEXP (value, 0);
7067 /* Nonzero if increment after copying. */
7068 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
546b63fb 7069 rtx last;
0009eff2
RK
7070 rtx inc;
7071 rtx add_insn;
7072 int code;
32131a9c
RK
7073
7074 /* No hard register is equivalent to this register after
7075 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7076 we could inc/dec that register as well (maybe even using it for
7077 the source), but I'm not sure it's worth worrying about. */
7078 if (GET_CODE (incloc) == REG)
7079 reg_last_reload_reg[REGNO (incloc)] = 0;
7080
7081 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7082 inc_amount = - inc_amount;
7083
fb3821f7 7084 inc = GEN_INT (inc_amount);
0009eff2
RK
7085
7086 /* If this is post-increment, first copy the location to the reload reg. */
7087 if (post)
546b63fb 7088 emit_insn (gen_move_insn (reloadreg, incloc));
0009eff2
RK
7089
7090 /* See if we can directly increment INCLOC. Use a method similar to that
5e03c156 7091 in gen_reload. */
0009eff2 7092
546b63fb
RK
7093 last = get_last_insn ();
7094 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
7095 gen_rtx (PLUS, GET_MODE (incloc),
7096 incloc, inc)));
0009eff2
RK
7097
7098 code = recog_memoized (add_insn);
7099 if (code >= 0)
32131a9c 7100 {
0009eff2
RK
7101 insn_extract (add_insn);
7102 if (constrain_operands (code, 1))
32131a9c 7103 {
0009eff2
RK
7104 /* If this is a pre-increment and we have incremented the value
7105 where it lives, copy the incremented value to RELOADREG to
7106 be used as an address. */
7107
7108 if (! post)
546b63fb
RK
7109 emit_insn (gen_move_insn (reloadreg, incloc));
7110
7111 return;
32131a9c
RK
7112 }
7113 }
0009eff2 7114
546b63fb 7115 delete_insns_since (last);
0009eff2
RK
7116
7117 /* If couldn't do the increment directly, must increment in RELOADREG.
7118 The way we do this depends on whether this is pre- or post-increment.
7119 For pre-increment, copy INCLOC to the reload register, increment it
7120 there, then save back. */
7121
7122 if (! post)
7123 {
546b63fb
RK
7124 emit_insn (gen_move_insn (reloadreg, incloc));
7125 emit_insn (gen_add2_insn (reloadreg, inc));
7126 emit_insn (gen_move_insn (incloc, reloadreg));
0009eff2 7127 }
32131a9c
RK
7128 else
7129 {
0009eff2
RK
7130 /* Postincrement.
7131 Because this might be a jump insn or a compare, and because RELOADREG
7132 may not be available after the insn in an input reload, we must do
7133 the incrementation before the insn being reloaded for.
7134
7135 We have already copied INCLOC to RELOADREG. Increment the copy in
7136 RELOADREG, save that back, then decrement RELOADREG so it has
7137 the original value. */
7138
546b63fb
RK
7139 emit_insn (gen_add2_insn (reloadreg, inc));
7140 emit_insn (gen_move_insn (incloc, reloadreg));
7141 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
32131a9c 7142 }
0009eff2 7143
546b63fb 7144 return;
32131a9c
RK
7145}
7146\f
7147/* Return 1 if we are certain that the constraint-string STRING allows
7148 the hard register REG. Return 0 if we can't be sure of this. */
7149
7150static int
7151constraint_accepts_reg_p (string, reg)
7152 char *string;
7153 rtx reg;
7154{
7155 int value = 0;
7156 int regno = true_regnum (reg);
7157 int c;
7158
7159 /* Initialize for first alternative. */
7160 value = 0;
7161 /* Check that each alternative contains `g' or `r'. */
7162 while (1)
7163 switch (c = *string++)
7164 {
7165 case 0:
7166 /* If an alternative lacks `g' or `r', we lose. */
7167 return value;
7168 case ',':
7169 /* If an alternative lacks `g' or `r', we lose. */
7170 if (value == 0)
7171 return 0;
7172 /* Initialize for next alternative. */
7173 value = 0;
7174 break;
7175 case 'g':
7176 case 'r':
7177 /* Any general reg wins for this alternative. */
7178 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7179 value = 1;
7180 break;
7181 default:
7182 /* Any reg in specified class wins for this alternative. */
7183 {
0009eff2 7184 enum reg_class class = REG_CLASS_FROM_LETTER (c);
32131a9c 7185
0009eff2 7186 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
32131a9c
RK
7187 value = 1;
7188 }
7189 }
7190}
7191\f
d445b551
RK
7192/* Return the number of places FIND appears within X, but don't count
7193 an occurrence if some SET_DEST is FIND. */
32131a9c
RK
7194
7195static int
7196count_occurrences (x, find)
7197 register rtx x, find;
7198{
7199 register int i, j;
7200 register enum rtx_code code;
7201 register char *format_ptr;
7202 int count;
7203
7204 if (x == find)
7205 return 1;
7206 if (x == 0)
7207 return 0;
7208
7209 code = GET_CODE (x);
7210
7211 switch (code)
7212 {
7213 case REG:
7214 case QUEUED:
7215 case CONST_INT:
7216 case CONST_DOUBLE:
7217 case SYMBOL_REF:
7218 case CODE_LABEL:
7219 case PC:
7220 case CC0:
7221 return 0;
d445b551
RK
7222
7223 case SET:
7224 if (SET_DEST (x) == find)
7225 return count_occurrences (SET_SRC (x), find);
7226 break;
32131a9c
RK
7227 }
7228
7229 format_ptr = GET_RTX_FORMAT (code);
7230 count = 0;
7231
7232 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7233 {
7234 switch (*format_ptr++)
7235 {
7236 case 'e':
7237 count += count_occurrences (XEXP (x, i), find);
7238 break;
7239
7240 case 'E':
7241 if (XVEC (x, i) != NULL)
7242 {
7243 for (j = 0; j < XVECLEN (x, i); j++)
7244 count += count_occurrences (XVECEXP (x, i, j), find);
7245 }
7246 break;
7247 }
7248 }
7249 return count;
7250}
This page took 1.405745 seconds and 5 git commands to generate.