]> gcc.gnu.org Git - gcc.git/blame - gcc/reload1.c
(operands_match_p): For WORDS_BIG_ENDIAN machines,
[gcc.git] / gcc / reload1.c
CommitLineData
32131a9c 1/* Reload pseudo regs into hard regs for insns that require hard regs.
a8efe40d 2 Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.
32131a9c
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
ff2da9fc 21#include <stdio.h>
32131a9c
RK
22#include "config.h"
23#include "rtl.h"
24#include "obstack.h"
25#include "insn-config.h"
26#include "insn-flags.h"
27#include "insn-codes.h"
28#include "flags.h"
29#include "expr.h"
30#include "regs.h"
31#include "hard-reg-set.h"
32#include "reload.h"
33#include "recog.h"
34#include "basic-block.h"
35#include "output.h"
32131a9c
RK
36
37/* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
69\f
70/* During reload_as_needed, element N contains a REG rtx for the hard reg
71 into which pseudo reg N has been reloaded (perhaps for a previous insn). */
72static rtx *reg_last_reload_reg;
73
74/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
75 for an output reload that stores into reg N. */
76static char *reg_has_output_reload;
77
78/* Indicates which hard regs are reload-registers for an output reload
79 in the current insn. */
80static HARD_REG_SET reg_is_output_reload;
81
82/* Element N is the constant value to which pseudo reg N is equivalent,
83 or zero if pseudo reg N is not equivalent to a constant.
84 find_reloads looks at this in order to replace pseudo reg N
85 with the constant it stands for. */
86rtx *reg_equiv_constant;
87
88/* Element N is a memory location to which pseudo reg N is equivalent,
89 prior to any register elimination (such as frame pointer to stack
90 pointer). Depending on whether or not it is a valid address, this value
91 is transferred to either reg_equiv_address or reg_equiv_mem. */
4803a34a 92rtx *reg_equiv_memory_loc;
32131a9c
RK
93
94/* Element N is the address of stack slot to which pseudo reg N is equivalent.
95 This is used when the address is not valid as a memory address
96 (because its displacement is too big for the machine.) */
97rtx *reg_equiv_address;
98
99/* Element N is the memory slot to which pseudo reg N is equivalent,
100 or zero if pseudo reg N is not equivalent to a memory slot. */
101rtx *reg_equiv_mem;
102
103/* Widest width in which each pseudo reg is referred to (via subreg). */
104static int *reg_max_ref_width;
105
106/* Element N is the insn that initialized reg N from its equivalent
107 constant or memory slot. */
108static rtx *reg_equiv_init;
109
110/* During reload_as_needed, element N contains the last pseudo regno
111 reloaded into the Nth reload register. This vector is in parallel
112 with spill_regs. If that pseudo reg occupied more than one register,
113 reg_reloaded_contents points to that pseudo for each spill register in
114 use; all of these must remain set for an inheritance to occur. */
115static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
116
117/* During reload_as_needed, element N contains the insn for which
118 the Nth reload register was last used. This vector is in parallel
119 with spill_regs, and its contents are significant only when
120 reg_reloaded_contents is significant. */
121static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
122
123/* Number of spill-regs so far; number of valid elements of spill_regs. */
124static int n_spills;
125
126/* In parallel with spill_regs, contains REG rtx's for those regs.
127 Holds the last rtx used for any given reg, or 0 if it has never
128 been used for spilling yet. This rtx is reused, provided it has
129 the proper mode. */
130static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
131
132/* In parallel with spill_regs, contains nonzero for a spill reg
133 that was stored after the last time it was used.
134 The precise value is the insn generated to do the store. */
135static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
136
137/* This table is the inverse mapping of spill_regs:
138 indexed by hard reg number,
139 it contains the position of that reg in spill_regs,
140 or -1 for something that is not in spill_regs. */
141static short spill_reg_order[FIRST_PSEUDO_REGISTER];
142
143/* This reg set indicates registers that may not be used for retrying global
144 allocation. The registers that may not be used include all spill registers
145 and the frame pointer (if we are using one). */
146HARD_REG_SET forbidden_regs;
147
148/* This reg set indicates registers that are not good for spill registers.
149 They will not be used to complete groups of spill registers. This includes
150 all fixed registers, registers that may be eliminated, and registers
151 explicitly used in the rtl.
152
153 (spill_reg_order prevents these registers from being used to start a
154 group.) */
155static HARD_REG_SET bad_spill_regs;
156
157/* Describes order of use of registers for reloading
158 of spilled pseudo-registers. `spills' is the number of
159 elements that are actually valid; new ones are added at the end. */
160static short spill_regs[FIRST_PSEUDO_REGISTER];
161
162/* Describes order of preference for putting regs into spill_regs.
163 Contains the numbers of all the hard regs, in order most preferred first.
164 This order is different for each function.
165 It is set up by order_regs_for_reload.
166 Empty elements at the end contain -1. */
167static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
168
169/* 1 for a hard register that appears explicitly in the rtl
170 (for example, function value registers, special registers
171 used by insns, structure value pointer registers). */
172static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
173
174/* Indicates if a register was counted against the need for
175 groups. 0 means it can count against max_nongroup instead. */
176static HARD_REG_SET counted_for_groups;
177
178/* Indicates if a register was counted against the need for
179 non-groups. 0 means it can become part of a new group.
180 During choose_reload_regs, 1 here means don't use this reg
181 as part of a group, even if it seems to be otherwise ok. */
182static HARD_REG_SET counted_for_nongroups;
183
184/* Nonzero if indirect addressing is supported on the machine; this means
185 that spilling (REG n) does not require reloading it into a register in
186 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
187 value indicates the level of indirect addressing supported, e.g., two
188 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
189 a hard register. */
190
191static char spill_indirect_levels;
192
193/* Nonzero if indirect addressing is supported when the innermost MEM is
194 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
195 which these are valid is the same as spill_indirect_levels, above. */
196
197char indirect_symref_ok;
198
199/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
200
201char double_reg_address_ok;
202
203/* Record the stack slot for each spilled hard register. */
204
205static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
206
207/* Width allocated so far for that stack slot. */
208
209static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
210
211/* Indexed by register class and basic block number, nonzero if there is
212 any need for a spill register of that class in that basic block.
213 The pointer is 0 if we did stupid allocation and don't know
214 the structure of basic blocks. */
215
216char *basic_block_needs[N_REG_CLASSES];
217
218/* First uid used by insns created by reload in this function.
219 Used in find_equiv_reg. */
220int reload_first_uid;
221
222/* Flag set by local-alloc or global-alloc if anything is live in
223 a call-clobbered reg across calls. */
224
225int caller_save_needed;
226
227/* Set to 1 while reload_as_needed is operating.
228 Required by some machines to handle any generated moves differently. */
229
230int reload_in_progress = 0;
231
232/* These arrays record the insn_code of insns that may be needed to
233 perform input and output reloads of special objects. They provide a
234 place to pass a scratch register. */
235
236enum insn_code reload_in_optab[NUM_MACHINE_MODES];
237enum insn_code reload_out_optab[NUM_MACHINE_MODES];
238
d45cf215 239/* This obstack is used for allocation of rtl during register elimination.
32131a9c
RK
240 The allocated storage can be freed once find_reloads has processed the
241 insn. */
242
243struct obstack reload_obstack;
244char *reload_firstobj;
245
246#define obstack_chunk_alloc xmalloc
247#define obstack_chunk_free free
248
32131a9c
RK
249/* List of labels that must never be deleted. */
250extern rtx forced_labels;
251\f
252/* This structure is used to record information about register eliminations.
253 Each array entry describes one possible way of eliminating a register
254 in favor of another. If there is more than one way of eliminating a
255 particular register, the most preferred should be specified first. */
256
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.   If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

static struct elim_table
{
  int from;			/* Register number to be eliminated. */
  int to;			/* Register number used as replacement. */
  int initial_offset;		/* Initial difference between values.  */
  int can_eliminate;		/* Non-zero if this elimination can be done. */
  int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
				   insns made by reload. */
  int offset;			/* Current offset between the two regs. */
  int max_offset;		/* Maximum offset between the two regs. */
  int previous_offset;		/* Offset at end of previous insn. */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM. */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated. */
  rtx to_rtx;			/* REG rtx for the replacement. */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

/* Number of entries in the elimination table above; the table is a
   true array, so the classic sizeof quotient gives its length.  */
#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
288
289/* Record the number of pending eliminations that have an offset not equal
290 to their initial offset. If non-zero, we use a new copy of each
291 replacement result in any insns encountered. */
292static int num_not_at_initial_offset;
293
294/* Count the number of registers that we may be able to eliminate. */
295static int num_eliminable;
296
297/* For each label, we record the offset of each elimination. If we reach
298 a label by more than one path and an offset differs, we cannot do the
299 elimination. This information is indexed by the number of the label.
300 The first table is an array of flags that records whether we have yet
301 encountered a label and the second table is an array of arrays, one
302 entry in the latter array for each elimination. */
303
304static char *offsets_known_at;
305static int (*offsets_at)[NUM_ELIMINABLE_REGS];
306
307/* Number of labels in the current function. */
308
309static int num_labels;
310\f
311void mark_home_live ();
312static void count_possible_groups ();
313static int possible_group_p ();
314static void scan_paradoxical_subregs ();
315static void reload_as_needed ();
316static int modes_equiv_for_class_p ();
317static void alter_reg ();
318static void delete_dead_insn ();
5352b11a 319static void spill_failure ();
32131a9c
RK
320static int new_spill_reg();
321static void set_label_offsets ();
322static int eliminate_regs_in_insn ();
323static void mark_not_eliminable ();
324static int spill_hard_reg ();
325static void choose_reload_regs ();
326static void emit_reload_insns ();
327static void delete_output_reload ();
328static void forget_old_reloads_1 ();
329static void order_regs_for_reload ();
330static rtx inc_for_reload ();
331static int constraint_accepts_reg_p ();
332static int count_occurrences ();
333
334extern void remove_death ();
335extern rtx adj_offsettable_operand ();
336extern rtx form_sum ();
337\f
/* One-time initialization for the reload pass.  Probes the target's
   addressing capabilities (recording the results in the file-scope
   variables spill_indirect_levels, indirect_symref_ok, and
   double_reg_address_ok), sets up the obstack used for rtl allocated
   during register elimination, and fills in the reload_in_optab /
   reload_out_optab tables from whatever secondary-reload patterns the
   target machine description provides.  */

void
init_reload ()
{
  register int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  /* Build (MEM (PLUS (REG pseudo) (CONST_INT 4))) as the probe address;
     a register number past LAST_VIRTUAL_REGISTER stands in for a pseudo
     that did not get a hard reg.  */
  register rtx tem
    = gen_rtx (MEM, Pmode,
	       gen_rtx (PLUS, Pmode,
			gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
			GEN_INT (4)));
  spill_indirect_levels = 0;

  /* Each accepted wrapping in another MEM is one more level of
     indirection the target can handle.  */
  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx (MEM, Pmode, tem);
    }

  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx (PLUS, Pmode,
		     gen_rtx (REG, Pmode, FRAME_POINTER_REGNUM),
		     gen_rtx (REG, Pmode, i));
      /* This way, we make sure that reg+reg is an offsettable address.  */
      tem = plus_constant (tem, 4);

      if (memory_address_p (QImode, tem))
	{
	  double_reg_address_ok = 1;
	  break;
	}
    }

  /* Initialize obstack for our rtl allocation.  */
  gcc_obstack_init (&reload_obstack);
  /* Remember the base of the obstack so storage can be released back
     to this point after find_reloads processes each insn.  */
  reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);

#ifdef HAVE_SECONDARY_RELOADS

  /* Initialize the optabs for doing special input and output reloads.
     Default every mode to "no special pattern", then record each
     reload_in<mode>/reload_out<mode> pattern the target defines.  */

  for (i = 0; i < NUM_MACHINE_MODES; i++)
    reload_in_optab[i] = reload_out_optab[i] = CODE_FOR_nothing;

#ifdef HAVE_reload_inqi
  if (HAVE_reload_inqi)
    reload_in_optab[(int) QImode] = CODE_FOR_reload_inqi;
#endif
#ifdef HAVE_reload_inhi
  if (HAVE_reload_inhi)
    reload_in_optab[(int) HImode] = CODE_FOR_reload_inhi;
#endif
#ifdef HAVE_reload_insi
  if (HAVE_reload_insi)
    reload_in_optab[(int) SImode] = CODE_FOR_reload_insi;
#endif
#ifdef HAVE_reload_indi
  if (HAVE_reload_indi)
    reload_in_optab[(int) DImode] = CODE_FOR_reload_indi;
#endif
#ifdef HAVE_reload_inti
  if (HAVE_reload_inti)
    reload_in_optab[(int) TImode] = CODE_FOR_reload_inti;
#endif
#ifdef HAVE_reload_insf
  if (HAVE_reload_insf)
    reload_in_optab[(int) SFmode] = CODE_FOR_reload_insf;
#endif
#ifdef HAVE_reload_indf
  if (HAVE_reload_indf)
    reload_in_optab[(int) DFmode] = CODE_FOR_reload_indf;
#endif
#ifdef HAVE_reload_inxf
  if (HAVE_reload_inxf)
    reload_in_optab[(int) XFmode] = CODE_FOR_reload_inxf;
#endif
#ifdef HAVE_reload_intf
  if (HAVE_reload_intf)
    reload_in_optab[(int) TFmode] = CODE_FOR_reload_intf;
#endif

#ifdef HAVE_reload_outqi
  if (HAVE_reload_outqi)
    reload_out_optab[(int) QImode] = CODE_FOR_reload_outqi;
#endif
#ifdef HAVE_reload_outhi
  if (HAVE_reload_outhi)
    reload_out_optab[(int) HImode] = CODE_FOR_reload_outhi;
#endif
#ifdef HAVE_reload_outsi
  if (HAVE_reload_outsi)
    reload_out_optab[(int) SImode] = CODE_FOR_reload_outsi;
#endif
#ifdef HAVE_reload_outdi
  if (HAVE_reload_outdi)
    reload_out_optab[(int) DImode] = CODE_FOR_reload_outdi;
#endif
#ifdef HAVE_reload_outti
  if (HAVE_reload_outti)
    reload_out_optab[(int) TImode] = CODE_FOR_reload_outti;
#endif
#ifdef HAVE_reload_outsf
  if (HAVE_reload_outsf)
    reload_out_optab[(int) SFmode] = CODE_FOR_reload_outsf;
#endif
#ifdef HAVE_reload_outdf
  if (HAVE_reload_outdf)
    reload_out_optab[(int) DFmode] = CODE_FOR_reload_outdf;
#endif
#ifdef HAVE_reload_outxf
  if (HAVE_reload_outxf)
    reload_out_optab[(int) XFmode] = CODE_FOR_reload_outxf;
#endif
#ifdef HAVE_reload_outtf
  if (HAVE_reload_outtf)
    reload_out_optab[(int) TFmode] = CODE_FOR_reload_outtf;
#endif

#endif /* HAVE_SECONDARY_RELOADS */

}
470
471/* Main entry point for the reload pass, and only entry point
472 in this file.
473
474 FIRST is the first insn of the function being compiled.
475
476 GLOBAL nonzero means we were called from global_alloc
477 and should attempt to reallocate any pseudoregs that we
478 displace from hard regs we will use for reloads.
479 If GLOBAL is zero, we do not have enough information to do that,
480 so any pseudo reg that is spilled must go to the stack.
481
482 DUMPFILE is the global-reg debugging dump file stream, or 0.
483 If it is nonzero, messages are written to it to describe
484 which registers are seized as reload regs, which pseudo regs
5352b11a 485 are spilled from them, and where the pseudo regs are reallocated to.
32131a9c 486
5352b11a
RS
487 Return value is nonzero if reload failed
488 and we must not do any more for this function. */
489
490int
32131a9c
RK
491reload (first, global, dumpfile)
492 rtx first;
493 int global;
494 FILE *dumpfile;
495{
496 register int class;
497 register int i;
498 register rtx insn;
499 register struct elim_table *ep;
500
501 int something_changed;
502 int something_needs_reloads;
503 int something_needs_elimination;
504 int new_basic_block_needs;
a8efe40d
RK
505 enum reg_class caller_save_spill_class = NO_REGS;
506 int caller_save_group_size = 1;
32131a9c 507
5352b11a
RS
508 /* Nonzero means we couldn't get enough spill regs. */
509 int failure = 0;
510
32131a9c
RK
511 /* The basic block number currently being processed for INSN. */
512 int this_block;
513
514 /* Make sure even insns with volatile mem refs are recognizable. */
515 init_recog ();
516
517 /* Enable find_equiv_reg to distinguish insns made by reload. */
518 reload_first_uid = get_max_uid ();
519
520 for (i = 0; i < N_REG_CLASSES; i++)
521 basic_block_needs[i] = 0;
522
0dadecf6
RK
523#ifdef SECONDARY_MEMORY_NEEDED
524 /* Initialize the secondary memory table. */
525 clear_secondary_mem ();
526#endif
527
32131a9c
RK
528 /* Remember which hard regs appear explicitly
529 before we merge into `regs_ever_live' the ones in which
530 pseudo regs have been allocated. */
531 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
532
533 /* We don't have a stack slot for any spill reg yet. */
534 bzero (spill_stack_slot, sizeof spill_stack_slot);
535 bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);
536
a8efe40d
RK
537 /* Initialize the save area information for caller-save, in case some
538 are needed. */
539 init_save_areas ();
a8fdc208 540
32131a9c
RK
541 /* Compute which hard registers are now in use
542 as homes for pseudo registers.
543 This is done here rather than (eg) in global_alloc
544 because this point is reached even if not optimizing. */
545
546 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
547 mark_home_live (i);
548
549 /* Make sure that the last insn in the chain
550 is not something that needs reloading. */
fb3821f7 551 emit_note (NULL_PTR, NOTE_INSN_DELETED);
32131a9c
RK
552
553 /* Find all the pseudo registers that didn't get hard regs
554 but do have known equivalent constants or memory slots.
555 These include parameters (known equivalent to parameter slots)
556 and cse'd or loop-moved constant memory addresses.
557
558 Record constant equivalents in reg_equiv_constant
559 so they will be substituted by find_reloads.
560 Record memory equivalents in reg_mem_equiv so they can
561 be substituted eventually by altering the REG-rtx's. */
562
563 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
564 bzero (reg_equiv_constant, max_regno * sizeof (rtx));
565 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
566 bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
567 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
568 bzero (reg_equiv_mem, max_regno * sizeof (rtx));
569 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
570 bzero (reg_equiv_init, max_regno * sizeof (rtx));
571 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
572 bzero (reg_equiv_address, max_regno * sizeof (rtx));
573 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
574 bzero (reg_max_ref_width, max_regno * sizeof (int));
575
576 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
577 Also find all paradoxical subregs
578 and find largest such for each pseudo. */
579
580 for (insn = first; insn; insn = NEXT_INSN (insn))
581 {
582 rtx set = single_set (insn);
583
584 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
585 {
fb3821f7 586 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
a8efe40d
RK
587 if (note
588#ifdef LEGITIMATE_PIC_OPERAND_P
a8fdc208 589 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
a8efe40d
RK
590 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
591#endif
592 )
32131a9c
RK
593 {
594 rtx x = XEXP (note, 0);
595 i = REGNO (SET_DEST (set));
596 if (i > LAST_VIRTUAL_REGISTER)
597 {
598 if (GET_CODE (x) == MEM)
599 reg_equiv_memory_loc[i] = x;
600 else if (CONSTANT_P (x))
601 {
602 if (LEGITIMATE_CONSTANT_P (x))
603 reg_equiv_constant[i] = x;
604 else
605 reg_equiv_memory_loc[i]
d445b551 606 = force_const_mem (GET_MODE (SET_DEST (set)), x);
32131a9c
RK
607 }
608 else
609 continue;
610
611 /* If this register is being made equivalent to a MEM
612 and the MEM is not SET_SRC, the equivalencing insn
613 is one with the MEM as a SET_DEST and it occurs later.
614 So don't mark this insn now. */
615 if (GET_CODE (x) != MEM
616 || rtx_equal_p (SET_SRC (set), x))
617 reg_equiv_init[i] = insn;
618 }
619 }
620 }
621
622 /* If this insn is setting a MEM from a register equivalent to it,
623 this is the equivalencing insn. */
624 else if (set && GET_CODE (SET_DEST (set)) == MEM
625 && GET_CODE (SET_SRC (set)) == REG
626 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
627 && rtx_equal_p (SET_DEST (set),
628 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
629 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
630
631 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
632 scan_paradoxical_subregs (PATTERN (insn));
633 }
634
635 /* Does this function require a frame pointer? */
636
637 frame_pointer_needed = (! flag_omit_frame_pointer
638#ifdef EXIT_IGNORE_STACK
639 /* ?? If EXIT_IGNORE_STACK is set, we will not save
640 and restore sp for alloca. So we can't eliminate
641 the frame pointer in that case. At some point,
642 we should improve this by emitting the
643 sp-adjusting insns for this case. */
644 || (current_function_calls_alloca
645 && EXIT_IGNORE_STACK)
646#endif
647 || FRAME_POINTER_REQUIRED);
648
649 num_eliminable = 0;
650
651 /* Initialize the table of registers to eliminate. The way we do this
652 depends on how the eliminable registers were defined. */
653#ifdef ELIMINABLE_REGS
654 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
655 {
656 ep->can_eliminate = ep->can_eliminate_previous
657 = (CAN_ELIMINATE (ep->from, ep->to)
658 && (ep->from != FRAME_POINTER_REGNUM || ! frame_pointer_needed));
659 }
660#else
661 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
662 = ! frame_pointer_needed;
663#endif
664
665 /* Count the number of eliminable registers and build the FROM and TO
a8fdc208 666 REG rtx's. Note that code in gen_rtx will cause, e.g.,
32131a9c
RK
667 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
668 We depend on this. */
669 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
670 {
671 num_eliminable += ep->can_eliminate;
672 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
673 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
674 }
675
676 num_labels = max_label_num () - get_first_label_num ();
677
678 /* Allocate the tables used to store offset information at labels. */
679 offsets_known_at = (char *) alloca (num_labels);
680 offsets_at
681 = (int (*)[NUM_ELIMINABLE_REGS])
682 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
683
684 offsets_known_at -= get_first_label_num ();
685 offsets_at -= get_first_label_num ();
686
687 /* Alter each pseudo-reg rtx to contain its hard reg number.
688 Assign stack slots to the pseudos that lack hard regs or equivalents.
689 Do not touch virtual registers. */
690
691 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
692 alter_reg (i, -1);
693
694 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
695 because the stack size may be a part of the offset computation for
696 register elimination. */
697 assign_stack_local (BLKmode, 0, 0);
698
699 /* If we have some registers we think can be eliminated, scan all insns to
700 see if there is an insn that sets one of these registers to something
701 other than itself plus a constant. If so, the register cannot be
702 eliminated. Doing this scan here eliminates an extra pass through the
703 main reload loop in the most common case where register elimination
704 cannot be done. */
705 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
706 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
707 || GET_CODE (insn) == CALL_INSN)
708 note_stores (PATTERN (insn), mark_not_eliminable);
709
710#ifndef REGISTER_CONSTRAINTS
711 /* If all the pseudo regs have hard regs,
712 except for those that are never referenced,
713 we know that no reloads are needed. */
714 /* But that is not true if there are register constraints, since
715 in that case some pseudos might be in the wrong kind of hard reg. */
716
717 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
718 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
719 break;
720
b8093d02 721 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
32131a9c
RK
722 return;
723#endif
724
725 /* Compute the order of preference for hard registers to spill.
726 Store them by decreasing preference in potential_reload_regs. */
727
728 order_regs_for_reload ();
729
730 /* So far, no hard regs have been spilled. */
731 n_spills = 0;
732 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
733 spill_reg_order[i] = -1;
734
735 /* On most machines, we can't use any register explicitly used in the
736 rtl as a spill register. But on some, we have to. Those will have
737 taken care to keep the life of hard regs as short as possible. */
738
739#ifdef SMALL_REGISTER_CLASSES
740 CLEAR_HARD_REG_SET (forbidden_regs);
741#else
742 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
743#endif
744
745 /* Spill any hard regs that we know we can't eliminate. */
746 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
747 if (! ep->can_eliminate)
748 {
749 spill_hard_reg (ep->from, global, dumpfile, 1);
750 regs_ever_live[ep->from] = 1;
751 }
752
753 if (global)
754 for (i = 0; i < N_REG_CLASSES; i++)
755 {
756 basic_block_needs[i] = (char *)alloca (n_basic_blocks);
757 bzero (basic_block_needs[i], n_basic_blocks);
758 }
759
b2f15f94
RK
760 /* From now on, we need to emit any moves without making new pseudos. */
761 reload_in_progress = 1;
762
32131a9c
RK
763 /* This loop scans the entire function each go-round
764 and repeats until one repetition spills no additional hard regs. */
765
d45cf215 766 /* This flag is set when a pseudo reg is spilled,
32131a9c
RK
767 to require another pass. Note that getting an additional reload
768 reg does not necessarily imply any pseudo reg was spilled;
769 sometimes we find a reload reg that no pseudo reg was allocated in. */
770 something_changed = 1;
771 /* This flag is set if there are any insns that require reloading. */
772 something_needs_reloads = 0;
773 /* This flag is set if there are any insns that require register
774 eliminations. */
775 something_needs_elimination = 0;
776 while (something_changed)
777 {
778 rtx after_call = 0;
779
780 /* For each class, number of reload regs needed in that class.
781 This is the maximum over all insns of the needs in that class
782 of the individual insn. */
783 int max_needs[N_REG_CLASSES];
784 /* For each class, size of group of consecutive regs
785 that is needed for the reloads of this class. */
786 int group_size[N_REG_CLASSES];
787 /* For each class, max number of consecutive groups needed.
788 (Each group contains group_size[CLASS] consecutive registers.) */
789 int max_groups[N_REG_CLASSES];
790 /* For each class, max number needed of regs that don't belong
791 to any of the groups. */
792 int max_nongroups[N_REG_CLASSES];
793 /* For each class, the machine mode which requires consecutive
794 groups of regs of that class.
795 If two different modes ever require groups of one class,
796 they must be the same size and equally restrictive for that class,
797 otherwise we can't handle the complexity. */
798 enum machine_mode group_mode[N_REG_CLASSES];
5352b11a
RS
799 /* Record the insn where each maximum need is first found. */
800 rtx max_needs_insn[N_REG_CLASSES];
801 rtx max_groups_insn[N_REG_CLASSES];
802 rtx max_nongroups_insn[N_REG_CLASSES];
32131a9c 803 rtx x;
0dadecf6 804 int starting_frame_size = get_frame_size ();
e404a39a 805 static char *reg_class_names[] = REG_CLASS_NAMES;
32131a9c
RK
806
807 something_changed = 0;
808 bzero (max_needs, sizeof max_needs);
809 bzero (max_groups, sizeof max_groups);
810 bzero (max_nongroups, sizeof max_nongroups);
5352b11a
RS
811 bzero (max_needs_insn, sizeof max_needs_insn);
812 bzero (max_groups_insn, sizeof max_groups_insn);
813 bzero (max_nongroups_insn, sizeof max_nongroups_insn);
32131a9c
RK
814 bzero (group_size, sizeof group_size);
815 for (i = 0; i < N_REG_CLASSES; i++)
816 group_mode[i] = VOIDmode;
817
818 /* Keep track of which basic blocks are needing the reloads. */
819 this_block = 0;
820
821 /* Remember whether any element of basic_block_needs
822 changes from 0 to 1 in this pass. */
823 new_basic_block_needs = 0;
824
825 /* Reset all offsets on eliminable registers to their initial values. */
826#ifdef ELIMINABLE_REGS
827 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
828 {
829 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
a8efe40d
RK
830 ep->previous_offset = ep->offset
831 = ep->max_offset = ep->initial_offset;
32131a9c
RK
832 }
833#else
834#ifdef INITIAL_FRAME_POINTER_OFFSET
835 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
836#else
837 if (!FRAME_POINTER_REQUIRED)
838 abort ();
839 reg_eliminate[0].initial_offset = 0;
840#endif
a8efe40d 841 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
32131a9c
RK
842 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
843#endif
844
845 num_not_at_initial_offset = 0;
846
847 bzero (&offsets_known_at[get_first_label_num ()], num_labels);
848
849 /* Set a known offset for each forced label to be at the initial offset
850 of each elimination. We do this because we assume that all
851 computed jumps occur from a location where each elimination is
852 at its initial offset. */
853
854 for (x = forced_labels; x; x = XEXP (x, 1))
855 if (XEXP (x, 0))
fb3821f7 856 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
32131a9c
RK
857
858 /* For each pseudo register that has an equivalent location defined,
859 try to eliminate any eliminable registers (such as the frame pointer)
860 assuming initial offsets for the replacement register, which
861 is the normal case.
862
863 If the resulting location is directly addressable, substitute
864 the MEM we just got directly for the old REG.
865
866 If it is not addressable but is a constant or the sum of a hard reg
867 and constant, it is probably not addressable because the constant is
868 out of range, in that case record the address; we will generate
869 hairy code to compute the address in a register each time it is
a8fdc208 870 needed.
32131a9c
RK
871
872 If the location is not addressable, but does not have one of the
873 above forms, assign a stack slot. We have to do this to avoid the
874 potential of producing lots of reloads if, e.g., a location involves
875 a pseudo that didn't get a hard register and has an equivalent memory
876 location that also involves a pseudo that didn't get a hard register.
877
878 Perhaps at some point we will improve reload_when_needed handling
879 so this problem goes away. But that's very hairy. */
880
881 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
882 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
883 {
fb3821f7 884 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
32131a9c
RK
885
886 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
887 XEXP (x, 0)))
888 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
889 else if (CONSTANT_P (XEXP (x, 0))
890 || (GET_CODE (XEXP (x, 0)) == PLUS
891 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
892 && (REGNO (XEXP (XEXP (x, 0), 0))
893 < FIRST_PSEUDO_REGISTER)
894 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
895 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
896 else
897 {
898 /* Make a new stack slot. Then indicate that something
a8fdc208 899 changed so we go back and recompute offsets for
32131a9c
RK
900 eliminable registers because the allocation of memory
901 below might change some offset. reg_equiv_{mem,address}
902 will be set up for this pseudo on the next pass around
903 the loop. */
904 reg_equiv_memory_loc[i] = 0;
905 reg_equiv_init[i] = 0;
906 alter_reg (i, -1);
907 something_changed = 1;
908 }
909 }
a8fdc208 910
d45cf215 911 /* If we allocated another pseudo to the stack, redo elimination
32131a9c
RK
912 bookkeeping. */
913 if (something_changed)
914 continue;
915
a8efe40d
RK
916 /* If caller-saves needs a group, initialize the group to include
917 the size and mode required for caller-saves. */
918
919 if (caller_save_group_size > 1)
920 {
921 group_mode[(int) caller_save_spill_class] = Pmode;
922 group_size[(int) caller_save_spill_class] = caller_save_group_size;
923 }
924
32131a9c
RK
925 /* Compute the most additional registers needed by any instruction.
926 Collect information separately for each class of regs. */
927
928 for (insn = first; insn; insn = NEXT_INSN (insn))
929 {
930 if (global && this_block + 1 < n_basic_blocks
931 && insn == basic_block_head[this_block+1])
932 ++this_block;
933
934 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
935 might include REG_LABEL), we need to see what effects this
936 has on the known offsets at labels. */
937
938 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
939 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
940 && REG_NOTES (insn) != 0))
941 set_label_offsets (insn, insn, 0);
942
943 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
944 {
945 /* Nonzero means don't use a reload reg that overlaps
946 the place where a function value can be returned. */
947 rtx avoid_return_reg = 0;
948
949 rtx old_body = PATTERN (insn);
950 int old_code = INSN_CODE (insn);
951 rtx old_notes = REG_NOTES (insn);
952 int did_elimination = 0;
953
954 /* Initially, count RELOAD_OTHER reloads.
955 Later, merge in the other kinds. */
956 int insn_needs[N_REG_CLASSES];
957 int insn_groups[N_REG_CLASSES];
958 int insn_total_groups = 0;
959
960 /* Count RELOAD_FOR_INPUT_RELOAD_ADDRESS reloads. */
961 int insn_needs_for_inputs[N_REG_CLASSES];
962 int insn_groups_for_inputs[N_REG_CLASSES];
963 int insn_total_groups_for_inputs = 0;
964
965 /* Count RELOAD_FOR_OUTPUT_RELOAD_ADDRESS reloads. */
966 int insn_needs_for_outputs[N_REG_CLASSES];
967 int insn_groups_for_outputs[N_REG_CLASSES];
968 int insn_total_groups_for_outputs = 0;
969
970 /* Count RELOAD_FOR_OPERAND_ADDRESS reloads. */
971 int insn_needs_for_operands[N_REG_CLASSES];
972 int insn_groups_for_operands[N_REG_CLASSES];
973 int insn_total_groups_for_operands = 0;
974
32131a9c
RK
975#if 0 /* This wouldn't work nowadays, since optimize_bit_field
976 looks for non-strict memory addresses. */
977 /* Optimization: a bit-field instruction whose field
978 happens to be a byte or halfword in memory
979 can be changed to a move instruction. */
980
981 if (GET_CODE (PATTERN (insn)) == SET)
982 {
983 rtx dest = SET_DEST (PATTERN (insn));
984 rtx src = SET_SRC (PATTERN (insn));
985
986 if (GET_CODE (dest) == ZERO_EXTRACT
987 || GET_CODE (dest) == SIGN_EXTRACT)
988 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
989 if (GET_CODE (src) == ZERO_EXTRACT
990 || GET_CODE (src) == SIGN_EXTRACT)
991 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
992 }
993#endif
994
995 /* If needed, eliminate any eliminable registers. */
996 if (num_eliminable)
997 did_elimination = eliminate_regs_in_insn (insn, 0);
998
999#ifdef SMALL_REGISTER_CLASSES
1000 /* Set avoid_return_reg if this is an insn
1001 that might use the value of a function call. */
1002 if (GET_CODE (insn) == CALL_INSN)
1003 {
1004 if (GET_CODE (PATTERN (insn)) == SET)
1005 after_call = SET_DEST (PATTERN (insn));
1006 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1007 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1008 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1009 else
1010 after_call = 0;
1011 }
1012 else if (after_call != 0
1013 && !(GET_CODE (PATTERN (insn)) == SET
1014 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1015 {
1016 if (reg_mentioned_p (after_call, PATTERN (insn)))
1017 avoid_return_reg = after_call;
1018 after_call = 0;
1019 }
1020#endif /* SMALL_REGISTER_CLASSES */
1021
1022 /* Analyze the instruction. */
1023 find_reloads (insn, 0, spill_indirect_levels, global,
1024 spill_reg_order);
1025
1026 /* Remember for later shortcuts which insns had any reloads or
1027 register eliminations.
1028
1029 One might think that it would be worthwhile to mark insns
1030 that need register replacements but not reloads, but this is
1031 not safe because find_reloads may do some manipulation of
1032 the insn (such as swapping commutative operands), which would
1033 be lost when we restore the old pattern after register
1034 replacement. So the actions of find_reloads must be redone in
1035 subsequent passes or in reload_as_needed.
1036
1037 However, it is safe to mark insns that need reloads
1038 but not register replacement. */
1039
1040 PUT_MODE (insn, (did_elimination ? QImode
1041 : n_reloads ? HImode
1042 : VOIDmode));
1043
1044 /* Discard any register replacements done. */
1045 if (did_elimination)
1046 {
1047 obstack_free (&reload_obstack, reload_firstobj);
1048 PATTERN (insn) = old_body;
1049 INSN_CODE (insn) = old_code;
1050 REG_NOTES (insn) = old_notes;
1051 something_needs_elimination = 1;
1052 }
1053
a8efe40d 1054 /* If this insn has no reloads, we need not do anything except
a8fdc208 1055 in the case of a CALL_INSN when we have caller-saves and
a8efe40d
RK
1056 caller-save needs reloads. */
1057
1058 if (n_reloads == 0
1059 && ! (GET_CODE (insn) == CALL_INSN
1060 && caller_save_spill_class != NO_REGS))
32131a9c
RK
1061 continue;
1062
1063 something_needs_reloads = 1;
1064
a8efe40d
RK
1065 for (i = 0; i < N_REG_CLASSES; i++)
1066 {
1067 insn_needs[i] = 0, insn_groups[i] = 0;
1068 insn_needs_for_inputs[i] = 0, insn_groups_for_inputs[i] = 0;
1069 insn_needs_for_outputs[i] = 0, insn_groups_for_outputs[i] = 0;
1070 insn_needs_for_operands[i] = 0, insn_groups_for_operands[i] = 0;
1071 }
1072
32131a9c
RK
1073 /* Count each reload once in every class
1074 containing the reload's own class. */
1075
1076 for (i = 0; i < n_reloads; i++)
1077 {
1078 register enum reg_class *p;
e85ddd99 1079 enum reg_class class = reload_reg_class[i];
32131a9c
RK
1080 int size;
1081 enum machine_mode mode;
1082 int *this_groups;
1083 int *this_needs;
1084 int *this_total_groups;
1085
1086 /* Don't count the dummy reloads, for which one of the
1087 regs mentioned in the insn can be used for reloading.
1088 Don't count optional reloads.
1089 Don't count reloads that got combined with others. */
1090 if (reload_reg_rtx[i] != 0
1091 || reload_optional[i] != 0
1092 || (reload_out[i] == 0 && reload_in[i] == 0
1093 && ! reload_secondary_p[i]))
1094 continue;
1095
e85ddd99
RK
1096 /* Show that a reload register of this class is needed
1097 in this basic block. We do not use insn_needs and
1098 insn_groups because they are overly conservative for
1099 this purpose. */
1100 if (global && ! basic_block_needs[(int) class][this_block])
1101 {
1102 basic_block_needs[(int) class][this_block] = 1;
1103 new_basic_block_needs = 1;
1104 }
1105
32131a9c
RK
1106 /* Decide which time-of-use to count this reload for. */
1107 switch (reload_when_needed[i])
1108 {
1109 case RELOAD_OTHER:
1110 case RELOAD_FOR_OUTPUT:
1111 case RELOAD_FOR_INPUT:
1112 this_needs = insn_needs;
1113 this_groups = insn_groups;
1114 this_total_groups = &insn_total_groups;
1115 break;
1116
1117 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
1118 this_needs = insn_needs_for_inputs;
1119 this_groups = insn_groups_for_inputs;
1120 this_total_groups = &insn_total_groups_for_inputs;
1121 break;
1122
1123 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
1124 this_needs = insn_needs_for_outputs;
1125 this_groups = insn_groups_for_outputs;
1126 this_total_groups = &insn_total_groups_for_outputs;
1127 break;
1128
1129 case RELOAD_FOR_OPERAND_ADDRESS:
1130 this_needs = insn_needs_for_operands;
1131 this_groups = insn_groups_for_operands;
1132 this_total_groups = &insn_total_groups_for_operands;
1133 break;
1134 }
1135
1136 mode = reload_inmode[i];
1137 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1138 mode = reload_outmode[i];
e85ddd99 1139 size = CLASS_MAX_NREGS (class, mode);
32131a9c
RK
1140 if (size > 1)
1141 {
1142 enum machine_mode other_mode, allocate_mode;
1143
1144 /* Count number of groups needed separately from
1145 number of individual regs needed. */
e85ddd99
RK
1146 this_groups[(int) class]++;
1147 p = reg_class_superclasses[(int) class];
32131a9c
RK
1148 while (*p != LIM_REG_CLASSES)
1149 this_groups[(int) *p++]++;
1150 (*this_total_groups)++;
1151
1152 /* Record size and mode of a group of this class. */
1153 /* If more than one size group is needed,
1154 make all groups the largest needed size. */
e85ddd99 1155 if (group_size[(int) class] < size)
32131a9c 1156 {
e85ddd99 1157 other_mode = group_mode[(int) class];
32131a9c
RK
1158 allocate_mode = mode;
1159
e85ddd99
RK
1160 group_size[(int) class] = size;
1161 group_mode[(int) class] = mode;
32131a9c
RK
1162 }
1163 else
1164 {
1165 other_mode = mode;
e85ddd99 1166 allocate_mode = group_mode[(int) class];
32131a9c
RK
1167 }
1168
1169 /* Crash if two dissimilar machine modes both need
1170 groups of consecutive regs of the same class. */
1171
1172 if (other_mode != VOIDmode
1173 && other_mode != allocate_mode
1174 && ! modes_equiv_for_class_p (allocate_mode,
1175 other_mode,
e85ddd99 1176 class))
32131a9c
RK
1177 abort ();
1178 }
1179 else if (size == 1)
1180 {
e85ddd99
RK
1181 this_needs[(int) class] += 1;
1182 p = reg_class_superclasses[(int) class];
32131a9c
RK
1183 while (*p != LIM_REG_CLASSES)
1184 this_needs[(int) *p++] += 1;
1185 }
1186 else
1187 abort ();
1188 }
1189
1190 /* All reloads have been counted for this insn;
1191 now merge the various times of use.
1192 This sets insn_needs, etc., to the maximum total number
1193 of registers needed at any point in this insn. */
1194
1195 for (i = 0; i < N_REG_CLASSES; i++)
1196 {
1197 int this_max;
1198 this_max = insn_needs_for_inputs[i];
1199 if (insn_needs_for_outputs[i] > this_max)
1200 this_max = insn_needs_for_outputs[i];
1201 if (insn_needs_for_operands[i] > this_max)
1202 this_max = insn_needs_for_operands[i];
1203 insn_needs[i] += this_max;
1204 this_max = insn_groups_for_inputs[i];
1205 if (insn_groups_for_outputs[i] > this_max)
1206 this_max = insn_groups_for_outputs[i];
1207 if (insn_groups_for_operands[i] > this_max)
1208 this_max = insn_groups_for_operands[i];
1209 insn_groups[i] += this_max;
32131a9c 1210 }
a8efe40d 1211
32131a9c
RK
1212 insn_total_groups += MAX (insn_total_groups_for_inputs,
1213 MAX (insn_total_groups_for_outputs,
1214 insn_total_groups_for_operands));
1215
a8efe40d
RK
1216 /* If this is a CALL_INSN and caller-saves will need
1217 a spill register, act as if the spill register is
1218 needed for this insn. However, the spill register
1219 can be used by any reload of this insn, so we only
1220 need do something if no need for that class has
a8fdc208 1221 been recorded.
a8efe40d
RK
1222
1223 The assumption that every CALL_INSN will trigger a
1224 caller-save is highly conservative, however, the number
1225 of cases where caller-saves will need a spill register but
1226 a block containing a CALL_INSN won't need a spill register
1227 of that class should be quite rare.
1228
1229 If a group is needed, the size and mode of the group will
d45cf215 1230 have been set up at the beginning of this loop. */
a8efe40d
RK
1231
1232 if (GET_CODE (insn) == CALL_INSN
1233 && caller_save_spill_class != NO_REGS)
1234 {
1235 int *caller_save_needs
1236 = (caller_save_group_size > 1 ? insn_groups : insn_needs);
1237
1238 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1239 {
1240 register enum reg_class *p
1241 = reg_class_superclasses[(int) caller_save_spill_class];
1242
1243 caller_save_needs[(int) caller_save_spill_class]++;
1244
1245 while (*p != LIM_REG_CLASSES)
0aaa6af8 1246 caller_save_needs[(int) *p++] += 1;
a8efe40d
RK
1247 }
1248
1249 if (caller_save_group_size > 1)
1250 insn_total_groups = MAX (insn_total_groups, 1);
d1c1397e
RS
1251
1252
1253 /* Show that this basic block will need a register of
1254 this class. */
1255
1256 if (global
1257 && ! (basic_block_needs[(int) caller_save_spill_class]
1258 [this_block]))
1259 {
1260 basic_block_needs[(int) caller_save_spill_class]
1261 [this_block] = 1;
1262 new_basic_block_needs = 1;
1263 }
a8efe40d
RK
1264 }
1265
32131a9c
RK
1266#ifdef SMALL_REGISTER_CLASSES
1267 /* If this insn stores the value of a function call,
1268 and that value is in a register that has been spilled,
1269 and if the insn needs a reload in a class
1270 that might use that register as the reload register,
1271 then add add an extra need in that class.
1272 This makes sure we have a register available that does
1273 not overlap the return value. */
1274 if (avoid_return_reg)
1275 {
1276 int regno = REGNO (avoid_return_reg);
1277 int nregs
1278 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1279 int r;
1280 int inc_groups = 0;
1281 for (r = regno; r < regno + nregs; r++)
1282 if (spill_reg_order[r] >= 0)
1283 for (i = 0; i < N_REG_CLASSES; i++)
1284 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1285 {
1286 if (insn_needs[i] > 0)
1287 insn_needs[i]++;
1288 if (insn_groups[i] > 0
1289 && nregs > 1)
1290 inc_groups = 1;
1291 }
1292 if (inc_groups)
1293 insn_groups[i]++;
1294 }
1295#endif /* SMALL_REGISTER_CLASSES */
1296
1297 /* For each class, collect maximum need of any insn. */
1298
1299 for (i = 0; i < N_REG_CLASSES; i++)
1300 {
1301 if (max_needs[i] < insn_needs[i])
5352b11a
RS
1302 {
1303 max_needs[i] = insn_needs[i];
1304 max_needs_insn[i] = insn;
1305 }
32131a9c 1306 if (max_groups[i] < insn_groups[i])
5352b11a
RS
1307 {
1308 max_groups[i] = insn_groups[i];
1309 max_groups_insn[i] = insn;
1310 }
32131a9c
RK
1311 if (insn_total_groups > 0)
1312 if (max_nongroups[i] < insn_needs[i])
5352b11a
RS
1313 {
1314 max_nongroups[i] = insn_needs[i];
1315 max_nongroups_insn[i] = insn;
1316 }
32131a9c
RK
1317 }
1318 }
1319 /* Note that there is a continue statement above. */
1320 }
1321
0dadecf6
RK
1322 /* If we allocated any new memory locations, make another pass
1323 since it might have changed elimination offsets. */
1324 if (starting_frame_size != get_frame_size ())
1325 something_changed = 1;
1326
e404a39a
RK
1327 if (dumpfile)
1328 for (i = 0; i < N_REG_CLASSES; i++)
1329 {
1330 if (max_needs[i] > 0)
1331 fprintf (dumpfile,
1332 ";; Need %d reg%s of class %s (for insn %d).\n",
1333 max_needs[i], max_needs[i] == 1 ? "" : "s",
1334 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1335 if (max_nongroups[i] > 0)
1336 fprintf (dumpfile,
1337 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1338 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1339 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1340 if (max_groups[i] > 0)
1341 fprintf (dumpfile,
1342 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1343 max_groups[i], max_groups[i] == 1 ? "" : "s",
1344 mode_name[(int) group_mode[i]],
1345 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1346 }
1347
d445b551 1348 /* If we have caller-saves, set up the save areas and see if caller-save
a8efe40d 1349 will need a spill register. */
32131a9c 1350
d445b551 1351 if (caller_save_needed
a8efe40d
RK
1352 && ! setup_save_areas (&something_changed)
1353 && caller_save_spill_class == NO_REGS)
32131a9c 1354 {
a8efe40d
RK
1355 /* The class we will need depends on whether the machine
1356 supports the sum of two registers for an address; see
1357 find_address_reloads for details. */
1358
a8fdc208 1359 caller_save_spill_class
a8efe40d
RK
1360 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1361 caller_save_group_size
1362 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1363 something_changed = 1;
32131a9c
RK
1364 }
1365
1366 /* Now deduct from the needs for the registers already
1367 available (already spilled). */
1368
1369 CLEAR_HARD_REG_SET (counted_for_groups);
1370 CLEAR_HARD_REG_SET (counted_for_nongroups);
1371
1372 /* First find all regs alone in their class
1373 and count them (if desired) for non-groups.
1374 We would be screwed if a group took the only reg in a class
d445b551 1375 for which a non-group reload is needed.
32131a9c
RK
1376 (Note there is still a bug; if a class has 2 regs,
1377 both could be stolen by groups and we would lose the same way.
1378 With luck, no machine will need a nongroup in a 2-reg class.) */
1379
1380 for (i = 0; i < n_spills; i++)
1381 {
1382 register enum reg_class *p;
1383 class = (int) REGNO_REG_CLASS (spill_regs[i]);
1384
1385 if (reg_class_size[class] == 1 && max_nongroups[class] > 0)
1386 {
1387 max_needs[class]--;
1388 p = reg_class_superclasses[class];
1389 while (*p != LIM_REG_CLASSES)
1390 max_needs[(int) *p++]--;
1391
1392 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1393 max_nongroups[class]--;
1394 p = reg_class_superclasses[class];
1395 while (*p != LIM_REG_CLASSES)
1396 {
1397 if (max_nongroups[(int) *p] > 0)
1398 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1399 max_nongroups[(int) *p++]--;
1400 }
1401 }
1402 }
1403
1404 /* Now find all consecutive groups of spilled registers
1405 and mark each group off against the need for such groups.
1406 But don't count them against ordinary need, yet. */
1407
1408 count_possible_groups (group_size, group_mode, max_groups);
1409
1410 /* Now count all spill regs against the individual need,
a8fdc208 1411 This includes those counted above for groups,
32131a9c
RK
1412 but not those previously counted for nongroups.
1413
1414 Those that weren't counted_for_groups can also count against
1415 the not-in-group need. */
1416
1417 for (i = 0; i < n_spills; i++)
1418 {
1419 register enum reg_class *p;
1420 class = (int) REGNO_REG_CLASS (spill_regs[i]);
1421
1422 /* Those counted at the beginning shouldn't be counted twice. */
1423 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
1424 {
1425 max_needs[class]--;
1426 p = reg_class_superclasses[class];
1427 while (*p != LIM_REG_CLASSES)
1428 max_needs[(int) *p++]--;
1429
1430 if (! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i]))
1431 {
1432 if (max_nongroups[class] > 0)
1433 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1434 max_nongroups[class]--;
1435 p = reg_class_superclasses[class];
1436 while (*p != LIM_REG_CLASSES)
1437 {
1438 if (max_nongroups[(int) *p] > 0)
1439 SET_HARD_REG_BIT (counted_for_nongroups,
1440 spill_regs[i]);
1441 max_nongroups[(int) *p++]--;
1442 }
1443 }
1444 }
1445 }
1446
5c23c401
RK
1447 /* See if anything that happened changes which eliminations are valid.
1448 For example, on the Sparc, whether or not the frame pointer can
1449 be eliminated can depend on what registers have been used. We need
1450 not check some conditions again (such as flag_omit_frame_pointer)
1451 since they can't have changed. */
1452
1453 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1454 if ((ep->from == FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1455#ifdef ELIMINABLE_REGS
1456 || ! CAN_ELIMINATE (ep->from, ep->to)
1457#endif
1458 )
1459 ep->can_eliminate = 0;
1460
32131a9c
RK
1461 /* Look for the case where we have discovered that we can't replace
1462 register A with register B and that means that we will now be
1463 trying to replace register A with register C. This means we can
1464 no longer replace register C with register B and we need to disable
1465 such an elimination, if it exists. This occurs often with A == ap,
1466 B == sp, and C == fp. */
a8fdc208 1467
32131a9c
RK
1468 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1469 {
1470 struct elim_table *op;
1471 register int new_to = -1;
1472
1473 if (! ep->can_eliminate && ep->can_eliminate_previous)
1474 {
1475 /* Find the current elimination for ep->from, if there is a
1476 new one. */
1477 for (op = reg_eliminate;
1478 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1479 if (op->from == ep->from && op->can_eliminate)
1480 {
1481 new_to = op->to;
1482 break;
1483 }
1484
1485 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1486 disable it. */
1487 for (op = reg_eliminate;
1488 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1489 if (op->from == new_to && op->to == ep->to)
1490 op->can_eliminate = 0;
1491 }
1492 }
1493
1494 /* See if any registers that we thought we could eliminate the previous
1495 time are no longer eliminable. If so, something has changed and we
1496 must spill the register. Also, recompute the number of eliminable
1497 registers and see if the frame pointer is needed; it is if there is
1498 no elimination of the frame pointer that we can perform. */
1499
1500 frame_pointer_needed = 1;
1501 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1502 {
1503 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM)
1504 frame_pointer_needed = 0;
1505
1506 if (! ep->can_eliminate && ep->can_eliminate_previous)
1507 {
1508 ep->can_eliminate_previous = 0;
1509 spill_hard_reg (ep->from, global, dumpfile, 1);
1510 regs_ever_live[ep->from] = 1;
1511 something_changed = 1;
1512 num_eliminable--;
1513 }
1514 }
1515
1516 /* If all needs are met, we win. */
1517
1518 for (i = 0; i < N_REG_CLASSES; i++)
1519 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1520 break;
1521 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1522 break;
1523
1524 /* Not all needs are met; must spill more hard regs. */
1525
1526 /* If any element of basic_block_needs changed from 0 to 1,
1527 re-spill all the regs already spilled. This may spill
1528 additional pseudos that didn't spill before. */
1529
1530 if (new_basic_block_needs)
1531 for (i = 0; i < n_spills; i++)
1532 something_changed
1533 |= spill_hard_reg (spill_regs[i], global, dumpfile, 0);
1534
1535 /* Now find more reload regs to satisfy the remaining need
1536 Do it by ascending class number, since otherwise a reg
1537 might be spilled for a big class and might fail to count
1538 for a smaller class even though it belongs to that class.
1539
1540 Count spilled regs in `spills', and add entries to
1541 `spill_regs' and `spill_reg_order'.
1542
1543 ??? Note there is a problem here.
1544 When there is a need for a group in a high-numbered class,
1545 and also need for non-group regs that come from a lower class,
1546 the non-group regs are chosen first. If there aren't many regs,
1547 they might leave no room for a group.
1548
1549 This was happening on the 386. To fix it, we added the code
1550 that calls possible_group_p, so that the lower class won't
1551 break up the last possible group.
1552
1553 Really fixing the problem would require changes above
1554 in counting the regs already spilled, and in choose_reload_regs.
1555 It might be hard to avoid introducing bugs there. */
1556
1557 for (class = 0; class < N_REG_CLASSES; class++)
1558 {
1559 /* First get the groups of registers.
1560 If we got single registers first, we might fragment
1561 possible groups. */
1562 while (max_groups[class] > 0)
1563 {
1564 /* If any single spilled regs happen to form groups,
1565 count them now. Maybe we don't really need
1566 to spill another group. */
1567 count_possible_groups (group_size, group_mode, max_groups);
1568
1569 /* Groups of size 2 (the only groups used on most machines)
1570 are treated specially. */
1571 if (group_size[class] == 2)
1572 {
1573 /* First, look for a register that will complete a group. */
1574 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1575 {
1576 int j = potential_reload_regs[i];
1577 int other;
1578 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1579 &&
1580 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1581 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1582 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1583 && HARD_REGNO_MODE_OK (other, group_mode[class])
1584 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1585 other)
1586 /* We don't want one part of another group.
1587 We could get "two groups" that overlap! */
1588 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1589 ||
1590 (j < FIRST_PSEUDO_REGISTER - 1
1591 && (other = j + 1, spill_reg_order[other] >= 0)
1592 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1593 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1594 && HARD_REGNO_MODE_OK (j, group_mode[class])
1595 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1596 other)
1597 && ! TEST_HARD_REG_BIT (counted_for_groups,
1598 other))))
1599 {
1600 register enum reg_class *p;
1601
1602 /* We have found one that will complete a group,
1603 so count off one group as provided. */
1604 max_groups[class]--;
1605 p = reg_class_superclasses[class];
1606 while (*p != LIM_REG_CLASSES)
1607 max_groups[(int) *p++]--;
1608
1609 /* Indicate both these regs are part of a group. */
1610 SET_HARD_REG_BIT (counted_for_groups, j);
1611 SET_HARD_REG_BIT (counted_for_groups, other);
1612 break;
1613 }
1614 }
1615 /* We can't complete a group, so start one. */
1616 if (i == FIRST_PSEUDO_REGISTER)
1617 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1618 {
1619 int j = potential_reload_regs[i];
1620 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1621 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1622 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1623 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1624 && HARD_REGNO_MODE_OK (j, group_mode[class])
1625 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1626 j + 1))
1627 break;
1628 }
1629
1630 /* I should be the index in potential_reload_regs
1631 of the new reload reg we have found. */
1632
5352b11a
RS
1633 if (i >= FIRST_PSEUDO_REGISTER)
1634 {
1635 /* There are no groups left to spill. */
1636 spill_failure (max_groups_insn[class]);
1637 failure = 1;
1638 goto failed;
1639 }
1640 else
1641 something_changed
fb3821f7 1642 |= new_spill_reg (i, class, max_needs, NULL_PTR,
5352b11a 1643 global, dumpfile);
32131a9c
RK
1644 }
1645 else
1646 {
1647 /* For groups of more than 2 registers,
1648 look for a sufficient sequence of unspilled registers,
1649 and spill them all at once. */
1650 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1651 {
1652 int j = potential_reload_regs[i];
1653 int k;
9d1a4667
RS
1654 if (j >= 0
1655 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
32131a9c
RK
1656 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1657 {
1658 /* Check each reg in the sequence. */
1659 for (k = 0; k < group_size[class]; k++)
1660 if (! (spill_reg_order[j + k] < 0
1661 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1662 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1663 break;
1664 /* We got a full sequence, so spill them all. */
1665 if (k == group_size[class])
1666 {
1667 register enum reg_class *p;
1668 for (k = 0; k < group_size[class]; k++)
1669 {
1670 int idx;
1671 SET_HARD_REG_BIT (counted_for_groups, j + k);
1672 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1673 if (potential_reload_regs[idx] == j + k)
1674 break;
9d1a4667
RS
1675 something_changed
1676 |= new_spill_reg (idx, class,
1677 max_needs, NULL_PTR,
1678 global, dumpfile);
32131a9c
RK
1679 }
1680
1681 /* We have found one that will complete a group,
1682 so count off one group as provided. */
1683 max_groups[class]--;
1684 p = reg_class_superclasses[class];
1685 while (*p != LIM_REG_CLASSES)
1686 max_groups[(int) *p++]--;
1687
1688 break;
1689 }
1690 }
1691 }
fa52261e 1692 /* We couldn't find any registers for this reload.
9d1a4667
RS
1693 Avoid going into an infinite loop. */
1694 if (i >= FIRST_PSEUDO_REGISTER)
1695 {
1696 /* There are no groups left. */
1697 spill_failure (max_groups_insn[class]);
1698 failure = 1;
1699 goto failed;
1700 }
32131a9c
RK
1701 }
1702 }
1703
1704 /* Now similarly satisfy all need for single registers. */
1705
1706 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1707 {
1708 /* Consider the potential reload regs that aren't
1709 yet in use as reload regs, in order of preference.
1710 Find the most preferred one that's in this class. */
1711
1712 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1713 if (potential_reload_regs[i] >= 0
1714 && TEST_HARD_REG_BIT (reg_class_contents[class],
1715 potential_reload_regs[i])
1716 /* If this reg will not be available for groups,
1717 pick one that does not foreclose possible groups.
1718 This is a kludge, and not very general,
1719 but it should be sufficient to make the 386 work,
1720 and the problem should not occur on machines with
1721 more registers. */
1722 && (max_nongroups[class] == 0
1723 || possible_group_p (potential_reload_regs[i], max_groups)))
1724 break;
1725
e404a39a
RK
1726 /* If we couldn't get a register, try to get one even if we
1727 might foreclose possible groups. This may cause problems
1728 later, but that's better than aborting now, since it is
1729 possible that we will, in fact, be able to form the needed
1730 group even with this allocation. */
1731
1732 if (i >= FIRST_PSEUDO_REGISTER
1733 && (asm_noperands (max_needs[class] > 0
1734 ? max_needs_insn[class]
1735 : max_nongroups_insn[class])
1736 < 0))
1737 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1738 if (potential_reload_regs[i] >= 0
1739 && TEST_HARD_REG_BIT (reg_class_contents[class],
1740 potential_reload_regs[i]))
1741 break;
1742
32131a9c
RK
1743 /* I should be the index in potential_reload_regs
1744 of the new reload reg we have found. */
1745
5352b11a
RS
1746 if (i >= FIRST_PSEUDO_REGISTER)
1747 {
1748 /* There are no possible registers left to spill. */
1749 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1750 : max_nongroups_insn[class]);
1751 failure = 1;
1752 goto failed;
1753 }
1754 else
1755 something_changed
1756 |= new_spill_reg (i, class, max_needs, max_nongroups,
1757 global, dumpfile);
32131a9c
RK
1758 }
1759 }
1760 }
1761
1762 /* If global-alloc was run, notify it of any register eliminations we have
1763 done. */
1764 if (global)
1765 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1766 if (ep->can_eliminate)
1767 mark_elimination (ep->from, ep->to);
1768
32131a9c 1769 /* Insert code to save and restore call-clobbered hard regs
a8efe40d
RK
1770 around calls. Tell if what mode to use so that we will process
1771 those insns in reload_as_needed if we have to. */
32131a9c
RK
1772
1773 if (caller_save_needed)
a8efe40d
RK
1774 save_call_clobbered_regs (num_eliminable ? QImode
1775 : caller_save_spill_class != NO_REGS ? HImode
1776 : VOIDmode);
32131a9c
RK
1777
1778 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1779 If that insn didn't set the register (i.e., it copied the register to
1780 memory), just delete that insn instead of the equivalencing insn plus
1781 anything now dead. If we call delete_dead_insn on that insn, we may
1782 delete the insn that actually sets the register if the register die
1783 there and that is incorrect. */
1784
1785 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1786 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1787 && GET_CODE (reg_equiv_init[i]) != NOTE)
1788 {
1789 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1790 delete_dead_insn (reg_equiv_init[i]);
1791 else
1792 {
1793 PUT_CODE (reg_equiv_init[i], NOTE);
1794 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1795 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1796 }
1797 }
1798
1799 /* Use the reload registers where necessary
1800 by generating move instructions to move the must-be-register
1801 values into or out of the reload registers. */
1802
a8efe40d
RK
1803 if (something_needs_reloads || something_needs_elimination
1804 || (caller_save_needed && num_eliminable)
1805 || caller_save_spill_class != NO_REGS)
32131a9c
RK
1806 reload_as_needed (first, global);
1807
2a1f8b6b 1808 /* If we were able to eliminate the frame pointer, show that it is no
33edf7f2 1809 longer live at the start of any basic block. If it is live by
2a1f8b6b
RK
1810 virtue of being in a pseudo, that pseudo will be marked live
1811 and hence the frame pointer will be known to be live via that
1812 pseudo. */
1813
1814 if (! frame_pointer_needed)
1815 for (i = 0; i < n_basic_blocks; i++)
1816 basic_block_live_at_start[i][FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1817 &= ~ ((REGSET_ELT_TYPE) 1 << (FRAME_POINTER_REGNUM % REGSET_ELT_BITS));
1818
32131a9c
RK
1819 reload_in_progress = 0;
1820
5352b11a
RS
1821 /* Come here (with failure set nonzero) if we can't get enough spill regs
1822 and we decide not to abort about it. */
1823 failed:
1824
32131a9c
RK
1825 /* Now eliminate all pseudo regs by modifying them into
1826 their equivalent memory references.
1827 The REG-rtx's for the pseudos are modified in place,
1828 so all insns that used to refer to them now refer to memory.
1829
1830 For a reg that has a reg_equiv_address, all those insns
1831 were changed by reloading so that no insns refer to it any longer;
1832 but the DECL_RTL of a variable decl may refer to it,
1833 and if so this causes the debugging info to mention the variable. */
1834
1835 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1836 {
1837 rtx addr = 0;
ab1fd483 1838 int in_struct = 0;
32131a9c 1839 if (reg_equiv_mem[i])
ab1fd483
RS
1840 {
1841 addr = XEXP (reg_equiv_mem[i], 0);
1842 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1843 }
32131a9c
RK
1844 if (reg_equiv_address[i])
1845 addr = reg_equiv_address[i];
1846 if (addr)
1847 {
1848 if (reg_renumber[i] < 0)
1849 {
1850 rtx reg = regno_reg_rtx[i];
1851 XEXP (reg, 0) = addr;
1852 REG_USERVAR_P (reg) = 0;
ab1fd483 1853 MEM_IN_STRUCT_P (reg) = in_struct;
32131a9c
RK
1854 PUT_CODE (reg, MEM);
1855 }
1856 else if (reg_equiv_mem[i])
1857 XEXP (reg_equiv_mem[i], 0) = addr;
1858 }
1859 }
1860
1861#ifdef PRESERVE_DEATH_INFO_REGNO_P
1862 /* Make a pass over all the insns and remove death notes for things that
1863 are no longer registers or no longer die in the insn (e.g., an input
1864 and output pseudo being tied). */
1865
1866 for (insn = first; insn; insn = NEXT_INSN (insn))
1867 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1868 {
1869 rtx note, next;
1870
1871 for (note = REG_NOTES (insn); note; note = next)
1872 {
1873 next = XEXP (note, 1);
1874 if (REG_NOTE_KIND (note) == REG_DEAD
1875 && (GET_CODE (XEXP (note, 0)) != REG
1876 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1877 remove_note (insn, note);
1878 }
1879 }
1880#endif
1881
1882 /* Indicate that we no longer have known memory locations or constants. */
1883 reg_equiv_constant = 0;
1884 reg_equiv_memory_loc = 0;
5352b11a
RS
1885
1886 return failure;
32131a9c
RK
1887}
1888\f
1889/* Nonzero if, after spilling reg REGNO for non-groups,
1890 it will still be possible to find a group if we still need one. */
1891
1892static int
1893possible_group_p (regno, max_groups)
1894 int regno;
1895 int *max_groups;
1896{
1897 int i;
1898 int class = (int) NO_REGS;
1899
1900 for (i = 0; i < (int) N_REG_CLASSES; i++)
1901 if (max_groups[i] > 0)
1902 {
1903 class = i;
1904 break;
1905 }
1906
1907 if (class == (int) NO_REGS)
1908 return 1;
1909
1910 /* Consider each pair of consecutive registers. */
1911 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
1912 {
1913 /* Ignore pairs that include reg REGNO. */
1914 if (i == regno || i + 1 == regno)
1915 continue;
1916
1917 /* Ignore pairs that are outside the class that needs the group.
1918 ??? Here we fail to handle the case where two different classes
1919 independently need groups. But this never happens with our
1920 current machine descriptions. */
1921 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
1922 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
1923 continue;
1924
1925 /* A pair of consecutive regs we can still spill does the trick. */
1926 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
1927 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
1928 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
1929 return 1;
1930
1931 /* A pair of one already spilled and one we can spill does it
1932 provided the one already spilled is not otherwise reserved. */
1933 if (spill_reg_order[i] < 0
1934 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
1935 && spill_reg_order[i + 1] >= 0
1936 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
1937 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
1938 return 1;
1939 if (spill_reg_order[i + 1] < 0
1940 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
1941 && spill_reg_order[i] >= 0
1942 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
1943 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
1944 return 1;
1945 }
1946
1947 return 0;
1948}
1949\f
1950/* Count any groups that can be formed from the registers recently spilled.
1951 This is done class by class, in order of ascending class number. */
1952
1953static void
1954count_possible_groups (group_size, group_mode, max_groups)
1955 int *group_size, *max_groups;
1956 enum machine_mode *group_mode;
1957{
1958 int i;
1959 /* Now find all consecutive groups of spilled registers
1960 and mark each group off against the need for such groups.
1961 But don't count them against ordinary need, yet. */
1962
1963 for (i = 0; i < N_REG_CLASSES; i++)
1964 if (group_size[i] > 1)
1965 {
1966 char regmask[FIRST_PSEUDO_REGISTER];
1967 int j;
1968
1969 bzero (regmask, sizeof regmask);
1970 /* Make a mask of all the regs that are spill regs in class I. */
1971 for (j = 0; j < n_spills; j++)
1972 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
1973 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
1974 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1975 spill_regs[j]))
1976 regmask[spill_regs[j]] = 1;
1977 /* Find each consecutive group of them. */
1978 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
1979 if (regmask[j] && j + group_size[i] <= FIRST_PSEUDO_REGISTER
1980 /* Next line in case group-mode for this class
1981 demands an even-odd pair. */
1982 && HARD_REGNO_MODE_OK (j, group_mode[i]))
1983 {
1984 int k;
1985 for (k = 1; k < group_size[i]; k++)
1986 if (! regmask[j + k])
1987 break;
1988 if (k == group_size[i])
1989 {
1990 /* We found a group. Mark it off against this class's
1991 need for groups, and against each superclass too. */
1992 register enum reg_class *p;
1993 max_groups[i]--;
1994 p = reg_class_superclasses[i];
1995 while (*p != LIM_REG_CLASSES)
1996 max_groups[(int) *p++]--;
a8fdc208 1997 /* Don't count these registers again. */
32131a9c
RK
1998 for (k = 0; k < group_size[i]; k++)
1999 SET_HARD_REG_BIT (counted_for_groups, j + k);
2000 }
fa52261e
RS
2001 /* Skip to the last reg in this group. When j is incremented
2002 above, it will then point to the first reg of the next
2003 possible group. */
2004 j += k - 1;
32131a9c
RK
2005 }
2006 }
2007
2008}
2009\f
2010/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2011 another mode that needs to be reloaded for the same register class CLASS.
2012 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2013 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2014
2015 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2016 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2017 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2018 causes unnecessary failures on machines requiring alignment of register
2019 groups when the two modes are different sizes, because the larger mode has
2020 more strict alignment rules than the smaller mode. */
2021
2022static int
2023modes_equiv_for_class_p (allocate_mode, other_mode, class)
2024 enum machine_mode allocate_mode, other_mode;
2025 enum reg_class class;
2026{
2027 register int regno;
2028 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2029 {
2030 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2031 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2032 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2033 return 0;
2034 }
2035 return 1;
2036}
2037
5352b11a
RS
2038/* Handle the failure to find a register to spill.
2039 INSN should be one of the insns which needed this particular spill reg. */
2040
2041static void
2042spill_failure (insn)
2043 rtx insn;
2044{
2045 if (asm_noperands (PATTERN (insn)) >= 0)
2046 error_for_asm (insn, "`asm' needs too many reloads");
2047 else
2048 abort ();
2049}
2050
32131a9c
RK
2051/* Add a new register to the tables of available spill-registers
2052 (as well as spilling all pseudos allocated to the register).
2053 I is the index of this register in potential_reload_regs.
2054 CLASS is the regclass whose need is being satisfied.
2055 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2056 so that this register can count off against them.
2057 MAX_NONGROUPS is 0 if this register is part of a group.
2058 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2059
2060static int
2061new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2062 int i;
2063 int class;
2064 int *max_needs;
2065 int *max_nongroups;
2066 int global;
2067 FILE *dumpfile;
2068{
2069 register enum reg_class *p;
2070 int val;
2071 int regno = potential_reload_regs[i];
2072
2073 if (i >= FIRST_PSEUDO_REGISTER)
2074 abort (); /* Caller failed to find any register. */
2075
2076 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2077 fatal ("fixed or forbidden register was spilled.\n\
2078This may be due to a compiler bug or to impossible asm statements.");
2079
2080 /* Make reg REGNO an additional reload reg. */
2081
2082 potential_reload_regs[i] = -1;
2083 spill_regs[n_spills] = regno;
2084 spill_reg_order[regno] = n_spills;
2085 if (dumpfile)
2086 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2087
2088 /* Clear off the needs we just satisfied. */
2089
2090 max_needs[class]--;
2091 p = reg_class_superclasses[class];
2092 while (*p != LIM_REG_CLASSES)
2093 max_needs[(int) *p++]--;
2094
2095 if (max_nongroups && max_nongroups[class] > 0)
2096 {
2097 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2098 max_nongroups[class]--;
2099 p = reg_class_superclasses[class];
2100 while (*p != LIM_REG_CLASSES)
2101 max_nongroups[(int) *p++]--;
2102 }
2103
2104 /* Spill every pseudo reg that was allocated to this reg
2105 or to something that overlaps this reg. */
2106
2107 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2108
2109 /* If there are some registers still to eliminate and this register
2110 wasn't ever used before, additional stack space may have to be
2111 allocated to store this register. Thus, we may have changed the offset
2112 between the stack and frame pointers, so mark that something has changed.
2113 (If new pseudos were spilled, thus requiring more space, VAL would have
2114 been set non-zero by the call to spill_hard_reg above since additional
2115 reloads may be needed in that case.
2116
2117 One might think that we need only set VAL to 1 if this is a call-used
2118 register. However, the set of registers that must be saved by the
2119 prologue is not identical to the call-used set. For example, the
2120 register used by the call insn for the return PC is a call-used register,
2121 but must be saved by the prologue. */
2122 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2123 val = 1;
2124
2125 regs_ever_live[spill_regs[n_spills]] = 1;
2126 n_spills++;
2127
2128 return val;
2129}
2130\f
2131/* Delete an unneeded INSN and any previous insns who sole purpose is loading
2132 data that is dead in INSN. */
2133
2134static void
2135delete_dead_insn (insn)
2136 rtx insn;
2137{
2138 rtx prev = prev_real_insn (insn);
2139 rtx prev_dest;
2140
2141 /* If the previous insn sets a register that dies in our insn, delete it
2142 too. */
2143 if (prev && GET_CODE (PATTERN (prev)) == SET
2144 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2145 && reg_mentioned_p (prev_dest, PATTERN (insn))
2146 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2147 delete_dead_insn (prev);
2148
2149 PUT_CODE (insn, NOTE);
2150 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2151 NOTE_SOURCE_FILE (insn) = 0;
2152}
2153
2154/* Modify the home of pseudo-reg I.
2155 The new home is present in reg_renumber[I].
2156
2157 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2158 or it may be -1, meaning there is none or it is not relevant.
2159 This is used so that all pseudos spilled from a given hard reg
2160 can share one stack slot. */
2161
2162static void
2163alter_reg (i, from_reg)
2164 register int i;
2165 int from_reg;
2166{
2167 /* When outputting an inline function, this can happen
2168 for a reg that isn't actually used. */
2169 if (regno_reg_rtx[i] == 0)
2170 return;
2171
2172 /* If the reg got changed to a MEM at rtl-generation time,
2173 ignore it. */
2174 if (GET_CODE (regno_reg_rtx[i]) != REG)
2175 return;
2176
2177 /* Modify the reg-rtx to contain the new hard reg
2178 number or else to contain its pseudo reg number. */
2179 REGNO (regno_reg_rtx[i])
2180 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2181
2182 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2183 allocate a stack slot for it. */
2184
2185 if (reg_renumber[i] < 0
2186 && reg_n_refs[i] > 0
2187 && reg_equiv_constant[i] == 0
2188 && reg_equiv_memory_loc[i] == 0)
2189 {
2190 register rtx x;
2191 int inherent_size = PSEUDO_REGNO_BYTES (i);
2192 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2193 int adjust = 0;
2194
2195 /* Each pseudo reg has an inherent size which comes from its own mode,
2196 and a total size which provides room for paradoxical subregs
2197 which refer to the pseudo reg in wider modes.
2198
2199 We can use a slot already allocated if it provides both
2200 enough inherent space and enough total space.
2201 Otherwise, we allocate a new slot, making sure that it has no less
2202 inherent space, and no less total space, then the previous slot. */
2203 if (from_reg == -1)
2204 {
2205 /* No known place to spill from => no slot to reuse. */
2206 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2207#if BYTES_BIG_ENDIAN
2208 /* Cancel the big-endian correction done in assign_stack_local.
2209 Get the address of the beginning of the slot.
2210 This is so we can do a big-endian correction unconditionally
2211 below. */
2212 adjust = inherent_size - total_size;
2213#endif
2214 }
2215 /* Reuse a stack slot if possible. */
2216 else if (spill_stack_slot[from_reg] != 0
2217 && spill_stack_slot_width[from_reg] >= total_size
2218 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2219 >= inherent_size))
2220 x = spill_stack_slot[from_reg];
2221 /* Allocate a bigger slot. */
2222 else
2223 {
2224 /* Compute maximum size needed, both for inherent size
2225 and for total size. */
2226 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2227 if (spill_stack_slot[from_reg])
2228 {
2229 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2230 > inherent_size)
2231 mode = GET_MODE (spill_stack_slot[from_reg]);
2232 if (spill_stack_slot_width[from_reg] > total_size)
2233 total_size = spill_stack_slot_width[from_reg];
2234 }
2235 /* Make a slot with that size. */
2236 x = assign_stack_local (mode, total_size, -1);
2237#if BYTES_BIG_ENDIAN
2238 /* Cancel the big-endian correction done in assign_stack_local.
2239 Get the address of the beginning of the slot.
2240 This is so we can do a big-endian correction unconditionally
2241 below. */
2242 adjust = GET_MODE_SIZE (mode) - total_size;
2243#endif
2244 spill_stack_slot[from_reg] = x;
2245 spill_stack_slot_width[from_reg] = total_size;
2246 }
2247
2248#if BYTES_BIG_ENDIAN
2249 /* On a big endian machine, the "address" of the slot
2250 is the address of the low part that fits its inherent mode. */
2251 if (inherent_size < total_size)
2252 adjust += (total_size - inherent_size);
2253#endif /* BYTES_BIG_ENDIAN */
2254
2255 /* If we have any adjustment to make, or if the stack slot is the
2256 wrong mode, make a new stack slot. */
2257 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2258 {
2259 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2260 plus_constant (XEXP (x, 0), adjust));
2261 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2262 }
2263
2264 /* Save the stack slot for later. */
2265 reg_equiv_memory_loc[i] = x;
2266 }
2267}
2268
2269/* Mark the slots in regs_ever_live for the hard regs
2270 used by pseudo-reg number REGNO. */
2271
2272void
2273mark_home_live (regno)
2274 int regno;
2275{
2276 register int i, lim;
2277 i = reg_renumber[regno];
2278 if (i < 0)
2279 return;
2280 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2281 while (i < lim)
2282 regs_ever_live[i++] = 1;
2283}
2284\f
2285/* This function handles the tracking of elimination offsets around branches.
2286
2287 X is a piece of RTL being scanned.
2288
2289 INSN is the insn that it came from, if any.
2290
2291 INITIAL_P is non-zero if we are to set the offset to be the initial
2292 offset and zero if we are setting the offset of the label to be the
2293 current offset. */
2294
2295static void
2296set_label_offsets (x, insn, initial_p)
2297 rtx x;
2298 rtx insn;
2299 int initial_p;
2300{
2301 enum rtx_code code = GET_CODE (x);
2302 rtx tem;
2303 int i;
2304 struct elim_table *p;
2305
2306 switch (code)
2307 {
2308 case LABEL_REF:
8be386d9
RS
2309 if (LABEL_REF_NONLOCAL_P (x))
2310 return;
2311
32131a9c
RK
2312 x = XEXP (x, 0);
2313
2314 /* ... fall through ... */
2315
2316 case CODE_LABEL:
2317 /* If we know nothing about this label, set the desired offsets. Note
2318 that this sets the offset at a label to be the offset before a label
2319 if we don't know anything about the label. This is not correct for
2320 the label after a BARRIER, but is the best guess we can make. If
2321 we guessed wrong, we will suppress an elimination that might have
2322 been possible had we been able to guess correctly. */
2323
2324 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2325 {
2326 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2327 offsets_at[CODE_LABEL_NUMBER (x)][i]
2328 = (initial_p ? reg_eliminate[i].initial_offset
2329 : reg_eliminate[i].offset);
2330 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2331 }
2332
2333 /* Otherwise, if this is the definition of a label and it is
d45cf215 2334 preceded by a BARRIER, set our offsets to the known offset of
32131a9c
RK
2335 that label. */
2336
2337 else if (x == insn
2338 && (tem = prev_nonnote_insn (insn)) != 0
2339 && GET_CODE (tem) == BARRIER)
2a4b5f3b
RK
2340 {
2341 num_not_at_initial_offset = 0;
2342 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2343 {
2344 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2345 = offsets_at[CODE_LABEL_NUMBER (x)][i];
1d0d98f3
RK
2346 if (reg_eliminate[i].can_eliminate
2347 && (reg_eliminate[i].offset
2348 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
2349 num_not_at_initial_offset++;
2350 }
2351 }
32131a9c
RK
2352
2353 else
2354 /* If neither of the above cases is true, compare each offset
2355 with those previously recorded and suppress any eliminations
2356 where the offsets disagree. */
a8fdc208 2357
32131a9c
RK
2358 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2359 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2360 != (initial_p ? reg_eliminate[i].initial_offset
2361 : reg_eliminate[i].offset))
2362 reg_eliminate[i].can_eliminate = 0;
2363
2364 return;
2365
2366 case JUMP_INSN:
2367 set_label_offsets (PATTERN (insn), insn, initial_p);
2368
2369 /* ... fall through ... */
2370
2371 case INSN:
2372 case CALL_INSN:
2373 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2374 and hence must have all eliminations at their initial offsets. */
2375 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2376 if (REG_NOTE_KIND (tem) == REG_LABEL)
2377 set_label_offsets (XEXP (tem, 0), insn, 1);
2378 return;
2379
2380 case ADDR_VEC:
2381 case ADDR_DIFF_VEC:
2382 /* Each of the labels in the address vector must be at their initial
2383 offsets. We want the first first for ADDR_VEC and the second
2384 field for ADDR_DIFF_VEC. */
2385
2386 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2387 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2388 insn, initial_p);
2389 return;
2390
2391 case SET:
2392 /* We only care about setting PC. If the source is not RETURN,
2393 IF_THEN_ELSE, or a label, disable any eliminations not at
2394 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2395 isn't one of those possibilities. For branches to a label,
2396 call ourselves recursively.
2397
2398 Note that this can disable elimination unnecessarily when we have
2399 a non-local goto since it will look like a non-constant jump to
2400 someplace in the current function. This isn't a significant
2401 problem since such jumps will normally be when all elimination
2402 pairs are back to their initial offsets. */
2403
2404 if (SET_DEST (x) != pc_rtx)
2405 return;
2406
2407 switch (GET_CODE (SET_SRC (x)))
2408 {
2409 case PC:
2410 case RETURN:
2411 return;
2412
2413 case LABEL_REF:
2414 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2415 return;
2416
2417 case IF_THEN_ELSE:
2418 tem = XEXP (SET_SRC (x), 1);
2419 if (GET_CODE (tem) == LABEL_REF)
2420 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2421 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2422 break;
2423
2424 tem = XEXP (SET_SRC (x), 2);
2425 if (GET_CODE (tem) == LABEL_REF)
2426 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2427 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2428 break;
2429 return;
2430 }
2431
2432 /* If we reach here, all eliminations must be at their initial
2433 offset because we are doing a jump to a variable address. */
2434 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2435 if (p->offset != p->initial_offset)
2436 p->can_eliminate = 0;
2437 }
2438}
2439\f
2440/* Used for communication between the next two function to properly share
2441 the vector for an ASM_OPERANDS. */
2442
2443static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2444
a8fdc208 2445/* Scan X and replace any eliminable registers (such as fp) with a
32131a9c
RK
2446 replacement (such as sp), plus an offset.
2447
2448 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2449 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2450 MEM, we are allowed to replace a sum of a register and the constant zero
2451 with the register, which we cannot do outside a MEM. In addition, we need
2452 to record the fact that a register is referenced outside a MEM.
2453
2454 If INSN is nonzero, it is the insn containing X. If we replace a REG
2455 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2456 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2457 that the REG is being modified.
2458
2459 If we see a modification to a register we know about, take the
2460 appropriate action (see case SET, below).
2461
2462 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2463 replacements done assuming all offsets are at their initial values. If
2464 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2465 encounter, return the actual location so that find_reloads will do
2466 the proper thing. */
2467
2468rtx
2469eliminate_regs (x, mem_mode, insn)
2470 rtx x;
2471 enum machine_mode mem_mode;
2472 rtx insn;
2473{
2474 enum rtx_code code = GET_CODE (x);
2475 struct elim_table *ep;
2476 int regno;
2477 rtx new;
2478 int i, j;
2479 char *fmt;
2480 int copied = 0;
2481
2482 switch (code)
2483 {
2484 case CONST_INT:
2485 case CONST_DOUBLE:
2486 case CONST:
2487 case SYMBOL_REF:
2488 case CODE_LABEL:
2489 case PC:
2490 case CC0:
2491 case ASM_INPUT:
2492 case ADDR_VEC:
2493 case ADDR_DIFF_VEC:
2494 case RETURN:
2495 return x;
2496
2497 case REG:
2498 regno = REGNO (x);
2499
2500 /* First handle the case where we encounter a bare register that
2501 is eliminable. Replace it with a PLUS. */
2502 if (regno < FIRST_PSEUDO_REGISTER)
2503 {
2504 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2505 ep++)
2506 if (ep->from_rtx == x && ep->can_eliminate)
2507 {
2508 if (! mem_mode)
2509 ep->ref_outside_mem = 1;
2510 return plus_constant (ep->to_rtx, ep->previous_offset);
2511 }
2512
2513 }
2514 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2515 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2516 {
2517 /* In this case, find_reloads would attempt to either use an
2518 incorrect address (if something is not at its initial offset)
2519 or substitute an replaced address into an insn (which loses
2520 if the offset is changed by some later action). So we simply
2521 return the replaced stack slot (assuming it is changed by
2522 elimination) and ignore the fact that this is actually a
2523 reference to the pseudo. Ensure we make a copy of the
2524 address in case it is shared. */
fb3821f7
CH
2525 new = eliminate_regs (reg_equiv_memory_loc[regno],
2526 mem_mode, NULL_RTX);
32131a9c
RK
2527 if (new != reg_equiv_memory_loc[regno])
2528 return copy_rtx (new);
2529 }
2530 return x;
2531
2532 case PLUS:
2533 /* If this is the sum of an eliminable register and a constant, rework
2534 the sum. */
2535 if (GET_CODE (XEXP (x, 0)) == REG
2536 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2537 && CONSTANT_P (XEXP (x, 1)))
2538 {
2539 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2540 ep++)
2541 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2542 {
2543 if (! mem_mode)
2544 ep->ref_outside_mem = 1;
2545
2546 /* The only time we want to replace a PLUS with a REG (this
2547 occurs when the constant operand of the PLUS is the negative
2548 of the offset) is when we are inside a MEM. We won't want
2549 to do so at other times because that would change the
2550 structure of the insn in a way that reload can't handle.
2551 We special-case the commonest situation in
2552 eliminate_regs_in_insn, so just replace a PLUS with a
2553 PLUS here, unless inside a MEM. */
a23b64d5 2554 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
32131a9c
RK
2555 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2556 return ep->to_rtx;
2557 else
2558 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2559 plus_constant (XEXP (x, 1),
2560 ep->previous_offset));
2561 }
2562
2563 /* If the register is not eliminable, we are done since the other
2564 operand is a constant. */
2565 return x;
2566 }
2567
2568 /* If this is part of an address, we want to bring any constant to the
2569 outermost PLUS. We will do this by doing register replacement in
2570 our operands and seeing if a constant shows up in one of them.
2571
2572 We assume here this is part of an address (or a "load address" insn)
2573 since an eliminable register is not likely to appear in any other
2574 context.
2575
2576 If we have (plus (eliminable) (reg)), we want to produce
2577 (plus (plus (replacement) (reg) (const))). If this was part of a
2578 normal add insn, (plus (replacement) (reg)) will be pushed as a
2579 reload. This is the desired action. */
2580
2581 {
fb3821f7
CH
2582 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2583 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX);
32131a9c
RK
2584
2585 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2586 {
2587 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2588 didn't get a hard register but has a reg_equiv_constant,
32131a9c
RK
2589 we must replace the constant here since it may no longer
2590 be in the position of any operand. */
2591 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2592 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2593 && reg_renumber[REGNO (new1)] < 0
2594 && reg_equiv_constant != 0
2595 && reg_equiv_constant[REGNO (new1)] != 0)
2596 new1 = reg_equiv_constant[REGNO (new1)];
2597 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2598 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2599 && reg_renumber[REGNO (new0)] < 0
2600 && reg_equiv_constant[REGNO (new0)] != 0)
2601 new0 = reg_equiv_constant[REGNO (new0)];
2602
2603 new = form_sum (new0, new1);
2604
2605 /* As above, if we are not inside a MEM we do not want to
2606 turn a PLUS into something else. We might try to do so here
2607 for an addition of 0 if we aren't optimizing. */
2608 if (! mem_mode && GET_CODE (new) != PLUS)
2609 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2610 else
2611 return new;
2612 }
2613 }
2614 return x;
2615
2616 case EXPR_LIST:
2617 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2618 if (XEXP (x, 0))
2619 {
fb3821f7 2620 new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
32131a9c
RK
2621 if (new != XEXP (x, 0))
2622 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2623 }
2624
2625 /* ... fall through ... */
2626
2627 case INSN_LIST:
2628 /* Now do eliminations in the rest of the chain. If this was
2629 an EXPR_LIST, this might result in allocating more memory than is
2630 strictly needed, but it simplifies the code. */
2631 if (XEXP (x, 1))
2632 {
fb3821f7 2633 new = eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX);
32131a9c
RK
2634 if (new != XEXP (x, 1))
2635 return gen_rtx (INSN_LIST, GET_MODE (x), XEXP (x, 0), new);
2636 }
2637 return x;
2638
2639 case CALL:
2640 case COMPARE:
2641 case MINUS:
2642 case MULT:
2643 case DIV: case UDIV:
2644 case MOD: case UMOD:
2645 case AND: case IOR: case XOR:
2646 case LSHIFT: case ASHIFT: case ROTATE:
2647 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2648 case NE: case EQ:
2649 case GE: case GT: case GEU: case GTU:
2650 case LE: case LT: case LEU: case LTU:
2651 {
fb3821f7
CH
2652 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2653 rtx new1
2654 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX) : 0;
32131a9c
RK
2655
2656 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2657 return gen_rtx (code, GET_MODE (x), new0, new1);
2658 }
2659 return x;
2660
2661 case PRE_INC:
2662 case POST_INC:
2663 case PRE_DEC:
2664 case POST_DEC:
2665 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2666 if (ep->to_rtx == XEXP (x, 0))
2667 {
2668 if (code == PRE_DEC || code == POST_DEC)
2669 ep->offset += GET_MODE_SIZE (mem_mode);
2670 else
2671 ep->offset -= GET_MODE_SIZE (mem_mode);
2672 }
2673
2674 /* Fall through to generic unary operation case. */
2675 case USE:
2676 case STRICT_LOW_PART:
2677 case NEG: case NOT:
2678 case SIGN_EXTEND: case ZERO_EXTEND:
2679 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2680 case FLOAT: case FIX:
2681 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2682 case ABS:
2683 case SQRT:
2684 case FFS:
fb3821f7 2685 new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
32131a9c
RK
2686 if (new != XEXP (x, 0))
2687 return gen_rtx (code, GET_MODE (x), new);
2688 return x;
2689
2690 case SUBREG:
2691 /* Similar to above processing, but preserve SUBREG_WORD.
2692 Convert (subreg (mem)) to (mem) if not paradoxical.
2693 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2694 pseudo didn't get a hard reg, we must replace this with the
2695 eliminated version of the memory location because push_reloads
2696 may do the replacement in certain circumstances. */
2697 if (GET_CODE (SUBREG_REG (x)) == REG
2698 && (GET_MODE_SIZE (GET_MODE (x))
2699 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2700 && reg_equiv_memory_loc != 0
2701 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2702 {
2703 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
fb3821f7 2704 mem_mode, NULL_RTX);
32131a9c
RK
2705
2706 /* If we didn't change anything, we must retain the pseudo. */
2707 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2708 new = XEXP (x, 0);
2709 else
2710 /* Otherwise, ensure NEW isn't shared in case we have to reload
2711 it. */
2712 new = copy_rtx (new);
2713 }
2714 else
fb3821f7 2715 new = eliminate_regs (SUBREG_REG (x), mem_mode, NULL_RTX);
32131a9c
RK
2716
2717 if (new != XEXP (x, 0))
2718 {
2719 if (GET_CODE (new) == MEM
2720 && (GET_MODE_SIZE (GET_MODE (x))
2721 <= GET_MODE_SIZE (GET_MODE (new))))
2722 {
2723 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2724 enum machine_mode mode = GET_MODE (x);
2725
2726#if BYTES_BIG_ENDIAN
2727 offset += (MIN (UNITS_PER_WORD,
2728 GET_MODE_SIZE (GET_MODE (new)))
2729 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2730#endif
2731
2732 PUT_MODE (new, mode);
2733 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2734 return new;
2735 }
2736 else
2737 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2738 }
2739
2740 return x;
2741
2742 case CLOBBER:
2743 /* If clobbering a register that is the replacement register for an
d45cf215 2744 elimination we still think can be performed, note that it cannot
32131a9c
RK
2745 be performed. Otherwise, we need not be concerned about it. */
2746 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2747 if (ep->to_rtx == XEXP (x, 0))
2748 ep->can_eliminate = 0;
2749
2750 return x;
2751
2752 case ASM_OPERANDS:
2753 {
2754 rtx *temp_vec;
2755 /* Properly handle sharing input and constraint vectors. */
2756 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2757 {
2758 /* When we come to a new vector not seen before,
2759 scan all its elements; keep the old vector if none
2760 of them changes; otherwise, make a copy. */
2761 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2762 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2763 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2764 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
fb3821f7 2765 mem_mode, NULL_RTX);
32131a9c
RK
2766
2767 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2768 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2769 break;
2770
2771 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2772 new_asm_operands_vec = old_asm_operands_vec;
2773 else
2774 new_asm_operands_vec
2775 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2776 }
2777
2778 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2779 if (new_asm_operands_vec == old_asm_operands_vec)
2780 return x;
2781
2782 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2783 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2784 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2785 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2786 ASM_OPERANDS_SOURCE_FILE (x),
2787 ASM_OPERANDS_SOURCE_LINE (x));
2788 new->volatil = x->volatil;
2789 return new;
2790 }
2791
2792 case SET:
2793 /* Check for setting a register that we know about. */
2794 if (GET_CODE (SET_DEST (x)) == REG)
2795 {
2796 /* See if this is setting the replacement register for an
a8fdc208 2797 elimination.
32131a9c
RK
2798
2799 If DEST is the frame pointer, we do nothing because we assume that
2800 all assignments to the frame pointer are for non-local gotos and
2801 are being done at a time when they are valid and do not disturb
2802 anything else. Some machines want to eliminate a fake argument
2803 pointer with either the frame or stack pointer. Assignments to
2804 the frame pointer must not prevent this elimination. */
2805
2806 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2807 ep++)
2808 if (ep->to_rtx == SET_DEST (x)
2809 && SET_DEST (x) != frame_pointer_rtx)
2810 {
6dc42e49 2811 /* If it is being incremented, adjust the offset. Otherwise,
32131a9c
RK
2812 this elimination can't be done. */
2813 rtx src = SET_SRC (x);
2814
2815 if (GET_CODE (src) == PLUS
2816 && XEXP (src, 0) == SET_DEST (x)
2817 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2818 ep->offset -= INTVAL (XEXP (src, 1));
2819 else
2820 ep->can_eliminate = 0;
2821 }
2822
2823 /* Now check to see we are assigning to a register that can be
2824 eliminated. If so, it must be as part of a PARALLEL, since we
2825 will not have been called if this is a single SET. So indicate
2826 that we can no longer eliminate this reg. */
2827 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2828 ep++)
2829 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
2830 ep->can_eliminate = 0;
2831 }
2832
2833 /* Now avoid the loop below in this common case. */
2834 {
fb3821f7
CH
2835 rtx new0 = eliminate_regs (SET_DEST (x), 0, NULL_RTX);
2836 rtx new1 = eliminate_regs (SET_SRC (x), 0, NULL_RTX);
32131a9c
RK
2837
2838 /* If SET_DEST changed from a REG to a MEM and INSN is non-zero,
2839 write a CLOBBER insn. */
2840 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
2841 && insn != 0)
2842 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
2843
2844 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
2845 return gen_rtx (SET, VOIDmode, new0, new1);
2846 }
2847
2848 return x;
2849
2850 case MEM:
2851 /* Our only special processing is to pass the mode of the MEM to our
2852 recursive call and copy the flags. While we are here, handle this
2853 case more efficiently. */
fb3821f7 2854 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), NULL_RTX);
32131a9c
RK
2855 if (new != XEXP (x, 0))
2856 {
2857 new = gen_rtx (MEM, GET_MODE (x), new);
2858 new->volatil = x->volatil;
2859 new->unchanging = x->unchanging;
2860 new->in_struct = x->in_struct;
2861 return new;
2862 }
2863 else
2864 return x;
2865 }
2866
2867 /* Process each of our operands recursively. If any have changed, make a
2868 copy of the rtx. */
2869 fmt = GET_RTX_FORMAT (code);
2870 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2871 {
2872 if (*fmt == 'e')
2873 {
fb3821f7 2874 new = eliminate_regs (XEXP (x, i), mem_mode, NULL_RTX);
32131a9c
RK
2875 if (new != XEXP (x, i) && ! copied)
2876 {
2877 rtx new_x = rtx_alloc (code);
2878 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
2879 + (sizeof (new_x->fld[0])
2880 * GET_RTX_LENGTH (code))));
2881 x = new_x;
2882 copied = 1;
2883 }
2884 XEXP (x, i) = new;
2885 }
2886 else if (*fmt == 'E')
2887 {
2888 int copied_vec = 0;
2889 for (j = 0; j < XVECLEN (x, i); j++)
2890 {
2891 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
2892 if (new != XVECEXP (x, i, j) && ! copied_vec)
2893 {
2894 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2895 &XVECEXP (x, i, 0));
2896 if (! copied)
2897 {
2898 rtx new_x = rtx_alloc (code);
2899 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
2900 + (sizeof (new_x->fld[0])
2901 * GET_RTX_LENGTH (code))));
2902 x = new_x;
2903 copied = 1;
2904 }
2905 XVEC (x, i) = new_v;
2906 copied_vec = 1;
2907 }
2908 XVECEXP (x, i, j) = new;
2909 }
2910 }
2911 }
2912
2913 return x;
2914}
2915\f
/* Scan INSN and eliminate all eliminable registers in it.

   If REPLACE is nonzero, do the replacement destructively.  Also
   delete the insn as dead if it is setting an eliminable register.

   If REPLACE is zero, do all our allocations in reload_obstack.

   If no eliminations were done and this insn doesn't require any elimination
   processing (these are not identical conditions: it might be updating sp,
   but not referencing fp; this needs to be seen during reload_as_needed so
   that the offset between fp and sp can be taken into consideration), zero
   is returned.  Otherwise, 1 is returned.  */

static int
eliminate_regs_in_insn (insn, replace)
     rtx insn;
     int replace;
{
  rtx old_body = PATTERN (insn);
  rtx new_body;
  int val = 0;
  struct elim_table *ep;

  /* When not replacing for real, put any rtl we allocate on reload_obstack
     so it can be discarded later (see the matching pop_obstacks below).  */
  if (! replace)
    push_obstacks (&reload_obstack, &reload_obstack);

  if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
      && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
	  {
	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      delete_dead_insn (insn);
	    val = 1;
	    goto done;
	  }

      /* Check for (set (reg) (plus (reg from) (offset))) where the offset
	 in the insn is the negative of the offset in FROM.  Substitute
	 (set (reg) (reg to)) for the insn and change its code.

	 We have to do this here, rather than in eliminate_regs, so that we can
	 change the insn code.  */

      if (GET_CODE (SET_SRC (old_body)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
	  && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
	      && ep->can_eliminate
	      && ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
	    {
	      PATTERN (insn) = gen_rtx (SET, VOIDmode,
					SET_DEST (old_body), ep->to_rtx);
	      INSN_CODE (insn) = -1;
	      val = 1;
	      goto done;
	    }
    }

  old_asm_operands_vec = 0;

  /* Replace the body of this insn with a substituted form.  If we changed
     something, return non-zero.  If this is the final call for this
     insn (REPLACE is non-zero), do the elimination in REG_NOTES as well.

     If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.  */

  new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
  if (new_body != old_body)
    {
      /* If we aren't replacing things permanently and we changed something,
	 make another copy to ensure that all the RTL is new.  Otherwise
	 things can go wrong if find_reload swaps commutative operands
	 and one is inside RTL that has been copied while the other is not.  */

      /* Don't copy an asm_operands because (1) there's no need and (2)
	 copy_rtx can't do it properly when there are multiple outputs.  */
      if (! replace && asm_noperands (old_body) < 0)
	new_body = copy_rtx (new_body);

      /* If we had a move insn but now we don't, rerecognize it.  */
      if ((GET_CODE (old_body) == SET && GET_CODE (SET_SRC (old_body)) == REG
	   && (GET_CODE (new_body) != SET
	       || GET_CODE (SET_SRC (new_body)) != REG))
	  /* If this was an add insn before, rerecognize.  */
	  ||
	  (GET_CODE (old_body) == SET
	   && GET_CODE (SET_SRC (old_body)) == PLUS))
	{
	  if (! validate_change (insn, &PATTERN (insn), new_body, 0))
	    /* If recognition fails, store the new body anyway.
	       It's normal to have recognition failures here
	       due to bizarre memory addresses; reloading will fix them.  */
	    PATTERN (insn) = new_body;
	}
      else
	PATTERN (insn) = new_body;

      /* On the final pass, eliminate registers mentioned in the notes too,
	 so they match the rewritten body.  */
      if (replace && REG_NOTES (insn))
	REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, NULL_RTX);
      val = 1;
    }

  /* Loop through all elimination pairs.  See if any have changed and
     recalculate the number not at initial offset.

     Compute the maximum offset (minimum offset if the stack does not
     grow downward) for each elimination pair.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  num_not_at_initial_offset = 0;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* A register both modified and referenced outside a MEM cannot be
	 eliminated; see the comment above.  */
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
	val = 1;

      ep->previous_offset = ep->offset;
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
	num_not_at_initial_offset++;

#ifdef STACK_GROWS_DOWNWARD
      ep->max_offset = MAX (ep->max_offset, ep->offset);
#else
      ep->max_offset = MIN (ep->max_offset, ep->offset);
#endif
    }

 done:
  if (! replace)
    pop_obstacks ();

  return val;
}
3075
3076/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3077 replacement we currently believe is valid, mark it as not eliminable if X
3078 modifies DEST in any way other than by adding a constant integer to it.
3079
3080 If DEST is the frame pointer, we do nothing because we assume that
3081 all assignments to the frame pointer are nonlocal gotos and are being done
3082 at a time when they are valid and do not disturb anything else.
3083 Some machines want to eliminate a fake argument pointer with either the
3084 frame or stack pointer. Assignments to the frame pointer must not prevent
3085 this elimination.
3086
3087 Called via note_stores from reload before starting its passes to scan
3088 the insns of the function. */
3089
3090static void
3091mark_not_eliminable (dest, x)
3092 rtx dest;
3093 rtx x;
3094{
3095 register int i;
3096
3097 /* A SUBREG of a hard register here is just changing its mode. We should
3098 not see a SUBREG of an eliminable hard register, but check just in
3099 case. */
3100 if (GET_CODE (dest) == SUBREG)
3101 dest = SUBREG_REG (dest);
3102
3103 if (dest == frame_pointer_rtx)
3104 return;
3105
3106 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3107 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3108 && (GET_CODE (x) != SET
3109 || GET_CODE (SET_SRC (x)) != PLUS
3110 || XEXP (SET_SRC (x), 0) != dest
3111 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3112 {
3113 reg_eliminate[i].can_eliminate_previous
3114 = reg_eliminate[i].can_eliminate = 0;
3115 num_eliminable--;
3116 }
3117}
3118\f
3119/* Kick all pseudos out of hard register REGNO.
3120 If GLOBAL is nonzero, try to find someplace else to put them.
3121 If DUMPFILE is nonzero, log actions taken on that file.
3122
3123 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3124 because we found we can't eliminate some register. In the case, no pseudos
3125 are allowed to be in the register, even if they are only in a block that
3126 doesn't require spill registers, unlike the case when we are spilling this
3127 hard reg to produce another spill register.
3128
3129 Return nonzero if any pseudos needed to be kicked out. */
3130
3131static int
3132spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3133 register int regno;
3134 int global;
3135 FILE *dumpfile;
3136 int cant_eliminate;
3137{
3138 int something_changed = 0;
3139 register int i;
3140
3141 SET_HARD_REG_BIT (forbidden_regs, regno);
3142
3143 /* Spill every pseudo reg that was allocated to this reg
3144 or to something that overlaps this reg. */
3145
3146 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3147 if (reg_renumber[i] >= 0
3148 && reg_renumber[i] <= regno
a8fdc208 3149 && (reg_renumber[i]
32131a9c
RK
3150 + HARD_REGNO_NREGS (reg_renumber[i],
3151 PSEUDO_REGNO_MODE (i))
3152 > regno))
3153 {
3154 enum reg_class class = REGNO_REG_CLASS (regno);
3155
3156 /* If this register belongs solely to a basic block which needed no
3157 spilling of any class that this register is contained in,
3158 leave it be, unless we are spilling this register because
3159 it was a hard register that can't be eliminated. */
3160
3161 if (! cant_eliminate
3162 && basic_block_needs[0]
3163 && reg_basic_block[i] >= 0
3164 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3165 {
3166 enum reg_class *p;
3167
3168 for (p = reg_class_superclasses[(int) class];
3169 *p != LIM_REG_CLASSES; p++)
3170 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3171 break;
a8fdc208 3172
32131a9c
RK
3173 if (*p == LIM_REG_CLASSES)
3174 continue;
3175 }
3176
3177 /* Mark it as no longer having a hard register home. */
3178 reg_renumber[i] = -1;
3179 /* We will need to scan everything again. */
3180 something_changed = 1;
3181 if (global)
3182 retry_global_alloc (i, forbidden_regs);
3183
3184 alter_reg (i, regno);
3185 if (dumpfile)
3186 {
3187 if (reg_renumber[i] == -1)
3188 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3189 else
3190 fprintf (dumpfile, " Register %d now in %d.\n\n",
3191 i, reg_renumber[i]);
3192 }
3193 }
3194
3195 return something_changed;
3196}
3197\f
3198/* Find all paradoxical subregs within X and update reg_max_ref_width. */
3199
3200static void
3201scan_paradoxical_subregs (x)
3202 register rtx x;
3203{
3204 register int i;
3205 register char *fmt;
3206 register enum rtx_code code = GET_CODE (x);
3207
3208 switch (code)
3209 {
3210 case CONST_INT:
3211 case CONST:
3212 case SYMBOL_REF:
3213 case LABEL_REF:
3214 case CONST_DOUBLE:
3215 case CC0:
3216 case PC:
3217 case REG:
3218 case USE:
3219 case CLOBBER:
3220 return;
3221
3222 case SUBREG:
3223 if (GET_CODE (SUBREG_REG (x)) == REG
3224 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3225 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3226 = GET_MODE_SIZE (GET_MODE (x));
3227 return;
3228 }
3229
3230 fmt = GET_RTX_FORMAT (code);
3231 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3232 {
3233 if (fmt[i] == 'e')
3234 scan_paradoxical_subregs (XEXP (x, i));
3235 else if (fmt[i] == 'E')
3236 {
3237 register int j;
3238 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3239 scan_paradoxical_subregs (XVECEXP (x, i, j));
3240 }
3241 }
3242}
3243\f
/* Pair of a hard register number and the count of its uses, as accumulated
   by order_regs_for_reload.  */
struct hard_reg_n_uses { int regno; int uses; };

/* qsort comparison function: order hard registers by increasing use count,
   breaking ties by register number so the sort result is deterministic.

   Use explicit comparisons rather than returning the difference
   p1->uses - p2->uses: the use counts are deliberately inflated by large
   biases (2 * large + 2 for fixed regs), so the subtraction could overflow
   a signed int, which is undefined behavior.  The regno tie-break cannot
   overflow since register numbers are small and non-negative.  */
static int
hard_reg_use_compare (p1, p2)
     struct hard_reg_n_uses *p1, *p2;
{
  if (p1->uses != p2->uses)
    return p1->uses < p2->uses ? -1 : 1;
  /* If regs are equally good, sort by regno,
     so that the results of qsort leave nothing to chance.  */
  return p1->regno - p2->regno;
}
3256
/* Choose the order to consider regs for use as reload registers
   based on how much trouble would be caused by spilling one.
   Store them in order of decreasing preference in potential_reload_regs.  */

static void
order_regs_for_reload ()
{
  register int i;
  register int o = 0;
  /* Sum of all pseudo reference counts; used to build biases that dwarf
     any real use count.  */
  int large = 0;

  struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];

  CLEAR_HARD_REG_SET (bad_spill_regs);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    potential_reload_regs[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      hard_reg_n_uses[i].uses = 0;
      hard_reg_n_uses[i].regno = i;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (regno >= 0)
	{
	  /* Charge the pseudo's references to every hard reg it spans.  */
	  int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
	  while (regno < lim)
	    hard_reg_n_uses[regno++].uses += reg_n_refs[i];
	}
      large += reg_n_refs[i];
    }

  /* Now fixed registers (which cannot safely be used for reloading)
     get a very high use count so they will be considered least desirable.
     Registers used explicitly in the rtl code are almost as bad.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (fixed_regs[i])
	{
	  hard_reg_n_uses[i].uses += 2 * large + 2;
	  SET_HARD_REG_BIT (bad_spill_regs, i);
	}
      else if (regs_explicitly_used[i])
	{
	  hard_reg_n_uses[i].uses += large + 1;
	  /* ??? We are doing this here because of the potential that
	     bad code may be generated if a register explicitly used in
	     an insn was used as a spill register for that insn.  But
	     not using these as spill registers may lose on some machine.
	     We'll have to see how this works out.  */
	  SET_HARD_REG_BIT (bad_spill_regs, i);
	}
    }
  hard_reg_n_uses[FRAME_POINTER_REGNUM].uses += 2 * large + 2;
  SET_HARD_REG_BIT (bad_spill_regs, FRAME_POINTER_REGNUM);

#ifdef ELIMINABLE_REGS
  /* If registers other than the frame pointer are eliminable, mark them as
     poor choices.  */
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
      SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
    }
#endif

  /* Prefer registers not so far used, for use in temporary loading.
     Among them, if REG_ALLOC_ORDER is defined, use that order.
     Otherwise, prefer registers not preserved by calls.  */

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int regno = reg_alloc_order[i];

      if (hard_reg_n_uses[regno].uses == 0)
	potential_reload_regs[o++] = regno;
    }
#else
  /* Call-clobbered unused regs first, then call-saved unused regs.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
#endif

  qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
	 sizeof hard_reg_n_uses[0], hard_reg_use_compare);

  /* Now add the regs that are already used,
     preferring those used less often.  The fixed and otherwise forbidden
     registers will be at the end of this list.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses != 0)
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
}
3367\f
3368/* Reload pseudo-registers into hard regs around each insn as needed.
3369 Additional register load insns are output before the insn that needs it
3370 and perhaps store insns after insns that modify the reloaded pseudo reg.
3371
3372 reg_last_reload_reg and reg_reloaded_contents keep track of
3373 which pseudo-registers are already available in reload registers.
3374 We update these for the reloads that we perform,
3375 as the insns are scanned. */
3376
3377static void
3378reload_as_needed (first, live_known)
3379 rtx first;
3380 int live_known;
3381{
3382 register rtx insn;
3383 register int i;
3384 int this_block = 0;
3385 rtx x;
3386 rtx after_call = 0;
3387
3388 bzero (spill_reg_rtx, sizeof spill_reg_rtx);
3389 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3390 bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
3391 reg_has_output_reload = (char *) alloca (max_regno);
3392 for (i = 0; i < n_spills; i++)
3393 {
3394 reg_reloaded_contents[i] = -1;
3395 reg_reloaded_insn[i] = 0;
3396 }
3397
3398 /* Reset all offsets on eliminable registers to their initial values. */
3399#ifdef ELIMINABLE_REGS
3400 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3401 {
3402 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
510dd77e 3403 reg_eliminate[i].initial_offset);
32131a9c
RK
3404 reg_eliminate[i].previous_offset
3405 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3406 }
3407#else
3408 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3409 reg_eliminate[0].previous_offset
3410 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3411#endif
3412
3413 num_not_at_initial_offset = 0;
3414
3415 for (insn = first; insn;)
3416 {
3417 register rtx next = NEXT_INSN (insn);
3418
3419 /* Notice when we move to a new basic block. */
aa2c50d6 3420 if (live_known && this_block + 1 < n_basic_blocks
32131a9c
RK
3421 && insn == basic_block_head[this_block+1])
3422 ++this_block;
3423
3424 /* If we pass a label, copy the offsets from the label information
3425 into the current offsets of each elimination. */
3426 if (GET_CODE (insn) == CODE_LABEL)
2a4b5f3b
RK
3427 {
3428 num_not_at_initial_offset = 0;
3429 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3430 {
3431 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3432 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
1d0d98f3
RK
3433 if (reg_eliminate[i].can_eliminate
3434 && (reg_eliminate[i].offset
3435 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
3436 num_not_at_initial_offset++;
3437 }
3438 }
32131a9c
RK
3439
3440 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3441 {
3442 rtx avoid_return_reg = 0;
3443
3444#ifdef SMALL_REGISTER_CLASSES
3445 /* Set avoid_return_reg if this is an insn
3446 that might use the value of a function call. */
3447 if (GET_CODE (insn) == CALL_INSN)
3448 {
3449 if (GET_CODE (PATTERN (insn)) == SET)
3450 after_call = SET_DEST (PATTERN (insn));
3451 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3452 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3453 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3454 else
3455 after_call = 0;
3456 }
3457 else if (after_call != 0
3458 && !(GET_CODE (PATTERN (insn)) == SET
3459 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3460 {
3461 if (reg_mentioned_p (after_call, PATTERN (insn)))
3462 avoid_return_reg = after_call;
3463 after_call = 0;
3464 }
3465#endif /* SMALL_REGISTER_CLASSES */
3466
2758481d
RS
3467 /* If this is a USE and CLOBBER of a MEM, ensure that any
3468 references to eliminable registers have been removed. */
3469
3470 if ((GET_CODE (PATTERN (insn)) == USE
3471 || GET_CODE (PATTERN (insn)) == CLOBBER)
3472 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3473 XEXP (XEXP (PATTERN (insn), 0), 0)
3474 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
fb3821f7 3475 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
2758481d 3476
32131a9c
RK
3477 /* If we need to do register elimination processing, do so.
3478 This might delete the insn, in which case we are done. */
3479 if (num_eliminable && GET_MODE (insn) == QImode)
3480 {
3481 eliminate_regs_in_insn (insn, 1);
3482 if (GET_CODE (insn) == NOTE)
3483 {
3484 insn = next;
3485 continue;
3486 }
3487 }
3488
3489 if (GET_MODE (insn) == VOIDmode)
3490 n_reloads = 0;
3491 /* First find the pseudo regs that must be reloaded for this insn.
3492 This info is returned in the tables reload_... (see reload.h).
3493 Also modify the body of INSN by substituting RELOAD
3494 rtx's for those pseudo regs. */
3495 else
3496 {
3497 bzero (reg_has_output_reload, max_regno);
3498 CLEAR_HARD_REG_SET (reg_is_output_reload);
3499
3500 find_reloads (insn, 1, spill_indirect_levels, live_known,
3501 spill_reg_order);
3502 }
3503
3504 if (n_reloads > 0)
3505 {
3c3eeea6
RK
3506 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3507 rtx p;
32131a9c
RK
3508 int class;
3509
3510 /* If this block has not had spilling done for a
a8fdc208 3511 particular class, deactivate any optional reloads
32131a9c
RK
3512 of that class lest they try to use a spill-reg which isn't
3513 available here. If we have any non-optionals that need a
3514 spill reg, abort. */
3515
3516 for (class = 0; class < N_REG_CLASSES; class++)
3517 if (basic_block_needs[class] != 0
3518 && basic_block_needs[class][this_block] == 0)
3519 for (i = 0; i < n_reloads; i++)
3520 if (class == (int) reload_reg_class[i])
3521 {
3522 if (reload_optional[i])
b07ef7b9
RK
3523 {
3524 reload_in[i] = reload_out[i] = 0;
3525 reload_secondary_p[i] = 0;
3526 }
3527 else if (reload_reg_rtx[i] == 0
3528 && (reload_in[i] != 0 || reload_out[i] != 0
3529 || reload_secondary_p[i] != 0))
32131a9c
RK
3530 abort ();
3531 }
3532
3533 /* Now compute which reload regs to reload them into. Perhaps
3534 reusing reload regs from previous insns, or else output
3535 load insns to reload them. Maybe output store insns too.
3536 Record the choices of reload reg in reload_reg_rtx. */
3537 choose_reload_regs (insn, avoid_return_reg);
3538
3539 /* Generate the insns to reload operands into or out of
3540 their reload regs. */
3541 emit_reload_insns (insn);
3542
3543 /* Substitute the chosen reload regs from reload_reg_rtx
3544 into the insn's body (or perhaps into the bodies of other
3545 load and store insn that we just made for reloading
3546 and that we moved the structure into). */
3547 subst_reloads ();
3c3eeea6
RK
3548
3549 /* If this was an ASM, make sure that all the reload insns
3550 we have generated are valid. If not, give an error
3551 and delete them. */
3552
3553 if (asm_noperands (PATTERN (insn)) >= 0)
3554 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3555 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3556 && (recog_memoized (p) < 0
3557 || (insn_extract (p),
3558 ! constrain_operands (INSN_CODE (p), 1))))
3559 {
3560 error_for_asm (insn,
3561 "`asm' operand requires impossible reload");
3562 PUT_CODE (p, NOTE);
3563 NOTE_SOURCE_FILE (p) = 0;
3564 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3565 }
32131a9c
RK
3566 }
3567 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3568 is no longer validly lying around to save a future reload.
3569 Note that this does not detect pseudos that were reloaded
3570 for this insn in order to be stored in
3571 (obeying register constraints). That is correct; such reload
3572 registers ARE still valid. */
3573 note_stores (PATTERN (insn), forget_old_reloads_1);
3574
3575 /* There may have been CLOBBER insns placed after INSN. So scan
3576 between INSN and NEXT and use them to forget old reloads. */
3577 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3578 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3579 note_stores (PATTERN (x), forget_old_reloads_1);
3580
3581#ifdef AUTO_INC_DEC
3582 /* Likewise for regs altered by auto-increment in this insn.
3583 But note that the reg-notes are not changed by reloading:
3584 they still contain the pseudo-regs, not the spill regs. */
3585 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3586 if (REG_NOTE_KIND (x) == REG_INC)
3587 {
3588 /* See if this pseudo reg was reloaded in this insn.
3589 If so, its last-reload info is still valid
3590 because it is based on this insn's reload. */
3591 for (i = 0; i < n_reloads; i++)
3592 if (reload_out[i] == XEXP (x, 0))
3593 break;
3594
3595 if (i != n_reloads)
3596 forget_old_reloads_1 (XEXP (x, 0));
3597 }
3598#endif
3599 }
3600 /* A reload reg's contents are unknown after a label. */
3601 if (GET_CODE (insn) == CODE_LABEL)
3602 for (i = 0; i < n_spills; i++)
3603 {
3604 reg_reloaded_contents[i] = -1;
3605 reg_reloaded_insn[i] = 0;
3606 }
3607
3608 /* Don't assume a reload reg is still good after a call insn
3609 if it is a call-used reg. */
3610 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == CALL_INSN)
3611 for (i = 0; i < n_spills; i++)
3612 if (call_used_regs[spill_regs[i]])
3613 {
3614 reg_reloaded_contents[i] = -1;
3615 reg_reloaded_insn[i] = 0;
3616 }
3617
3618 /* In case registers overlap, allow certain insns to invalidate
3619 particular hard registers. */
3620
3621#ifdef INSN_CLOBBERS_REGNO_P
3622 for (i = 0 ; i < n_spills ; i++)
3623 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3624 {
3625 reg_reloaded_contents[i] = -1;
3626 reg_reloaded_insn[i] = 0;
3627 }
3628#endif
3629
3630 insn = next;
3631
3632#ifdef USE_C_ALLOCA
3633 alloca (0);
3634#endif
3635 }
3636}
3637
3638 /* Discard all record of any value reloaded from X,
3639 or reloaded in X from someplace else;
3640 unless X is an output reload reg of the current insn.
3641
3642 X may be a hard reg (the reload reg)
3643 or it may be a pseudo reg that was reloaded from. */
3644
3645 static void
3646 forget_old_reloads_1 (x)
3647 rtx x;
3648 {
3649 register int regno;
3650 int nr;
0a2e51a9
RS
/* Word offset accumulated from any enclosing SUBREGs of X. */
3651 int offset = 0;
3652
3653 /* note_stores does give us subregs of hard regs. */
3654 while (GET_CODE (x) == SUBREG)
3655 {
3656 offset += SUBREG_WORD (x);
3657 x = SUBREG_REG (x);
3658 }
32131a9c
RK
3659
/* Only REG stores are interesting; anything else (e.g. MEM) is ignored. */
3660 if (GET_CODE (x) != REG)
3661 return;
3662
0a2e51a9 3663 regno = REGNO (x) + offset;
32131a9c
RK
3664
/* A pseudo reg occupies a single slot in the tables below; a hard reg
   may span several consecutive hard registers, per HARD_REGNO_NREGS. */
3665 if (regno >= FIRST_PSEUDO_REGISTER)
3666 nr = 1;
3667 else
3668 {
3669 int i;
3670 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3671 /* Storing into a spilled-reg invalidates its contents.
3672 This can happen if a block-local pseudo is allocated to that reg
3673 and it wasn't spilled because this block's total need is 0.
3674 Then some insn might have an optional reload and use this reg. */
3675 for (i = 0; i < nr; i++)
3676 if (spill_reg_order[regno + i] >= 0
3677 /* But don't do this if the reg actually serves as an output
3678 reload reg in the current instruction. */
3679 && (n_reloads == 0
3680 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3681 {
3682 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3683 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3684 }
3685 }
3686
3687 /* Since value of X has changed,
3688 forget any value previously copied from it. */
3689
3690 while (nr-- > 0)
3691 /* But don't forget a copy if this is the output reload
3692 that establishes the copy's validity. */
3693 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3694 reg_last_reload_reg[regno + nr] = 0;
3695}
3696\f
3697 /* For each reload, the mode of the reload register.
   (Indexed by reload number; filled in at the start of
   choose_reload_regs from reload_inmode/reload_outmode.) */
3698 static enum machine_mode reload_mode[MAX_RELOADS];
3699
3700 /* For each reload, the largest number of registers it will require.
   (Indexed by reload number; computed in choose_reload_regs with
   CLASS_MAX_NREGS from reload_mode.) */
3701 static int reload_nregs[MAX_RELOADS];
3702
3703/* Comparison function for qsort to decide which of two reloads
3704 should be handled first. *P1 and *P2 are the reload numbers. */
3705
3706static int
3707reload_reg_class_lower (p1, p2)
3708 short *p1, *p2;
3709{
3710 register int r1 = *p1, r2 = *p2;
3711 register int t;
a8fdc208 3712
32131a9c
RK
3713 /* Consider required reloads before optional ones. */
3714 t = reload_optional[r1] - reload_optional[r2];
3715 if (t != 0)
3716 return t;
3717
3718 /* Count all solitary classes before non-solitary ones. */
3719 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3720 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3721 if (t != 0)
3722 return t;
3723
3724 /* Aside from solitaires, consider all multi-reg groups first. */
3725 t = reload_nregs[r2] - reload_nregs[r1];
3726 if (t != 0)
3727 return t;
3728
3729 /* Consider reloads in order of increasing reg-class number. */
3730 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3731 if (t != 0)
3732 return t;
3733
3734 /* If reloads are equally urgent, sort by reload number,
3735 so that the results of qsort leave nothing to chance. */
3736 return r1 - r2;
3737}
3738\f
3739 /* The following HARD_REG_SETs indicate when each hard register is
3740 used for a reload of various parts of the current insn.
   They are cleared for each insn in choose_reload_regs, set by
   mark_reload_reg_in_use, and consulted by reload_reg_free_p and
   the related availability predicates below. */
3741
3742 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
3743 static HARD_REG_SET reload_reg_used;
3744 /* If reg is in use for a RELOAD_FOR_INPUT_RELOAD_ADDRESS reload. */
3745 static HARD_REG_SET reload_reg_used_in_input_addr;
3746 /* If reg is in use for a RELOAD_FOR_OUTPUT_RELOAD_ADDRESS reload. */
3747 static HARD_REG_SET reload_reg_used_in_output_addr;
3748 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
3749 static HARD_REG_SET reload_reg_used_in_op_addr;
3750 /* If reg is in use for a RELOAD_FOR_INPUT reload. */
3751 static HARD_REG_SET reload_reg_used_in_input;
3752 /* If reg is in use for a RELOAD_FOR_OUTPUT reload. */
3753 static HARD_REG_SET reload_reg_used_in_output;
3754
3755 /* If reg is in use as a reload reg for any sort of reload. */
3756 static HARD_REG_SET reload_reg_used_at_all;
3757
3758/* Mark reg REGNO as in use for a reload of the sort spec'd by WHEN_NEEDED.
3759 MODE is used to indicate how many consecutive regs are actually used. */
3760
3761static void
3762mark_reload_reg_in_use (regno, when_needed, mode)
3763 int regno;
3764 enum reload_when_needed when_needed;
3765 enum machine_mode mode;
3766{
3767 int nregs = HARD_REGNO_NREGS (regno, mode);
3768 int i;
3769
3770 for (i = regno; i < nregs + regno; i++)
3771 {
3772 switch (when_needed)
3773 {
3774 case RELOAD_OTHER:
3775 SET_HARD_REG_BIT (reload_reg_used, i);
3776 break;
3777
3778 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3779 SET_HARD_REG_BIT (reload_reg_used_in_input_addr, i);
3780 break;
3781
3782 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3783 SET_HARD_REG_BIT (reload_reg_used_in_output_addr, i);
3784 break;
3785
3786 case RELOAD_FOR_OPERAND_ADDRESS:
3787 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
3788 break;
3789
3790 case RELOAD_FOR_INPUT:
3791 SET_HARD_REG_BIT (reload_reg_used_in_input, i);
3792 break;
3793
3794 case RELOAD_FOR_OUTPUT:
3795 SET_HARD_REG_BIT (reload_reg_used_in_output, i);
3796 break;
3797 }
3798
3799 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
3800 }
3801}
3802
3803/* 1 if reg REGNO is free as a reload reg for a reload of the sort
3804 specified by WHEN_NEEDED. */
3805
3806static int
3807reload_reg_free_p (regno, when_needed)
3808 int regno;
3809 enum reload_when_needed when_needed;
3810{
3811 /* In use for a RELOAD_OTHER means it's not available for anything. */
3812 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
3813 return 0;
3814 switch (when_needed)
3815 {
3816 case RELOAD_OTHER:
3817 /* In use for anything means not available for a RELOAD_OTHER. */
3818 return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);
3819
3820 /* The other kinds of use can sometimes share a register. */
3821 case RELOAD_FOR_INPUT:
3822 return (! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno)
3823 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3824 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno));
3825 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3826 return (! TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno)
3827 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno));
3828 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3829 return (! TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno)
3830 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3831 case RELOAD_FOR_OPERAND_ADDRESS:
3832 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3833 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno)
3834 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3835 case RELOAD_FOR_OUTPUT:
3836 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3837 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno)
3838 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3839 }
3840 abort ();
3841}
3842
3843/* Return 1 if the value in reload reg REGNO, as used by a reload
3844 needed for the part of the insn specified by WHEN_NEEDED,
3845 is not in use for a reload in any prior part of the insn.
3846
3847 We can assume that the reload reg was already tested for availability
3848 at the time it is needed, and we should not check this again,
3849 in case the reg has already been marked in use. */
3850
3851static int
3852reload_reg_free_before_p (regno, when_needed)
3853 int regno;
3854 enum reload_when_needed when_needed;
3855{
3856 switch (when_needed)
3857 {
3858 case RELOAD_OTHER:
3859 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
3860 its use starts from the beginning, so nothing can use it earlier. */
3861 return 1;
3862
3863 /* If this use is for part of the insn,
3864 check the reg is not in use for any prior part. */
3865 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3866 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
3867 return 0;
3868 case RELOAD_FOR_OUTPUT:
3869 if (TEST_HARD_REG_BIT (reload_reg_used_in_input, regno))
3870 return 0;
3871 case RELOAD_FOR_OPERAND_ADDRESS:
3872 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno))
3873 return 0;
3874 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3875 case RELOAD_FOR_INPUT:
3876 return 1;
3877 }
3878 abort ();
3879}
3880
3881/* Return 1 if the value in reload reg REGNO, as used by a reload
3882 needed for the part of the insn specified by WHEN_NEEDED,
3883 is still available in REGNO at the end of the insn.
3884
3885 We can assume that the reload reg was already tested for availability
3886 at the time it is needed, and we should not check this again,
3887 in case the reg has already been marked in use. */
3888
3889static int
3890reload_reg_reaches_end_p (regno, when_needed)
3891 int regno;
3892 enum reload_when_needed when_needed;
3893{
3894 switch (when_needed)
3895 {
3896 case RELOAD_OTHER:
3897 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
3898 its value must reach the end. */
3899 return 1;
3900
3901 /* If this use is for part of the insn,
3902 its value reaches if no subsequent part uses the same register. */
3903 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3904 case RELOAD_FOR_INPUT:
3905 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3906 || TEST_HARD_REG_BIT (reload_reg_used_in_output, regno))
3907 return 0;
3908 case RELOAD_FOR_OPERAND_ADDRESS:
3909 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno))
3910 return 0;
3911 case RELOAD_FOR_OUTPUT:
3912 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3913 return 1;
3914 }
3915 abort ();
3916}
3917\f
3918/* Vector of reload-numbers showing the order in which the reloads should
3919 be processed. */
3920short reload_order[MAX_RELOADS];
3921
3922/* Indexed by reload number, 1 if incoming value
3923 inherited from previous insns. */
3924char reload_inherited[MAX_RELOADS];
3925
3926/* For an inherited reload, this is the insn the reload was inherited from,
3927 if we know it. Otherwise, this is 0. */
3928rtx reload_inheritance_insn[MAX_RELOADS];
3929
3930/* If non-zero, this is a place to get the value of the reload,
3931 rather than using reload_in. */
3932rtx reload_override_in[MAX_RELOADS];
3933
3934/* For each reload, the index in spill_regs of the spill register used,
3935 or -1 if we did not need one of the spill registers for this reload. */
3936int reload_spill_index[MAX_RELOADS];
3937
3938/* Index of last register assigned as a spill register. We allocate in
3939 a round-robin fashio. */
3940
3941static last_spill_reg = 0;
3942
3943 /* Find a spill register to use as a reload register for reload R.
3944 LAST_RELOAD is non-zero if this is the last reload for the insn being
3945 processed.
3946
3947 Set reload_reg_rtx[R] to the register allocated.
3948
3949 If NOERROR is nonzero, we return 1 if successful,
3950 or 0 if we couldn't find a spill reg and we didn't change anything. */
3951
3952 static int
3953 allocate_reload_reg (r, insn, last_reload, noerror)
3954 int r;
3955 rtx insn;
3956 int last_reload;
3957 int noerror;
3958 {
3959 int i;
3960 int pass;
3961 int count;
3962 rtx new;
3963 int regno;
3964
3965 /* If we put this reload ahead, thinking it is a group,
3966 then insist on finding a group. Otherwise we can grab a
a8fdc208 3967 reg that some other reload needs.
32131a9c
RK
3968 (That can happen when we have a 68000 DATA_OR_FP_REG
3969 which is a group of data regs or one fp reg.)
3970 We need not be so restrictive if there are no more reloads
3971 for this insn.
3972
3973 ??? Really it would be nicer to have smarter handling
3974 for that kind of reg class, where a problem like this is normal.
3975 Perhaps those classes should be avoided for reloading
3976 by use of more alternatives. */
3977
3978 int force_group = reload_nregs[r] > 1 && ! last_reload;
3979
3980 /* If we want a single register and haven't yet found one,
3981 take any reg in the right class and not in use.
3982 If we want a consecutive group, here is where we look for it.
3983
3984 We use two passes so we can first look for reload regs to
3985 reuse, which are already in use for other reloads in this insn,
3986 and only then use additional registers.
3987 I think that maximizing reuse is needed to make sure we don't
3988 run out of reload regs. Suppose we have three reloads, and
3989 reloads A and B can share regs. These need two regs.
3990 Suppose A and B are given different regs.
3991 That leaves none for C. */
3992 for (pass = 0; pass < 2; pass++)
3993 {
3994 /* I is the index in spill_regs.
3995 We advance it round-robin between insns to use all spill regs
3996 equally, so that inherited reloads have a chance
3997 of leapfrogging each other. */
3998
3999 for (count = 0, i = last_spill_reg; count < n_spills; count++)
4000 {
4001 int class = (int) reload_reg_class[r];
4002
/* Advance round-robin; starts one past last_spill_reg. */
4003 i = (i + 1) % n_spills;
4004
/* Candidate must be free for this part of the insn, in the right
   class, and able to hold the reload mode at all. */
4005 if (reload_reg_free_p (spill_regs[i], reload_when_needed[r])
4006 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4007 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4008 /* Look first for regs to share, then for unshared. */
4009 && (pass || TEST_HARD_REG_BIT (reload_reg_used_at_all,
4010 spill_regs[i])))
4011 {
4012 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4013 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4014 (on 68000) got us two FP regs. If NR is 1,
4015 we would reject both of them. */
4016 if (force_group)
4017 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4018 /* If we need only one reg, we have already won. */
4019 if (nr == 1)
4020 {
4021 /* But reject a single reg if we demand a group. */
4022 if (force_group)
4023 continue;
4024 break;
4025 }
4026 /* Otherwise check that as many consecutive regs as we need
4027 are available here.
4028 Also, don't use for a group registers that are
4029 needed for nongroups. */
4030 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
/* Walk the group from the top down; NR decrements to 1 only if
   every member of the group is acceptable. */
4031 while (nr > 1)
4032 {
4033 regno = spill_regs[i] + nr - 1;
4034 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4035 && spill_reg_order[regno] >= 0
4036 && reload_reg_free_p (regno, reload_when_needed[r])
4037 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4038 regno)))
4039 break;
4040 nr--;
4041 }
4042 if (nr == 1)
4043 break;
4044 }
4045 }
4046
4047 /* If we found something on pass 1, omit pass 2. */
4048 if (count < n_spills)
4049 break;
4050 }
4051
4052 /* We should have found a spill register by now. */
4053 if (count == n_spills)
4054 {
4055 if (noerror)
4056 return 0;
139fc12e 4057 goto failure;
32131a9c
RK
4058 }
4059
/* Remember where this scan left off, for the next round-robin start. */
4060 last_spill_reg = i;
4061
4062 /* Mark as in use for this insn the reload regs we use for this. */
4063 mark_reload_reg_in_use (spill_regs[i], reload_when_needed[r],
4064 reload_mode[r]);
4065
/* Reuse the cached REG rtx for this spill reg if its mode matches;
   otherwise make (and cache) a fresh one in the reload mode. */
4066 new = spill_reg_rtx[i];
4067
4068 if (new == 0 || GET_MODE (new) != reload_mode[r])
4069 spill_reg_rtx[i] = new = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4070
4071 reload_reg_rtx[r] = new;
4072 reload_spill_index[r] = i;
4073 regno = true_regnum (new);
4074
4075 /* Detect when the reload reg can't hold the reload mode.
4076 This used to be one `if', but Sequent compiler can't handle that. */
4077 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4078 {
4079 enum machine_mode test_mode = VOIDmode;
4080 if (reload_in[r])
4081 test_mode = GET_MODE (reload_in[r]);
4082 /* If reload_in[r] has VOIDmode, it means we will load it
4083 in whatever mode the reload reg has: to wit, reload_mode[r].
4084 We have already tested that for validity. */
4085 /* Aside from that, we need to test that the expressions
4086 to reload from or into have modes which are valid for this
4087 reload register. Otherwise the reload insns would be invalid. */
/* The two nested `if (! ...)' tests together mean: neither the input
   mode nor the output mode is invalid for this hard reg. */
4088 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4089 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4090 if (! (reload_out[r] != 0
4091 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4092 /* The reg is OK. */
4093 return 1;
4094 }
4095
4096 /* The reg is not OK. */
4097 if (noerror)
4098 return 0;
4099
139fc12e 4100 failure:
32131a9c
RK
4101 if (asm_noperands (PATTERN (insn)) < 0)
4102 /* It's the compiler's fault. */
4103 abort ();
4104
4105 /* It's the user's fault; the operand's mode and constraint
4106 don't match. Disable this reload so we don't crash in final. */
4107 error_for_asm (insn,
4108 "`asm' operand constraint incompatible with operand size");
4109 reload_in[r] = 0;
4110 reload_out[r] = 0;
4111 reload_reg_rtx[r] = 0;
4112 reload_optional[r] = 1;
4113 reload_secondary_p[r] = 1;
4114
/* Report success even on this path: the reload has been made inert,
   so the caller can proceed without retrying. */
4115 return 1;
4116}
4117\f
4118/* Assign hard reg targets for the pseudo-registers we must reload
4119 into hard regs for this insn.
4120 Also output the instructions to copy them in and out of the hard regs.
4121
4122 For machines with register classes, we are responsible for
4123 finding a reload reg in the proper class. */
4124
4125static void
4126choose_reload_regs (insn, avoid_return_reg)
4127 rtx insn;
4128 /* This argument is currently ignored. */
4129 rtx avoid_return_reg;
4130{
4131 register int i, j;
4132 int max_group_size = 1;
4133 enum reg_class group_class = NO_REGS;
4134 int inheritance;
4135
4136 rtx save_reload_reg_rtx[MAX_RELOADS];
4137 char save_reload_inherited[MAX_RELOADS];
4138 rtx save_reload_inheritance_insn[MAX_RELOADS];
4139 rtx save_reload_override_in[MAX_RELOADS];
4140 int save_reload_spill_index[MAX_RELOADS];
4141 HARD_REG_SET save_reload_reg_used;
4142 HARD_REG_SET save_reload_reg_used_in_input_addr;
4143 HARD_REG_SET save_reload_reg_used_in_output_addr;
4144 HARD_REG_SET save_reload_reg_used_in_op_addr;
4145 HARD_REG_SET save_reload_reg_used_in_input;
4146 HARD_REG_SET save_reload_reg_used_in_output;
4147 HARD_REG_SET save_reload_reg_used_at_all;
4148
4149 bzero (reload_inherited, MAX_RELOADS);
4150 bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4151 bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));
4152
4153 CLEAR_HARD_REG_SET (reload_reg_used);
4154 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4155 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr);
4156 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr);
4157 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4158 CLEAR_HARD_REG_SET (reload_reg_used_in_output);
4159 CLEAR_HARD_REG_SET (reload_reg_used_in_input);
4160
4161 /* Distinguish output-only and input-only reloads
4162 because they can overlap with other things. */
4163 for (j = 0; j < n_reloads; j++)
4164 if (reload_when_needed[j] == RELOAD_OTHER
4165 && ! reload_needed_for_multiple[j])
4166 {
4167 if (reload_in[j] == 0)
4168 {
4169 /* But earlyclobber operands must stay as RELOAD_OTHER. */
4170 for (i = 0; i < n_earlyclobbers; i++)
4171 if (rtx_equal_p (reload_out[j], reload_earlyclobbers[i]))
4172 break;
4173 if (i == n_earlyclobbers)
4174 reload_when_needed[j] = RELOAD_FOR_OUTPUT;
4175 }
4176 if (reload_out[j] == 0)
4177 reload_when_needed[j] = RELOAD_FOR_INPUT;
4178
4179 if (reload_secondary_reload[j] >= 0
4180 && ! reload_needed_for_multiple[reload_secondary_reload[j]])
4181 reload_when_needed[reload_secondary_reload[j]]
4182 = reload_when_needed[j];
4183 }
4184
4185#ifdef SMALL_REGISTER_CLASSES
4186 /* Don't bother with avoiding the return reg
4187 if we have no mandatory reload that could use it. */
4188 if (avoid_return_reg)
4189 {
4190 int do_avoid = 0;
4191 int regno = REGNO (avoid_return_reg);
4192 int nregs
4193 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4194 int r;
4195
4196 for (r = regno; r < regno + nregs; r++)
4197 if (spill_reg_order[r] >= 0)
4198 for (j = 0; j < n_reloads; j++)
4199 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4200 && (reload_in[j] != 0 || reload_out[j] != 0
4201 || reload_secondary_p[j])
4202 &&
4203 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4204 do_avoid = 1;
4205 if (!do_avoid)
4206 avoid_return_reg = 0;
4207 }
4208#endif /* SMALL_REGISTER_CLASSES */
4209
4210#if 0 /* Not needed, now that we can always retry without inheritance. */
4211 /* See if we have more mandatory reloads than spill regs.
4212 If so, then we cannot risk optimizations that could prevent
a8fdc208 4213 reloads from sharing one spill register.
32131a9c
RK
4214
4215 Since we will try finding a better register than reload_reg_rtx
4216 unless it is equal to reload_in or reload_out, count such reloads. */
4217
4218 {
4219 int tem = 0;
4220#ifdef SMALL_REGISTER_CLASSES
4221 int tem = (avoid_return_reg != 0);
a8fdc208 4222#endif
32131a9c
RK
4223 for (j = 0; j < n_reloads; j++)
4224 if (! reload_optional[j]
4225 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4226 && (reload_reg_rtx[j] == 0
4227 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4228 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4229 tem++;
4230 if (tem > n_spills)
4231 must_reuse = 1;
4232 }
4233#endif
4234
4235#ifdef SMALL_REGISTER_CLASSES
4236 /* Don't use the subroutine call return reg for a reload
4237 if we are supposed to avoid it. */
4238 if (avoid_return_reg)
4239 {
4240 int regno = REGNO (avoid_return_reg);
4241 int nregs
4242 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4243 int r;
4244
4245 for (r = regno; r < regno + nregs; r++)
4246 if (spill_reg_order[r] >= 0)
4247 SET_HARD_REG_BIT (reload_reg_used, r);
4248 }
4249#endif /* SMALL_REGISTER_CLASSES */
4250
4251 /* In order to be certain of getting the registers we need,
4252 we must sort the reloads into order of increasing register class.
4253 Then our grabbing of reload registers will parallel the process
a8fdc208 4254 that provided the reload registers.
32131a9c
RK
4255
4256 Also note whether any of the reloads wants a consecutive group of regs.
4257 If so, record the maximum size of the group desired and what
4258 register class contains all the groups needed by this insn. */
4259
4260 for (j = 0; j < n_reloads; j++)
4261 {
4262 reload_order[j] = j;
4263 reload_spill_index[j] = -1;
4264
4265 reload_mode[j]
4266 = (reload_strict_low[j] && reload_out[j]
4267 ? GET_MODE (SUBREG_REG (reload_out[j]))
4268 : (reload_inmode[j] == VOIDmode
4269 || (GET_MODE_SIZE (reload_outmode[j])
4270 > GET_MODE_SIZE (reload_inmode[j])))
4271 ? reload_outmode[j] : reload_inmode[j]);
4272
4273 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4274
4275 if (reload_nregs[j] > 1)
4276 {
4277 max_group_size = MAX (reload_nregs[j], max_group_size);
4278 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4279 }
4280
4281 /* If we have already decided to use a certain register,
4282 don't use it in another way. */
4283 if (reload_reg_rtx[j])
4284 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]),
4285 reload_when_needed[j], reload_mode[j]);
4286 }
4287
4288 if (n_reloads > 1)
4289 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
4290
4291 bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
4292 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4293 bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
4294 sizeof reload_inheritance_insn);
4295 bcopy (reload_override_in, save_reload_override_in,
4296 sizeof reload_override_in);
4297 bcopy (reload_spill_index, save_reload_spill_index,
4298 sizeof reload_spill_index);
4299 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4300 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
4301 COPY_HARD_REG_SET (save_reload_reg_used_in_output,
4302 reload_reg_used_in_output);
4303 COPY_HARD_REG_SET (save_reload_reg_used_in_input,
4304 reload_reg_used_in_input);
4305 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr,
4306 reload_reg_used_in_input_addr);
4307 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr,
4308 reload_reg_used_in_output_addr);
4309 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4310 reload_reg_used_in_op_addr);
4311
58b1581b
RS
4312 /* If -O, try first with inheritance, then turning it off.
4313 If not -O, don't do inheritance.
4314 Using inheritance when not optimizing leads to paradoxes
4315 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
4316 because one side of the comparison might be inherited. */
32131a9c 4317
58b1581b 4318 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
32131a9c
RK
4319 {
4320 /* Process the reloads in order of preference just found.
4321 Beyond this point, subregs can be found in reload_reg_rtx.
4322
4323 This used to look for an existing reloaded home for all
4324 of the reloads, and only then perform any new reloads.
4325 But that could lose if the reloads were done out of reg-class order
4326 because a later reload with a looser constraint might have an old
4327 home in a register needed by an earlier reload with a tighter constraint.
4328
4329 To solve this, we make two passes over the reloads, in the order
4330 described above. In the first pass we try to inherit a reload
4331 from a previous insn. If there is a later reload that needs a
4332 class that is a proper subset of the class being processed, we must
4333 also allocate a spill register during the first pass.
4334
4335 Then make a second pass over the reloads to allocate any reloads
4336 that haven't been given registers yet. */
4337
4338 for (j = 0; j < n_reloads; j++)
4339 {
4340 register int r = reload_order[j];
4341
4342 /* Ignore reloads that got marked inoperative. */
4343 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4344 continue;
4345
4346 /* If find_reloads chose to use reload_in or reload_out as a reload
4347 register, we don't need to choose one. Otherwise, try even if it found
4348 one since we might save an insn if we find the value lying around. */
4349 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
4350 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
4351 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
4352 continue;
4353
4354#if 0 /* No longer needed for correct operation.
4355 It might give better code, or might not; worth an experiment? */
4356 /* If this is an optional reload, we can't inherit from earlier insns
4357 until we are sure that any non-optional reloads have been allocated.
4358 The following code takes advantage of the fact that optional reloads
4359 are at the end of reload_order. */
4360 if (reload_optional[r] != 0)
4361 for (i = 0; i < j; i++)
4362 if ((reload_out[reload_order[i]] != 0
4363 || reload_in[reload_order[i]] != 0
4364 || reload_secondary_p[reload_order[i]])
4365 && ! reload_optional[reload_order[i]]
4366 && reload_reg_rtx[reload_order[i]] == 0)
4367 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
4368#endif
4369
4370 /* First see if this pseudo is already available as reloaded
4371 for a previous insn. We cannot try to inherit for reloads
4372 that are smaller than the maximum number of registers needed
4373 for groups unless the register we would allocate cannot be used
4374 for the groups.
4375
4376 We could check here to see if this is a secondary reload for
4377 an object that is already in a register of the desired class.
4378 This would avoid the need for the secondary reload register.
4379 But this is complex because we can't easily determine what
4380 objects might want to be loaded via this reload. So let a register
4381 be allocated here. In `emit_reload_insns' we suppress one of the
4382 loads in the case described above. */
4383
4384 if (inheritance)
4385 {
4386 register int regno = -1;
db660765 4387 enum machine_mode mode;
32131a9c
RK
4388
4389 if (reload_in[r] == 0)
4390 ;
4391 else if (GET_CODE (reload_in[r]) == REG)
db660765
TW
4392 {
4393 regno = REGNO (reload_in[r]);
4394 mode = GET_MODE (reload_in[r]);
4395 }
32131a9c 4396 else if (GET_CODE (reload_in_reg[r]) == REG)
db660765
TW
4397 {
4398 regno = REGNO (reload_in_reg[r]);
4399 mode = GET_MODE (reload_in_reg[r]);
4400 }
32131a9c
RK
4401#if 0
4402 /* This won't work, since REGNO can be a pseudo reg number.
4403 Also, it takes much more hair to keep track of all the things
4404 that can invalidate an inherited reload of part of a pseudoreg. */
4405 else if (GET_CODE (reload_in[r]) == SUBREG
4406 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
4407 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
4408#endif
4409
4410 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
4411 {
4412 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
4413
4414 if (reg_reloaded_contents[i] == regno
db660765
TW
4415 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
4416 >= GET_MODE_SIZE (mode))
32131a9c
RK
4417 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4418 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4419 spill_regs[i])
4420 && (reload_nregs[r] == max_group_size
4421 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
4422 spill_regs[i]))
4423 && reload_reg_free_p (spill_regs[i], reload_when_needed[r])
4424 && reload_reg_free_before_p (spill_regs[i],
4425 reload_when_needed[r]))
4426 {
4427 /* If a group is needed, verify that all the subsequent
4428 registers still have their values intact. */
4429 int nr
4430 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4431 int k;
4432
4433 for (k = 1; k < nr; k++)
4434 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
4435 != regno)
4436 break;
4437
4438 if (k == nr)
4439 {
4440 /* Mark the register as in use for this part of
4441 the insn. */
4442 mark_reload_reg_in_use (spill_regs[i],
4443 reload_when_needed[r],
4444 reload_mode[r]);
4445 reload_reg_rtx[r] = reg_last_reload_reg[regno];
4446 reload_inherited[r] = 1;
4447 reload_inheritance_insn[r] = reg_reloaded_insn[i];
4448 reload_spill_index[r] = i;
4449 }
4450 }
4451 }
4452 }
4453
4454 /* Here's another way to see if the value is already lying around. */
4455 if (inheritance
4456 && reload_in[r] != 0
4457 && ! reload_inherited[r]
4458 && reload_out[r] == 0
4459 && (CONSTANT_P (reload_in[r])
4460 || GET_CODE (reload_in[r]) == PLUS
4461 || GET_CODE (reload_in[r]) == REG
4462 || GET_CODE (reload_in[r]) == MEM)
4463 && (reload_nregs[r] == max_group_size
4464 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
4465 {
4466 register rtx equiv
4467 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
fb3821f7 4468 -1, NULL_PTR, 0, reload_mode[r]);
32131a9c
RK
4469 int regno;
4470
4471 if (equiv != 0)
4472 {
4473 if (GET_CODE (equiv) == REG)
4474 regno = REGNO (equiv);
4475 else if (GET_CODE (equiv) == SUBREG)
4476 {
4477 regno = REGNO (SUBREG_REG (equiv));
4478 if (regno < FIRST_PSEUDO_REGISTER)
4479 regno += SUBREG_WORD (equiv);
4480 }
4481 else
4482 abort ();
4483 }
4484
4485 /* If we found a spill reg, reject it unless it is free
4486 and of the desired class. */
4487 if (equiv != 0
4488 && ((spill_reg_order[regno] >= 0
4489 && ! reload_reg_free_before_p (regno,
4490 reload_when_needed[r]))
4491 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4492 regno)))
4493 equiv = 0;
4494
4495 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
4496 equiv = 0;
4497
4498 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4499 equiv = 0;
4500
4501 /* We found a register that contains the value we need.
4502 If this register is the same as an `earlyclobber' operand
4503 of the current insn, just mark it as a place to reload from
4504 since we can't use it as the reload register itself. */
4505
4506 if (equiv != 0)
4507 for (i = 0; i < n_earlyclobbers; i++)
bfa30b22
RK
4508 if (reg_overlap_mentioned_for_reload_p (equiv,
4509 reload_earlyclobbers[i]))
32131a9c
RK
4510 {
4511 reload_override_in[r] = equiv;
4512 equiv = 0;
4513 break;
4514 }
4515
4516 /* JRV: If the equiv register we have found is explicitly
4517 clobbered in the current insn, mark but don't use, as above. */
4518
4519 if (equiv != 0 && regno_clobbered_p (regno, insn))
4520 {
4521 reload_override_in[r] = equiv;
4522 equiv = 0;
4523 }
4524
4525 /* If we found an equivalent reg, say no code need be generated
4526 to load it, and use it as our reload reg. */
4527 if (equiv != 0 && regno != FRAME_POINTER_REGNUM)
4528 {
4529 reload_reg_rtx[r] = equiv;
4530 reload_inherited[r] = 1;
4531 /* If it is a spill reg,
4532 mark the spill reg as in use for this insn. */
4533 i = spill_reg_order[regno];
4534 if (i >= 0)
4535 mark_reload_reg_in_use (regno, reload_when_needed[r],
4536 reload_mode[r]);
4537 }
4538 }
4539
4540 /* If we found a register to use already, or if this is an optional
4541 reload, we are done. */
4542 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
4543 continue;
4544
4545#if 0 /* No longer needed for correct operation. Might or might not
4546 give better code on the average. Want to experiment? */
4547
4548 /* See if there is a later reload that has a class different from our
4549 class that intersects our class or that requires less register
4550 than our reload. If so, we must allocate a register to this
4551 reload now, since that reload might inherit a previous reload
4552 and take the only available register in our class. Don't do this
4553 for optional reloads since they will force all previous reloads
4554 to be allocated. Also don't do this for reloads that have been
4555 turned off. */
4556
4557 for (i = j + 1; i < n_reloads; i++)
4558 {
4559 int s = reload_order[i];
4560
d45cf215
RS
4561 if ((reload_in[s] == 0 && reload_out[s] == 0
4562 && ! reload_secondary_p[s])
32131a9c
RK
4563 || reload_optional[s])
4564 continue;
4565
4566 if ((reload_reg_class[s] != reload_reg_class[r]
4567 && reg_classes_intersect_p (reload_reg_class[r],
4568 reload_reg_class[s]))
4569 || reload_nregs[s] < reload_nregs[r])
4570 break;
4571 }
4572
4573 if (i == n_reloads)
4574 continue;
4575
4576 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
4577#endif
4578 }
4579
4580 /* Now allocate reload registers for anything non-optional that
4581 didn't get one yet. */
4582 for (j = 0; j < n_reloads; j++)
4583 {
4584 register int r = reload_order[j];
4585
4586 /* Ignore reloads that got marked inoperative. */
4587 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4588 continue;
4589
4590 /* Skip reloads that already have a register allocated or are
4591 optional. */
4592 if (reload_reg_rtx[r] != 0 || reload_optional[r])
4593 continue;
4594
4595 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
4596 break;
4597 }
4598
4599 /* If that loop got all the way, we have won. */
4600 if (j == n_reloads)
4601 break;
4602
4603 fail:
4604 /* Loop around and try without any inheritance. */
4605 /* First undo everything done by the failed attempt
4606 to allocate with inheritance. */
4607 bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
4608 bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
4609 bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
4610 sizeof reload_inheritance_insn);
4611 bcopy (save_reload_override_in, reload_override_in,
4612 sizeof reload_override_in);
4613 bcopy (save_reload_spill_index, reload_spill_index,
4614 sizeof reload_spill_index);
4615 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
4616 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
4617 COPY_HARD_REG_SET (reload_reg_used_in_input,
4618 save_reload_reg_used_in_input);
4619 COPY_HARD_REG_SET (reload_reg_used_in_output,
4620 save_reload_reg_used_in_output);
4621 COPY_HARD_REG_SET (reload_reg_used_in_input_addr,
4622 save_reload_reg_used_in_input_addr);
4623 COPY_HARD_REG_SET (reload_reg_used_in_output_addr,
4624 save_reload_reg_used_in_output_addr);
4625 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
4626 save_reload_reg_used_in_op_addr);
4627 }
4628
4629 /* If we thought we could inherit a reload, because it seemed that
4630 nothing else wanted the same reload register earlier in the insn,
4631 verify that assumption, now that all reloads have been assigned. */
4632
4633 for (j = 0; j < n_reloads; j++)
4634 {
4635 register int r = reload_order[j];
4636
4637 if (reload_inherited[r] && reload_reg_rtx[r] != 0
4638 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
4639 reload_when_needed[r]))
4640 reload_inherited[r] = 0;
4641
4642 /* If we found a better place to reload from,
4643 validate it in the same fashion, if it is a reload reg. */
4644 if (reload_override_in[r]
4645 && (GET_CODE (reload_override_in[r]) == REG
4646 || GET_CODE (reload_override_in[r]) == SUBREG))
4647 {
4648 int regno = true_regnum (reload_override_in[r]);
4649 if (spill_reg_order[regno] >= 0
4650 && ! reload_reg_free_before_p (regno, reload_when_needed[r]))
4651 reload_override_in[r] = 0;
4652 }
4653 }
4654
4655 /* Now that reload_override_in is known valid,
4656 actually override reload_in. */
4657 for (j = 0; j < n_reloads; j++)
4658 if (reload_override_in[j])
4659 reload_in[j] = reload_override_in[j];
4660
4661 /* If this reload won't be done because it has been cancelled or is
4662 optional and not inherited, clear reload_reg_rtx so other
4663 routines (such as subst_reloads) don't get confused. */
4664 for (j = 0; j < n_reloads; j++)
4665 if ((reload_optional[j] && ! reload_inherited[j])
4666 || (reload_in[j] == 0 && reload_out[j] == 0
4667 && ! reload_secondary_p[j]))
4668 reload_reg_rtx[j] = 0;
4669
4670 /* Record which pseudos and which spill regs have output reloads. */
4671 for (j = 0; j < n_reloads; j++)
4672 {
4673 register int r = reload_order[j];
4674
4675 i = reload_spill_index[r];
4676
4677 /* I is nonneg if this reload used one of the spill regs.
4678 If reload_reg_rtx[r] is 0, this is an optional reload
4679 that we opted to ignore. */
4680 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
4681 && reload_reg_rtx[r] != 0)
4682 {
4683 register int nregno = REGNO (reload_out[r]);
372e033b
RS
4684 int nr = 1;
4685
4686 if (nregno < FIRST_PSEUDO_REGISTER)
4687 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
32131a9c
RK
4688
4689 while (--nr >= 0)
372e033b
RS
4690 reg_has_output_reload[nregno + nr] = 1;
4691
4692 if (i >= 0)
32131a9c 4693 {
372e033b
RS
4694 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4695 while (--nr >= 0)
32131a9c
RK
4696 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
4697 }
4698
4699 if (reload_when_needed[r] != RELOAD_OTHER
4700 && reload_when_needed[r] != RELOAD_FOR_OUTPUT)
4701 abort ();
4702 }
4703 }
4704}
4705\f
4706/* Output insns to reload values in and out of the chosen reload regs. */
4707
4708static void
4709emit_reload_insns (insn)
4710 rtx insn;
4711{
4712 register int j;
4713 rtx following_insn = NEXT_INSN (insn);
a8efe40d 4714 rtx before_insn = insn;
32131a9c
RK
4715 rtx first_output_reload_insn = NEXT_INSN (insn);
4716 rtx first_other_reload_insn = insn;
4717 rtx first_operand_address_reload_insn = insn;
4718 int special;
4719 /* Values to be put in spill_reg_store are put here first. */
4720 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
4721
d45cf215 4722 /* If this is a CALL_INSN preceded by USE insns, any reload insns
a8efe40d
RK
4723 must go in front of the first USE insn, not in front of INSN. */
4724
4725 if (GET_CODE (insn) == CALL_INSN && GET_CODE (PREV_INSN (insn)) == INSN
4726 && GET_CODE (PATTERN (PREV_INSN (insn))) == USE)
4727 while (GET_CODE (PREV_INSN (before_insn)) == INSN
4728 && GET_CODE (PATTERN (PREV_INSN (before_insn))) == USE)
4729 first_other_reload_insn = first_operand_address_reload_insn
4730 = before_insn = PREV_INSN (before_insn);
4731
32131a9c
RK
4732 /* Now output the instructions to copy the data into and out of the
4733 reload registers. Do these in the order that the reloads were reported,
4734 since reloads of base and index registers precede reloads of operands
4735 and the operands may need the base and index registers reloaded. */
4736
4737 for (j = 0; j < n_reloads; j++)
4738 {
4739 register rtx old;
4740 rtx oldequiv_reg = 0;
4741 rtx this_reload_insn = 0;
4742 rtx store_insn = 0;
4743
4744 old = reload_in[j];
4745 if (old != 0 && ! reload_inherited[j]
4746 && ! rtx_equal_p (reload_reg_rtx[j], old)
4747 && reload_reg_rtx[j] != 0)
4748 {
4749 register rtx reloadreg = reload_reg_rtx[j];
4750 rtx oldequiv = 0;
4751 enum machine_mode mode;
4752 rtx where;
d445b551 4753 rtx reload_insn;
32131a9c
RK
4754
4755 /* Determine the mode to reload in.
4756 This is very tricky because we have three to choose from.
4757 There is the mode the insn operand wants (reload_inmode[J]).
4758 There is the mode of the reload register RELOADREG.
4759 There is the intrinsic mode of the operand, which we could find
4760 by stripping some SUBREGs.
4761 It turns out that RELOADREG's mode is irrelevant:
4762 we can change that arbitrarily.
4763
4764 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
4765 then the reload reg may not support QImode moves, so use SImode.
4766 If foo is in memory due to spilling a pseudo reg, this is safe,
4767 because the QImode value is in the least significant part of a
4768 slot big enough for a SImode. If foo is some other sort of
4769 memory reference, then it is impossible to reload this case,
4770 so previous passes had better make sure this never happens.
4771
4772 Then consider a one-word union which has SImode and one of its
4773 members is a float, being fetched as (SUBREG:SF union:SI).
4774 We must fetch that as SFmode because we could be loading into
4775 a float-only register. In this case OLD's mode is correct.
4776
4777 Consider an immediate integer: it has VOIDmode. Here we need
4778 to get a mode from something else.
4779
4780 In some cases, there is a fourth mode, the operand's
4781 containing mode. If the insn specifies a containing mode for
4782 this operand, it overrides all others.
4783
4784 I am not sure whether the algorithm here is always right,
4785 but it does the right things in those cases. */
4786
4787 mode = GET_MODE (old);
4788 if (mode == VOIDmode)
4789 mode = reload_inmode[j];
4790 if (reload_strict_low[j])
4791 mode = GET_MODE (SUBREG_REG (reload_in[j]));
4792
4793#ifdef SECONDARY_INPUT_RELOAD_CLASS
4794 /* If we need a secondary register for this operation, see if
4795 the value is already in a register in that class. Don't
4796 do this if the secondary register will be used as a scratch
4797 register. */
4798
4799 if (reload_secondary_reload[j] >= 0
58b1581b
RS
4800 && reload_secondary_icode[j] == CODE_FOR_nothing
4801 && optimize)
32131a9c
RK
4802 oldequiv
4803 = find_equiv_reg (old, insn,
4804 reload_reg_class[reload_secondary_reload[j]],
fb3821f7 4805 -1, NULL_PTR, 0, mode);
32131a9c
RK
4806#endif
4807
4808 /* If reloading from memory, see if there is a register
4809 that already holds the same value. If so, reload from there.
4810 We can pass 0 as the reload_reg_p argument because
4811 any other reload has either already been emitted,
4812 in which case find_equiv_reg will see the reload-insn,
4813 or has yet to be emitted, in which case it doesn't matter
4814 because we will use this equiv reg right away. */
4815
58b1581b 4816 if (oldequiv == 0 && optimize
32131a9c
RK
4817 && (GET_CODE (old) == MEM
4818 || (GET_CODE (old) == REG
4819 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4820 && reg_renumber[REGNO (old)] < 0)))
4821 oldequiv = find_equiv_reg (old, insn, GENERAL_REGS,
fb3821f7 4822 -1, NULL_PTR, 0, mode);
32131a9c
RK
4823
4824 if (oldequiv)
4825 {
4826 int regno = true_regnum (oldequiv);
4827
4828 /* If OLDEQUIV is a spill register, don't use it for this
4829 if any other reload needs it at an earlier stage of this insn
a8fdc208 4830 or at this stage. */
32131a9c
RK
4831 if (spill_reg_order[regno] >= 0
4832 && (! reload_reg_free_p (regno, reload_when_needed[j])
4833 || ! reload_reg_free_before_p (regno,
4834 reload_when_needed[j])))
4835 oldequiv = 0;
4836
4837 /* If OLDEQUIV is not a spill register,
4838 don't use it if any other reload wants it. */
4839 if (spill_reg_order[regno] < 0)
4840 {
4841 int k;
4842 for (k = 0; k < n_reloads; k++)
4843 if (reload_reg_rtx[k] != 0 && k != j
bfa30b22
RK
4844 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
4845 oldequiv))
32131a9c
RK
4846 {
4847 oldequiv = 0;
4848 break;
4849 }
4850 }
4851 }
4852
4853 if (oldequiv == 0)
4854 oldequiv = old;
4855 else if (GET_CODE (oldequiv) == REG)
4856 oldequiv_reg = oldequiv;
4857 else if (GET_CODE (oldequiv) == SUBREG)
4858 oldequiv_reg = SUBREG_REG (oldequiv);
4859
4860 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
4861 then load RELOADREG from OLDEQUIV. */
4862
4863 if (GET_MODE (reloadreg) != mode)
4864 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
4865 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
4866 oldequiv = SUBREG_REG (oldequiv);
4867 if (GET_MODE (oldequiv) != VOIDmode
4868 && mode != GET_MODE (oldequiv))
4869 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
4870
4871 /* Decide where to put reload insn for this reload. */
4872 switch (reload_when_needed[j])
4873 {
4874 case RELOAD_FOR_INPUT:
4875 case RELOAD_OTHER:
4876 where = first_operand_address_reload_insn;
4877 break;
4878 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
4879 where = first_other_reload_insn;
4880 break;
4881 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
4882 where = first_output_reload_insn;
4883 break;
4884 case RELOAD_FOR_OPERAND_ADDRESS:
a8efe40d 4885 where = before_insn;
32131a9c
RK
4886 }
4887
4888 special = 0;
4889
4890 /* Auto-increment addresses must be reloaded in a special way. */
4891 if (GET_CODE (oldequiv) == POST_INC
4892 || GET_CODE (oldequiv) == POST_DEC
4893 || GET_CODE (oldequiv) == PRE_INC
4894 || GET_CODE (oldequiv) == PRE_DEC)
4895 {
4896 /* We are not going to bother supporting the case where a
4897 incremented register can't be copied directly from
4898 OLDEQUIV since this seems highly unlikely. */
4899 if (reload_secondary_reload[j] >= 0)
4900 abort ();
4901 /* Prevent normal processing of this reload. */
4902 special = 1;
4903 /* Output a special code sequence for this case. */
4904 this_reload_insn
4905 = inc_for_reload (reloadreg, oldequiv, reload_inc[j], where);
4906 }
4907
4908 /* If we are reloading a pseudo-register that was set by the previous
4909 insn, see if we can get rid of that pseudo-register entirely
4910 by redirecting the previous insn into our reload register. */
4911
4912 else if (optimize && GET_CODE (old) == REG
4913 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4914 && dead_or_set_p (insn, old)
4915 /* This is unsafe if some other reload
4916 uses the same reg first. */
4917 && (reload_when_needed[j] == RELOAD_OTHER
4918 || reload_when_needed[j] == RELOAD_FOR_INPUT
4919 || reload_when_needed[j] == RELOAD_FOR_INPUT_RELOAD_ADDRESS))
4920 {
4921 rtx temp = PREV_INSN (insn);
4922 while (temp && GET_CODE (temp) == NOTE)
4923 temp = PREV_INSN (temp);
4924 if (temp
4925 && GET_CODE (temp) == INSN
4926 && GET_CODE (PATTERN (temp)) == SET
4927 && SET_DEST (PATTERN (temp)) == old
4928 /* Make sure we can access insn_operand_constraint. */
4929 && asm_noperands (PATTERN (temp)) < 0
4930 /* This is unsafe if prev insn rejects our reload reg. */
4931 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
4932 reloadreg)
4933 /* This is unsafe if operand occurs more than once in current
4934 insn. Perhaps some occurrences aren't reloaded. */
4935 && count_occurrences (PATTERN (insn), old) == 1
4936 /* Don't risk splitting a matching pair of operands. */
4937 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
4938 {
4939 /* Store into the reload register instead of the pseudo. */
4940 SET_DEST (PATTERN (temp)) = reloadreg;
4941 /* If these are the only uses of the pseudo reg,
4942 pretend for GDB it lives in the reload reg we used. */
4943 if (reg_n_deaths[REGNO (old)] == 1
4944 && reg_n_sets[REGNO (old)] == 1)
4945 {
4946 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
4947 alter_reg (REGNO (old), -1);
4948 }
4949 special = 1;
4950 }
4951 }
4952
4953 /* We can't do that, so output an insn to load RELOADREG.
4954 Keep them in the following order:
4955 all reloads for input reload addresses,
4956 all reloads for ordinary input operands,
4957 all reloads for addresses of non-reloaded operands,
4958 the insn being reloaded,
4959 all reloads for addresses of output reloads,
4960 the output reloads. */
4961 if (! special)
4962 {
4963#ifdef SECONDARY_INPUT_RELOAD_CLASS
4964 rtx second_reload_reg = 0;
4965 enum insn_code icode;
4966
4967 /* If we have a secondary reload, pick up the secondary register
d445b551
RK
4968 and icode, if any. If OLDEQUIV and OLD are different or
4969 if this is an in-out reload, recompute whether or not we
4970 still need a secondary register and what the icode should
4971 be. If we still need a secondary register and the class or
4972 icode is different, go back to reloading from OLD if using
4973 OLDEQUIV means that we got the wrong type of register. We
4974 cannot have different class or icode due to an in-out reload
4975 because we don't make such reloads when both the input and
4976 output need secondary reload registers. */
32131a9c
RK
4977
4978 if (reload_secondary_reload[j] >= 0)
4979 {
4980 int secondary_reload = reload_secondary_reload[j];
1554c2c6
RK
4981 rtx real_oldequiv = oldequiv;
4982 rtx real_old = old;
4983
4984 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
4985 and similarly for OLD.
4986 See comments in find_secondary_reload in reload.c. */
4987 if (GET_CODE (oldequiv) == REG
4988 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
4989 && reg_equiv_mem[REGNO (oldequiv)] != 0)
4990 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
4991
4992 if (GET_CODE (old) == REG
4993 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4994 && reg_equiv_mem[REGNO (old)] != 0)
4995 real_old = reg_equiv_mem[REGNO (old)];
4996
32131a9c
RK
4997 second_reload_reg = reload_reg_rtx[secondary_reload];
4998 icode = reload_secondary_icode[j];
4999
d445b551
RK
5000 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5001 || (reload_in[j] != 0 && reload_out[j] != 0))
32131a9c
RK
5002 {
5003 enum reg_class new_class
5004 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
1554c2c6 5005 mode, real_oldequiv);
32131a9c
RK
5006
5007 if (new_class == NO_REGS)
5008 second_reload_reg = 0;
5009 else
5010 {
5011 enum insn_code new_icode;
5012 enum machine_mode new_mode;
5013
5014 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5015 REGNO (second_reload_reg)))
1554c2c6 5016 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
5017 else
5018 {
5019 new_icode = reload_in_optab[(int) mode];
5020 if (new_icode != CODE_FOR_nothing
5021 && ((insn_operand_predicate[(int) new_icode][0]
a8fdc208 5022 && ! ((*insn_operand_predicate[(int) new_icode][0])
32131a9c 5023 (reloadreg, mode)))
a8fdc208
RS
5024 || (insn_operand_predicate[(int) new_icode][1]
5025 && ! ((*insn_operand_predicate[(int) new_icode][1])
1554c2c6 5026 (real_oldequiv, mode)))))
32131a9c
RK
5027 new_icode = CODE_FOR_nothing;
5028
5029 if (new_icode == CODE_FOR_nothing)
5030 new_mode = mode;
5031 else
5032 new_mode = insn_operand_mode[new_icode][2];
5033
5034 if (GET_MODE (second_reload_reg) != new_mode)
5035 {
5036 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5037 new_mode))
1554c2c6 5038 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
5039 else
5040 second_reload_reg
3aaa90c7
MM
5041 = gen_rtx (REG, new_mode,
5042 REGNO (second_reload_reg));
32131a9c
RK
5043 }
5044 }
5045 }
5046 }
5047
5048 /* If we still need a secondary reload register, check
5049 to see if it is being used as a scratch or intermediate
1554c2c6
RK
5050 register and generate code appropriately. If we need
5051 a scratch register, use REAL_OLDEQUIV since the form of
5052 the insn may depend on the actual address if it is
5053 a MEM. */
32131a9c
RK
5054
5055 if (second_reload_reg)
5056 {
5057 if (icode != CODE_FOR_nothing)
5058 {
d445b551 5059 reload_insn = emit_insn_before (GEN_FCN (icode)
1554c2c6
RK
5060 (reloadreg,
5061 real_oldequiv,
d445b551
RK
5062 second_reload_reg),
5063 where);
5064 if (this_reload_insn == 0)
5065 this_reload_insn = reload_insn;
32131a9c
RK
5066 special = 1;
5067 }
5068 else
5069 {
5070 /* See if we need a scratch register to load the
5071 intermediate register (a tertiary reload). */
5072 enum insn_code tertiary_icode
5073 = reload_secondary_icode[secondary_reload];
5074
5075 if (tertiary_icode != CODE_FOR_nothing)
5076 {
5077 rtx third_reload_reg
5078 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5079
d445b551
RK
5080 reload_insn
5081 = emit_insn_before ((GEN_FCN (tertiary_icode)
5082 (second_reload_reg,
1554c2c6 5083 real_oldequiv,
d445b551
RK
5084 third_reload_reg)),
5085 where);
5086 if (this_reload_insn == 0)
5087 this_reload_insn = reload_insn;
32131a9c
RK
5088 }
5089 else
5090 {
d445b551
RK
5091 reload_insn
5092 = gen_input_reload (second_reload_reg,
fe751ebf 5093 oldequiv, where);
d445b551
RK
5094 if (this_reload_insn == 0)
5095 this_reload_insn = reload_insn;
32131a9c
RK
5096 oldequiv = second_reload_reg;
5097 }
5098 }
5099 }
5100 }
5101#endif
5102
5103 if (! special)
d445b551 5104 {
3c3eeea6 5105 reload_insn = gen_input_reload (reloadreg, oldequiv, where);
d445b551
RK
5106 if (this_reload_insn == 0)
5107 this_reload_insn = reload_insn;
5108 }
32131a9c
RK
5109
5110#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5111 /* We may have to make a REG_DEAD note for the secondary reload
5112 register in the insns we just made. Find the last insn that
5113 mentioned the register. */
5114 if (! special && second_reload_reg
5115 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5116 {
5117 rtx prev;
5118
5119 for (prev = where;
5120 prev != PREV_INSN (this_reload_insn);
5121 prev = PREV_INSN (prev))
5122 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
bfa30b22
RK
5123 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5124 PATTERN (prev)))
32131a9c
RK
5125 {
5126 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5127 second_reload_reg,
5128 REG_NOTES (prev));
5129 break;
5130 }
5131 }
5132#endif
5133 }
5134
5135 /* Update where to put other reload insns. */
5136 if (this_reload_insn)
5137 switch (reload_when_needed[j])
5138 {
5139 case RELOAD_FOR_INPUT:
5140 case RELOAD_OTHER:
5141 if (first_other_reload_insn == first_operand_address_reload_insn)
5142 first_other_reload_insn = this_reload_insn;
5143 break;
5144 case RELOAD_FOR_OPERAND_ADDRESS:
a8efe40d 5145 if (first_operand_address_reload_insn == before_insn)
32131a9c 5146 first_operand_address_reload_insn = this_reload_insn;
a8efe40d 5147 if (first_other_reload_insn == before_insn)
32131a9c
RK
5148 first_other_reload_insn = this_reload_insn;
5149 }
5150
5151 /* reload_inc[j] was formerly processed here. */
5152 }
5153
5154 /* Add a note saying the input reload reg
5155 dies in this insn, if anyone cares. */
5156#ifdef PRESERVE_DEATH_INFO_REGNO_P
5157 if (old != 0
5158 && reload_reg_rtx[j] != old
5159 && reload_reg_rtx[j] != 0
5160 && reload_out[j] == 0
5161 && ! reload_inherited[j]
5162 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
5163 {
5164 register rtx reloadreg = reload_reg_rtx[j];
5165
a8fdc208 5166#if 0
32131a9c
RK
5167 /* We can't abort here because we need to support this for sched.c.
5168 It's not terrible to miss a REG_DEAD note, but we should try
5169 to figure out how to do this correctly. */
5170 /* The code below is incorrect for address-only reloads. */
5171 if (reload_when_needed[j] != RELOAD_OTHER
5172 && reload_when_needed[j] != RELOAD_FOR_INPUT)
5173 abort ();
5174#endif
5175
5176 /* Add a death note to this insn, for an input reload. */
5177
5178 if ((reload_when_needed[j] == RELOAD_OTHER
5179 || reload_when_needed[j] == RELOAD_FOR_INPUT)
5180 && ! dead_or_set_p (insn, reloadreg))
5181 REG_NOTES (insn)
5182 = gen_rtx (EXPR_LIST, REG_DEAD,
5183 reloadreg, REG_NOTES (insn));
5184 }
5185
5186 /* When we inherit a reload, the last marked death of the reload reg
5187 may no longer really be a death. */
5188 if (reload_reg_rtx[j] != 0
5189 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
5190 && reload_inherited[j])
5191 {
5192 /* Handle inheriting an output reload.
5193 Remove the death note from the output reload insn. */
5194 if (reload_spill_index[j] >= 0
5195 && GET_CODE (reload_in[j]) == REG
5196 && spill_reg_store[reload_spill_index[j]] != 0
5197 && find_regno_note (spill_reg_store[reload_spill_index[j]],
5198 REG_DEAD, REGNO (reload_reg_rtx[j])))
5199 remove_death (REGNO (reload_reg_rtx[j]),
5200 spill_reg_store[reload_spill_index[j]]);
5201 /* Likewise for input reloads that were inherited. */
5202 else if (reload_spill_index[j] >= 0
5203 && GET_CODE (reload_in[j]) == REG
5204 && spill_reg_store[reload_spill_index[j]] == 0
5205 && reload_inheritance_insn[j] != 0
a8fdc208 5206 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
32131a9c
RK
5207 REGNO (reload_reg_rtx[j])))
5208 remove_death (REGNO (reload_reg_rtx[j]),
5209 reload_inheritance_insn[j]);
5210 else
5211 {
5212 rtx prev;
5213
5214 /* We got this register from find_equiv_reg.
5215 Search back for its last death note and get rid of it.
5216 But don't search back too far.
5217 Don't go past a place where this reg is set,
5218 since a death note before that remains valid. */
5219 for (prev = PREV_INSN (insn);
5220 prev && GET_CODE (prev) != CODE_LABEL;
5221 prev = PREV_INSN (prev))
5222 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5223 && dead_or_set_p (prev, reload_reg_rtx[j]))
5224 {
5225 if (find_regno_note (prev, REG_DEAD,
5226 REGNO (reload_reg_rtx[j])))
5227 remove_death (REGNO (reload_reg_rtx[j]), prev);
5228 break;
5229 }
5230 }
5231 }
5232
5233 /* We might have used find_equiv_reg above to choose an alternate
5234 place from which to reload. If so, and it died, we need to remove
5235 that death and move it to one of the insns we just made. */
5236
5237 if (oldequiv_reg != 0
5238 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
5239 {
5240 rtx prev, prev1;
5241
5242 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
5243 prev = PREV_INSN (prev))
5244 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5245 && dead_or_set_p (prev, oldequiv_reg))
5246 {
5247 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
5248 {
5249 for (prev1 = this_reload_insn;
5250 prev1; prev1 = PREV_INSN (prev1))
5251 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
bfa30b22
RK
5252 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
5253 PATTERN (prev1)))
32131a9c
RK
5254 {
5255 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
5256 oldequiv_reg,
5257 REG_NOTES (prev1));
5258 break;
5259 }
5260 remove_death (REGNO (oldequiv_reg), prev);
5261 }
5262 break;
5263 }
5264 }
5265#endif
5266
5267 /* If we are reloading a register that was recently stored in with an
5268 output-reload, see if we can prove there was
5269 actually no need to store the old value in it. */
5270
5271 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
5272 /* This is unsafe if some other reload uses the same reg first. */
5273 && (reload_when_needed[j] == RELOAD_OTHER
5274 || reload_when_needed[j] == RELOAD_FOR_INPUT
5275 || reload_when_needed[j] == RELOAD_FOR_INPUT_RELOAD_ADDRESS)
5276 && GET_CODE (reload_in[j]) == REG
5277#if 0
5278 /* There doesn't seem to be any reason to restrict this to pseudos
5279 and doing so loses in the case where we are copying from a
5280 register of the wrong class. */
5281 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
5282#endif
5283 && spill_reg_store[reload_spill_index[j]] != 0
5284 && dead_or_set_p (insn, reload_in[j])
5285 /* This is unsafe if operand occurs more than once in current
5286 insn. Perhaps some occurrences weren't reloaded. */
5287 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5288 delete_output_reload (insn, j,
5289 spill_reg_store[reload_spill_index[j]]);
5290
5291 /* Input-reloading is done. Now do output-reloading,
5292 storing the value from the reload-register after the main insn
5293 if reload_out[j] is nonzero.
5294
5295 ??? At some point we need to support handling output reloads of
5296 JUMP_INSNs or insns that set cc0. */
5297 old = reload_out[j];
5298 if (old != 0
5299 && reload_reg_rtx[j] != old
5300 && reload_reg_rtx[j] != 0)
5301 {
5302 register rtx reloadreg = reload_reg_rtx[j];
5303 register rtx second_reloadreg = 0;
5304 rtx prev_insn = PREV_INSN (first_output_reload_insn);
5305 rtx note, p;
5306 enum machine_mode mode;
5307 int special = 0;
5308
5309 /* An output operand that dies right away does need a reload,
5310 but need not be copied from it. Show the new location in the
5311 REG_UNUSED note. */
5312 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
5313 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
5314 {
5315 XEXP (note, 0) = reload_reg_rtx[j];
5316 continue;
5317 }
5318 else if (GET_CODE (old) == SCRATCH)
5319 /* If we aren't optimizing, there won't be a REG_UNUSED note,
5320 but we don't want to make an output reload. */
5321 continue;
5322
5323#if 0
5324 /* Strip off of OLD any size-increasing SUBREGs such as
5325 (SUBREG:SI foo:QI 0). */
5326
5327 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
5328 && (GET_MODE_SIZE (GET_MODE (old))
5329 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
5330 old = SUBREG_REG (old);
5331#endif
5332
5333 /* If is a JUMP_INSN, we can't support output reloads yet. */
5334 if (GET_CODE (insn) == JUMP_INSN)
5335 abort ();
5336
5337 /* Determine the mode to reload in.
5338 See comments above (for input reloading). */
5339
5340 mode = GET_MODE (old);
5341 if (mode == VOIDmode)
79a365a7
RS
5342 {
5343 /* VOIDmode should never happen for an output. */
5344 if (asm_noperands (PATTERN (insn)) < 0)
5345 /* It's the compiler's fault. */
5346 abort ();
5347 error_for_asm (insn, "output operand is constant in `asm'");
5348 /* Prevent crash--use something we know is valid. */
5349 mode = word_mode;
5350 old = gen_rtx (REG, mode, REGNO (reloadreg));
5351 }
32131a9c
RK
5352
5353 /* A strict-low-part output operand needs to be reloaded
5354 in the mode of the entire value. */
5355 if (reload_strict_low[j])
5356 {
5357 mode = GET_MODE (SUBREG_REG (reload_out[j]));
5358 /* Encapsulate OLD into that mode. */
5359 /* If OLD is a subreg, then strip it, since the subreg will
5360 be altered by this very reload. */
5361 while (GET_CODE (old) == SUBREG && GET_MODE (old) != mode)
5362 old = SUBREG_REG (old);
5363 if (GET_MODE (old) != VOIDmode
5364 && mode != GET_MODE (old))
5365 old = gen_rtx (SUBREG, mode, old, 0);
5366 }
5367
5368 if (GET_MODE (reloadreg) != mode)
5369 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5370
5371#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5372
5373 /* If we need two reload regs, set RELOADREG to the intermediate
5374 one, since it will be stored into OUT. We might need a secondary
5375 register only for an input reload, so check again here. */
5376
1554c2c6 5377 if (reload_secondary_reload[j] >= 0)
32131a9c 5378 {
1554c2c6 5379 rtx real_old = old;
32131a9c 5380
1554c2c6
RK
5381 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
5382 && reg_equiv_mem[REGNO (old)] != 0)
5383 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 5384
1554c2c6
RK
5385 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
5386 mode, real_old)
5387 != NO_REGS))
5388 {
5389 second_reloadreg = reloadreg;
5390 reloadreg = reload_reg_rtx[reload_secondary_reload[j]];
32131a9c 5391
1554c2c6
RK
5392 /* See if RELOADREG is to be used as a scratch register
5393 or as an intermediate register. */
5394 if (reload_secondary_icode[j] != CODE_FOR_nothing)
32131a9c 5395 {
1554c2c6
RK
5396 emit_insn_before ((GEN_FCN (reload_secondary_icode[j])
5397 (real_old, second_reloadreg,
5398 reloadreg)),
5399 first_output_reload_insn);
5400 special = 1;
32131a9c
RK
5401 }
5402 else
1554c2c6
RK
5403 {
5404 /* See if we need both a scratch and intermediate reload
5405 register. */
5406 int secondary_reload = reload_secondary_reload[j];
5407 enum insn_code tertiary_icode
5408 = reload_secondary_icode[secondary_reload];
5409 rtx pat;
32131a9c 5410
1554c2c6
RK
5411 if (GET_MODE (reloadreg) != mode)
5412 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5413
5414 if (tertiary_icode != CODE_FOR_nothing)
5415 {
5416 rtx third_reloadreg
5417 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5418 pat = (GEN_FCN (tertiary_icode)
5419 (reloadreg, second_reloadreg, third_reloadreg));
5420 }
9ad5f9f6
JW
5421#ifdef SECONDARY_MEMORY_NEEDED
5422 /* If we need a memory location to do the move, do it that way. */
5423 else if (GET_CODE (reloadreg) == REG
5424 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
5425 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
5426 REGNO_REG_CLASS (REGNO (second_reloadreg)),
5427 GET_MODE (second_reloadreg)))
5428 {
5429 /* Get the memory to use and rewrite both registers
5430 to its mode. */
5431 rtx loc = get_secondary_mem (reloadreg,
5432 GET_MODE (second_reloadreg));
5433 rtx tmp_reloadreg;
5434
5435 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
5436 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
5437 REGNO (second_reloadreg));
5438
5439 if (GET_MODE (loc) != GET_MODE (reloadreg))
5440 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
5441 REGNO (reloadreg));
5442 else
5443 tmp_reloadreg = reloadreg;
5444
5445 emit_insn_before (gen_move_insn (loc, second_reloadreg),
5446 first_output_reload_insn);
5447 pat = gen_move_insn (tmp_reloadreg, loc);
5448 }
5449#endif
1554c2c6
RK
5450 else
5451 pat = gen_move_insn (reloadreg, second_reloadreg);
5452
5453 emit_insn_before (pat, first_output_reload_insn);
5454 }
32131a9c
RK
5455 }
5456 }
5457#endif
5458
5459 /* Output the last reload insn. */
5460 if (! special)
0dadecf6
RK
5461 {
5462#ifdef SECONDARY_MEMORY_NEEDED
5463 /* If we need a memory location to do the move, do it that way. */
5464 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
5465 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
5466 REGNO_REG_CLASS (REGNO (reloadreg)),
5467 GET_MODE (reloadreg)))
5468 {
5469 /* Get the memory to use and rewrite both registers to
5470 its mode. */
5471 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg));
5472
5473 if (GET_MODE (loc) != GET_MODE (reloadreg))
5474 reloadreg = gen_rtx (REG, GET_MODE (loc),
5475 REGNO (reloadreg));
5476
5477 if (GET_MODE (loc) != GET_MODE (old))
5478 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
5479
5480 emit_insn_before (gen_move_insn (loc, reloadreg),
5481 first_output_reload_insn);
5482 emit_insn_before (gen_move_insn (old, loc),
5483 first_output_reload_insn);
5484 }
5485 else
5486#endif
5487 emit_insn_before (gen_move_insn (old, reloadreg),
5488 first_output_reload_insn);
5489 }
32131a9c
RK
5490
5491#ifdef PRESERVE_DEATH_INFO_REGNO_P
5492 /* If final will look at death notes for this reg,
5493 put one on the last output-reload insn to use it. Similarly
5494 for any secondary register. */
5495 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
5496 for (p = PREV_INSN (first_output_reload_insn);
5497 p != prev_insn; p = PREV_INSN (p))
5498 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
5499 && reg_overlap_mentioned_for_reload_p (reloadreg,
5500 PATTERN (p)))
32131a9c
RK
5501 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
5502 reloadreg, REG_NOTES (p));
5503
5504#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5505 if (! special
5506 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
5507 for (p = PREV_INSN (first_output_reload_insn);
5508 p != prev_insn; p = PREV_INSN (p))
5509 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
5510 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
5511 PATTERN (p)))
32131a9c
RK
5512 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
5513 second_reloadreg, REG_NOTES (p));
5514#endif
5515#endif
5516 /* Look at all insns we emitted, just to be safe. */
5517 for (p = NEXT_INSN (prev_insn); p != first_output_reload_insn;
5518 p = NEXT_INSN (p))
5519 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
5520 {
5521 /* If this output reload doesn't come from a spill reg,
5522 clear any memory of reloaded copies of the pseudo reg.
5523 If this output reload comes from a spill reg,
5524 reg_has_output_reload will make this do nothing. */
5525 note_stores (PATTERN (p), forget_old_reloads_1);
5526
5527 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
5528 store_insn = p;
5529 }
5530
5531 first_output_reload_insn = NEXT_INSN (prev_insn);
5532 }
5533
5534 if (reload_spill_index[j] >= 0)
5535 new_spill_reg_store[reload_spill_index[j]] = store_insn;
5536 }
5537
32131a9c
RK
5538 /* Move death notes from INSN
5539 to output-operand-address and output reload insns. */
5540#ifdef PRESERVE_DEATH_INFO_REGNO_P
5541 {
5542 rtx insn1;
5543 /* Loop over those insns, last ones first. */
5544 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
5545 insn1 = PREV_INSN (insn1))
5546 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
5547 {
5548 rtx source = SET_SRC (PATTERN (insn1));
5549 rtx dest = SET_DEST (PATTERN (insn1));
5550
5551 /* The note we will examine next. */
5552 rtx reg_notes = REG_NOTES (insn);
5553 /* The place that pointed to this note. */
5554 rtx *prev_reg_note = &REG_NOTES (insn);
5555
5556 /* If the note is for something used in the source of this
5557 reload insn, or in the output address, move the note. */
5558 while (reg_notes)
5559 {
5560 rtx next_reg_notes = XEXP (reg_notes, 1);
5561 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
5562 && GET_CODE (XEXP (reg_notes, 0)) == REG
5563 && ((GET_CODE (dest) != REG
bfa30b22
RK
5564 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
5565 dest))
5566 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
5567 source)))
32131a9c
RK
5568 {
5569 *prev_reg_note = next_reg_notes;
5570 XEXP (reg_notes, 1) = REG_NOTES (insn1);
5571 REG_NOTES (insn1) = reg_notes;
5572 }
5573 else
5574 prev_reg_note = &XEXP (reg_notes, 1);
5575
5576 reg_notes = next_reg_notes;
5577 }
5578 }
5579 }
5580#endif
5581
5582 /* For all the spill regs newly reloaded in this instruction,
5583 record what they were reloaded from, so subsequent instructions
d445b551
RK
5584 can inherit the reloads.
5585
5586 Update spill_reg_store for the reloads of this insn.
e9e79d69 5587 Copy the elements that were updated in the loop above. */
32131a9c
RK
5588
5589 for (j = 0; j < n_reloads; j++)
5590 {
5591 register int r = reload_order[j];
5592 register int i = reload_spill_index[r];
5593
5594 /* I is nonneg if this reload used one of the spill regs.
5595 If reload_reg_rtx[r] is 0, this is an optional reload
5596 that we opted to ignore. */
d445b551 5597
32131a9c
RK
5598 if (i >= 0 && reload_reg_rtx[r] != 0)
5599 {
5600 /* First, clear out memory of what used to be in this spill reg.
5601 If consecutive registers are used, clear them all. */
5602 int nr
5603 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
5604 int k;
5605
5606 for (k = 0; k < nr; k++)
5607 {
5608 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
5609 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
5610 }
5611
5612 /* Maybe the spill reg contains a copy of reload_out. */
5613 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
5614 {
5615 register int nregno = REGNO (reload_out[r]);
d445b551
RK
5616
5617 spill_reg_store[i] = new_spill_reg_store[i];
32131a9c 5618 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 5619
32131a9c
RK
5620 for (k = 0; k < nr; k++)
5621 {
5622 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5623 = nregno;
5624 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
5625 }
5626 }
d445b551 5627
32131a9c
RK
5628 /* Maybe the spill reg contains a copy of reload_in. */
5629 else if (reload_out[r] == 0
5630 && reload_in[r] != 0
5631 && (GET_CODE (reload_in[r]) == REG
5632 || GET_CODE (reload_in_reg[r]) == REG))
5633 {
5634 register int nregno;
5635 if (GET_CODE (reload_in[r]) == REG)
5636 nregno = REGNO (reload_in[r]);
5637 else
5638 nregno = REGNO (reload_in_reg[r]);
5639
5640 /* If there are two separate reloads (one in and one out)
5641 for the same (hard or pseudo) reg,
a8fdc208 5642 leave reg_last_reload_reg set
32131a9c
RK
5643 based on the output reload.
5644 Otherwise, set it from this input reload. */
5645 if (!reg_has_output_reload[nregno]
5646 /* But don't do so if another input reload
5647 will clobber this one's value. */
5648 && reload_reg_reaches_end_p (spill_regs[i],
5649 reload_when_needed[r]))
5650 {
5651 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551
RK
5652
5653 /* Unless we inherited this reload, show we haven't
5654 recently done a store. */
5655 if (! reload_inherited[r])
5656 spill_reg_store[i] = 0;
5657
32131a9c
RK
5658 for (k = 0; k < nr; k++)
5659 {
5660 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5661 = nregno;
5662 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
5663 = insn;
5664 }
5665 }
5666 }
5667 }
5668
5669 /* The following if-statement was #if 0'd in 1.34 (or before...).
5670 It's reenabled in 1.35 because supposedly nothing else
5671 deals with this problem. */
5672
5673 /* If a register gets output-reloaded from a non-spill register,
5674 that invalidates any previous reloaded copy of it.
5675 But forget_old_reloads_1 won't get to see it, because
5676 it thinks only about the original insn. So invalidate it here. */
5677 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
5678 {
5679 register int nregno = REGNO (reload_out[r]);
5680 reg_last_reload_reg[nregno] = 0;
5681 }
5682 }
5683}
5684\f
5685/* Emit code before BEFORE_INSN to perform an input reload of IN to RELOADREG.
3c3eeea6 5686 Returns first insn emitted. */
32131a9c
RK
5687
5688rtx
3c3eeea6 5689gen_input_reload (reloadreg, in, before_insn)
32131a9c
RK
5690 rtx reloadreg;
5691 rtx in;
5692 rtx before_insn;
5693{
5694 register rtx prev_insn = PREV_INSN (before_insn);
5695
a8fdc208 5696 /* How to do this reload can get quite tricky. Normally, we are being
32131a9c
RK
5697 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
5698 register that didn't get a hard register. In that case we can just
5699 call emit_move_insn.
5700
3002e160
JW
5701 We can also be asked to reload a PLUS that adds either two registers, or
5702 a register and a constant or MEM, or a MEM and a constant. This can
5703 occur during frame pointer elimination and while reloading addresses.
5704 This case is handled by trying to emit a single insn
32131a9c
RK
5705 to perform the add. If it is not valid, we use a two insn sequence.
5706
5707 Finally, we could be called to handle an 'o' constraint by putting
5708 an address into a register. In that case, we first try to do this
5709 with a named pattern of "reload_load_address". If no such pattern
5710 exists, we just emit a SET insn and hope for the best (it will normally
5711 be valid on machines that use 'o').
5712
5713 This entire process is made complex because reload will never
5714 process the insns we generate here and so we must ensure that
5715 they will fit their constraints and also by the fact that parts of
5716 IN might be being reloaded separately and replaced with spill registers.
5717 Because of this, we are, in some sense, just guessing the right approach
5718 here. The one listed above seems to work.
5719
5720 ??? At some point, this whole thing needs to be rethought. */
5721
5722 if (GET_CODE (in) == PLUS
3002e160
JW
5723 && ((GET_CODE (XEXP (in, 0)) == REG
5724 && (GET_CODE (XEXP (in, 1)) == REG
5725 || CONSTANT_P (XEXP (in, 1))
5726 || GET_CODE (XEXP (in, 1)) == MEM))
5727 || (GET_CODE (XEXP (in, 0)) == MEM
5728 && CONSTANT_P (XEXP (in, 1)))))
32131a9c
RK
5729 {
5730 /* We need to compute the sum of what is either a register and a
3002e160
JW
5731 constant, a register and memory, a hard register and a pseudo
5732 register, or memory and a constant and put it into the reload
5733 register. The best possible way of doing this is if the machine
5734 has a three-operand ADD insn that accepts the required operands.
32131a9c
RK
5735
5736 The simplest approach is to try to generate such an insn and see if it
5737 is recognized and matches its constraints. If so, it can be used.
5738
5739 It might be better not to actually emit the insn unless it is valid,
0009eff2 5740 but we need to pass the insn as an operand to `recog' and
b36d7dd7 5741 `insn_extract' and it is simpler to emit and then delete the insn if
0009eff2 5742 not valid than to dummy things up. */
a8fdc208 5743
af929c62 5744 rtx op0, op1, tem, insn;
32131a9c 5745 int code;
a8fdc208 5746
af929c62
RK
5747 op0 = find_replacement (&XEXP (in, 0));
5748 op1 = find_replacement (&XEXP (in, 1));
5749
32131a9c
RK
5750 /* Since constraint checking is strict, commutativity won't be
5751 checked, so we need to do that here to avoid spurious failure
5752 if the add instruction is two-address and the second operand
5753 of the add is the same as the reload reg, which is frequently
5754 the case. If the insn would be A = B + A, rearrange it so
5755 it will be A = A + B as constrain_operands expects. */
a8fdc208 5756
32131a9c
RK
5757 if (GET_CODE (XEXP (in, 1)) == REG
5758 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
af929c62
RK
5759 tem = op0, op0 = op1, op1 = tem;
5760
5761 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
5762 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
32131a9c
RK
5763
5764 insn = emit_insn_before (gen_rtx (SET, VOIDmode, reloadreg, in),
5765 before_insn);
5766 code = recog_memoized (insn);
5767
5768 if (code >= 0)
5769 {
5770 insn_extract (insn);
5771 /* We want constrain operands to treat this insn strictly in
5772 its validity determination, i.e., the way it would after reload
5773 has completed. */
5774 if (constrain_operands (code, 1))
5775 return insn;
5776 }
5777
5778 if (PREV_INSN (insn))
5779 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
5780 if (NEXT_INSN (insn))
5781 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
5782
5783 /* If that failed, we must use a conservative two-insn sequence.
5784 use move to copy constant, MEM, or pseudo register to the reload
af929c62
RK
5785 register since "move" will be able to handle an arbitrary operand,
5786 unlike add which can't, in general. Then add the registers.
32131a9c
RK
5787
5788 If there is another way to do this for a specific machine, a
5789 DEFINE_PEEPHOLE should be specified that recognizes the sequence
5790 we emit below. */
5791
af929c62
RK
5792 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
5793 || (GET_CODE (op1) == REG
5794 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
5795 tem = op0, op0 = op1, op1 = tem;
32131a9c 5796
af929c62 5797 emit_insn_before (gen_move_insn (reloadreg, op0), before_insn);
39b56c2a
RK
5798
5799 /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
5800 This fixes a problem on the 32K where the stack pointer cannot
5801 be used as an operand of an add insn. */
5802
5803 if (rtx_equal_p (op0, op1))
5804 op1 = reloadreg;
5805
af929c62 5806 emit_insn_before (gen_add2_insn (reloadreg, op1), before_insn);
32131a9c
RK
5807 }
5808
0dadecf6
RK
5809#ifdef SECONDARY_MEMORY_NEEDED
5810 /* If we need a memory location to do the move, do it that way. */
5811 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
5812 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
5813 REGNO_REG_CLASS (REGNO (reloadreg)),
5814 GET_MODE (reloadreg)))
5815 {
5816 /* Get the memory to use and rewrite both registers to its mode. */
5817 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg));
5818
5819 if (GET_MODE (loc) != GET_MODE (reloadreg))
5820 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
5821
5822 if (GET_MODE (loc) != GET_MODE (in))
5823 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
5824
0dadecf6 5825 emit_insn_before (gen_move_insn (loc, in), before_insn);
58c8c593 5826 emit_insn_before (gen_move_insn (reloadreg, loc), before_insn);
0dadecf6
RK
5827 }
5828#endif
5829
32131a9c
RK
5830 /* If IN is a simple operand, use gen_move_insn. */
5831 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
3c3eeea6 5832 emit_insn_before (gen_move_insn (reloadreg, in), before_insn);
32131a9c
RK
5833
5834#ifdef HAVE_reload_load_address
5835 else if (HAVE_reload_load_address)
3c3eeea6 5836 emit_insn_before (gen_reload_load_address (reloadreg, in), before_insn);
32131a9c
RK
5837#endif
5838
5839 /* Otherwise, just write (set REGLOADREG IN) and hope for the best. */
5840 else
3c3eeea6 5841 emit_insn_before (gen_rtx (SET, VOIDmode, reloadreg, in), before_insn);
32131a9c
RK
5842
5843 /* Return the first insn emitted.
5844 We can not just return PREV_INSN (before_insn), because there may have
5845 been multiple instructions emitted. Also note that gen_move_insn may
5846 emit more than one insn itself, so we can not assume that there is one
5847 insn emitted per emit_insn_before call. */
5848
5849 return NEXT_INSN (prev_insn);
5850}
5851\f
5852/* Delete a previously made output-reload
5853 whose result we now believe is not needed.
5854 First we double-check.
5855
5856 INSN is the insn now being processed.
5857 OUTPUT_RELOAD_INSN is the insn of the output reload.
5858 J is the reload-number for this insn. */
5859
5860static void
5861delete_output_reload (insn, j, output_reload_insn)
5862 rtx insn;
5863 int j;
5864 rtx output_reload_insn;
5865{
5866 register rtx i1;
5867
5868 /* Get the raw pseudo-register referred to. */
5869
5870 rtx reg = reload_in[j];
5871 while (GET_CODE (reg) == SUBREG)
5872 reg = SUBREG_REG (reg);
5873
5874 /* If the pseudo-reg we are reloading is no longer referenced
5875 anywhere between the store into it and here,
5876 and no jumps or labels intervene, then the value can get
5877 here through the reload reg alone.
5878 Otherwise, give up--return. */
5879 for (i1 = NEXT_INSN (output_reload_insn);
5880 i1 != insn; i1 = NEXT_INSN (i1))
5881 {
5882 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
5883 return;
5884 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
5885 && reg_mentioned_p (reg, PATTERN (i1)))
5886 return;
5887 }
5888
5889 /* If this insn will store in the pseudo again,
5890 the previous store can be removed. */
5891 if (reload_out[j] == reload_in[j])
5892 delete_insn (output_reload_insn);
5893
5894 /* See if the pseudo reg has been completely replaced
5895 with reload regs. If so, delete the store insn
5896 and forget we had a stack slot for the pseudo. */
5897 else if (reg_n_deaths[REGNO (reg)] == 1
5898 && reg_basic_block[REGNO (reg)] >= 0
5899 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
5900 {
5901 rtx i2;
5902
5903 /* We know that it was used only between here
5904 and the beginning of the current basic block.
5905 (We also know that the last use before INSN was
5906 the output reload we are thinking of deleting, but never mind that.)
5907 Search that range; see if any ref remains. */
5908 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
5909 {
d445b551
RK
5910 rtx set = single_set (i2);
5911
32131a9c
RK
5912 /* Uses which just store in the pseudo don't count,
5913 since if they are the only uses, they are dead. */
d445b551 5914 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
5915 continue;
5916 if (GET_CODE (i2) == CODE_LABEL
5917 || GET_CODE (i2) == JUMP_INSN)
5918 break;
5919 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
5920 && reg_mentioned_p (reg, PATTERN (i2)))
5921 /* Some other ref remains;
5922 we can't do anything. */
5923 return;
5924 }
5925
5926 /* Delete the now-dead stores into this pseudo. */
5927 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
5928 {
d445b551
RK
5929 rtx set = single_set (i2);
5930
5931 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
5932 delete_insn (i2);
5933 if (GET_CODE (i2) == CODE_LABEL
5934 || GET_CODE (i2) == JUMP_INSN)
5935 break;
5936 }
5937
5938 /* For the debugging info,
5939 say the pseudo lives in this reload reg. */
5940 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
5941 alter_reg (REGNO (reg), -1);
5942 }
5943}
5944
5945\f
a8fdc208 5946/* Output reload-insns to reload VALUE into RELOADREG.
858a47b1 5947 VALUE is an autoincrement or autodecrement RTX whose operand
32131a9c
RK
5948 is a register or memory location;
5949 so reloading involves incrementing that location.
5950
5951 INC_AMOUNT is the number to increment or decrement by (always positive).
5952 This cannot be deduced from VALUE.
5953
5954 INSN is the insn before which the new insns should be emitted.
5955
5956 The return value is the first of the insns emitted. */
5957
5958static rtx
5959inc_for_reload (reloadreg, value, inc_amount, insn)
5960 rtx reloadreg;
5961 rtx value;
5962 int inc_amount;
5963 rtx insn;
5964{
5965 /* REG or MEM to be copied and incremented. */
5966 rtx incloc = XEXP (value, 0);
5967 /* Nonzero if increment after copying. */
5968 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
0009eff2
RK
5969 rtx prev = PREV_INSN (insn);
5970 rtx inc;
5971 rtx add_insn;
5972 int code;
32131a9c
RK
5973
5974 /* No hard register is equivalent to this register after
5975 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
5976 we could inc/dec that register as well (maybe even using it for
5977 the source), but I'm not sure it's worth worrying about. */
5978 if (GET_CODE (incloc) == REG)
5979 reg_last_reload_reg[REGNO (incloc)] = 0;
5980
5981 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
5982 inc_amount = - inc_amount;
5983
fb3821f7 5984 inc = GEN_INT (inc_amount);
0009eff2
RK
5985
5986 /* If this is post-increment, first copy the location to the reload reg. */
5987 if (post)
5988 emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
5989
5990 /* See if we can directly increment INCLOC. Use a method similar to that
5991 in gen_input_reload. */
5992
5993 add_insn = emit_insn_before (gen_rtx (SET, VOIDmode, incloc,
5994 gen_rtx (PLUS, GET_MODE (incloc),
5995 incloc, inc)), insn);
5996
5997 code = recog_memoized (add_insn);
5998 if (code >= 0)
32131a9c 5999 {
0009eff2
RK
6000 insn_extract (add_insn);
6001 if (constrain_operands (code, 1))
32131a9c 6002 {
0009eff2
RK
6003 /* If this is a pre-increment and we have incremented the value
6004 where it lives, copy the incremented value to RELOADREG to
6005 be used as an address. */
6006
6007 if (! post)
6008 emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
6009 return NEXT_INSN (prev);
32131a9c
RK
6010 }
6011 }
0009eff2
RK
6012
6013 if (PREV_INSN (add_insn))
6014 NEXT_INSN (PREV_INSN (add_insn)) = NEXT_INSN (add_insn);
6015 if (NEXT_INSN (add_insn))
6016 PREV_INSN (NEXT_INSN (add_insn)) = PREV_INSN (add_insn);
6017
6018 /* If couldn't do the increment directly, must increment in RELOADREG.
6019 The way we do this depends on whether this is pre- or post-increment.
6020 For pre-increment, copy INCLOC to the reload register, increment it
6021 there, then save back. */
6022
6023 if (! post)
6024 {
6025 emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
6026 emit_insn_before (gen_add2_insn (reloadreg, inc), insn);
6027 emit_insn_before (gen_move_insn (incloc, reloadreg), insn);
6028 }
32131a9c
RK
6029 else
6030 {
0009eff2
RK
6031 /* Postincrement.
6032 Because this might be a jump insn or a compare, and because RELOADREG
6033 may not be available after the insn in an input reload, we must do
6034 the incrementation before the insn being reloaded for.
6035
6036 We have already copied INCLOC to RELOADREG. Increment the copy in
6037 RELOADREG, save that back, then decrement RELOADREG so it has
6038 the original value. */
6039
6040 emit_insn_before (gen_add2_insn (reloadreg, inc), insn);
6041 emit_insn_before (gen_move_insn (incloc, reloadreg), insn);
fb3821f7 6042 emit_insn_before (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)),
0009eff2 6043 insn);
32131a9c 6044 }
0009eff2
RK
6045
6046 return NEXT_INSN (prev);
32131a9c
RK
6047}
6048\f
6049/* Return 1 if we are certain that the constraint-string STRING allows
6050 the hard register REG. Return 0 if we can't be sure of this. */
6051
6052static int
6053constraint_accepts_reg_p (string, reg)
6054 char *string;
6055 rtx reg;
6056{
6057 int value = 0;
6058 int regno = true_regnum (reg);
6059 int c;
6060
6061 /* Initialize for first alternative. */
6062 value = 0;
6063 /* Check that each alternative contains `g' or `r'. */
6064 while (1)
6065 switch (c = *string++)
6066 {
6067 case 0:
6068 /* If an alternative lacks `g' or `r', we lose. */
6069 return value;
6070 case ',':
6071 /* If an alternative lacks `g' or `r', we lose. */
6072 if (value == 0)
6073 return 0;
6074 /* Initialize for next alternative. */
6075 value = 0;
6076 break;
6077 case 'g':
6078 case 'r':
6079 /* Any general reg wins for this alternative. */
6080 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
6081 value = 1;
6082 break;
6083 default:
6084 /* Any reg in specified class wins for this alternative. */
6085 {
0009eff2 6086 enum reg_class class = REG_CLASS_FROM_LETTER (c);
32131a9c 6087
0009eff2 6088 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
32131a9c
RK
6089 value = 1;
6090 }
6091 }
6092}
6093\f
d445b551
RK
6094/* Return the number of places FIND appears within X, but don't count
6095 an occurrence if some SET_DEST is FIND. */
32131a9c
RK
6096
6097static int
6098count_occurrences (x, find)
6099 register rtx x, find;
6100{
6101 register int i, j;
6102 register enum rtx_code code;
6103 register char *format_ptr;
6104 int count;
6105
6106 if (x == find)
6107 return 1;
6108 if (x == 0)
6109 return 0;
6110
6111 code = GET_CODE (x);
6112
6113 switch (code)
6114 {
6115 case REG:
6116 case QUEUED:
6117 case CONST_INT:
6118 case CONST_DOUBLE:
6119 case SYMBOL_REF:
6120 case CODE_LABEL:
6121 case PC:
6122 case CC0:
6123 return 0;
d445b551
RK
6124
6125 case SET:
6126 if (SET_DEST (x) == find)
6127 return count_occurrences (SET_SRC (x), find);
6128 break;
32131a9c
RK
6129 }
6130
6131 format_ptr = GET_RTX_FORMAT (code);
6132 count = 0;
6133
6134 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6135 {
6136 switch (*format_ptr++)
6137 {
6138 case 'e':
6139 count += count_occurrences (XEXP (x, i), find);
6140 break;
6141
6142 case 'E':
6143 if (XVEC (x, i) != NULL)
6144 {
6145 for (j = 0; j < XVECLEN (x, i); j++)
6146 count += count_occurrences (XVECEXP (x, i, j), find);
6147 }
6148 break;
6149 }
6150 }
6151 return count;
6152}
This page took 0.760412 seconds and 5 git commands to generate.