/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include <stdio.h>
22#include "config.h"
23#include "rtl.h"
24#include "obstack.h"
25#include "insn-config.h"
26#include "insn-flags.h"
27#include "insn-codes.h"
28#include "flags.h"
29#include "expr.h"
30#include "regs.h"
31#include "hard-reg-set.h"
32#include "reload.h"
33#include "recog.h"
34#include "basic-block.h"
35#include "output.h"
32131a9c
RK
36
/* This file contains the reload pass of the compiler, which is
   run after register allocation has been done.  It checks that
   each insn is valid (operands required to be in registers really
   are in registers of the proper class) and fixes up invalid ones
   by copying values temporarily into registers for the insns
   that need them.

   The results of register allocation are described by the vector
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
   can be used to find which hard reg, if any, a pseudo reg is in.

   The technique we always use is to free up a few hard regs that are
   called ``reload regs'', and for each place where a pseudo reg
   must be in a hard reg, copy it temporarily into one of the reload regs.

   All the pseudos that were formerly allocated to the hard regs that
   are now in use as reload regs must be ``spilled''.  This means
   that they go to other hard regs, or to stack slots if no other
   available hard regs can be found.  Spilling can invalidate more
   insns, requiring additional need for reloads, so we must keep checking
   until the process stabilizes.

   For machines with different classes of registers, we must keep track
   of the register class needed for each reload, and make sure that
   we allocate enough reload registers of each class.

   The file reload.c contains the code that checks one insn for
   validity and reports the reloads that it needs.  This file
   is in charge of scanning the entire rtl code, accumulating the
   reload needs, spilling, assigning reload registers to use for
   fixing up each insn, and generating the new insns to copy values
   into the reload registers.  */
\f
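/* Purely illustrative sketch, not part of the pass: suppose an insn adds
   pseudo 105 into hard reg 1, pseudo 105 did not get a hard reg, and the
   add insn requires register operands.  With hard reg 2 chosen as a reload
   reg, the insn is handled roughly as

	(set (reg 2) (mem (plus (reg fp) (const_int -8))))	input reload
	(set (reg 1) (plus (reg 1) (reg 2)))			original insn, 105 replaced

   The pseudo number, registers and stack slot shown are hypothetical; the
   real locations come from reg_renumber, the stack slots assigned by
   alter_reg, or the reg_equiv_* tables below.  */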
/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which pseudo reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine.)  */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;

/* Widest width in which each pseudo reg is referred to (via subreg).  */
static int *reg_max_ref_width;

/* Element N is the insn that initialized reg N from its equivalent
   constant or memory slot.  */
static rtx *reg_equiv_init;

/* During reload_as_needed, element N contains the last pseudo regno
   reloaded into the Nth reload register.  This vector is in parallel
   with spill_regs.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   the Nth reload register was last used.  This vector is in parallel
   with spill_regs, and its contents are significant only when
   reg_reloaded_contents is significant.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

/* This reg set indicates registers that may not be used for retrying global
   allocation.  The registers that may not be used include all spill registers
   and the frame pointer (if we are using one).  */
HARD_REG_SET forbidden_regs;

/* This reg set indicates registers that are not good for spill registers.
   They will not be used to complete groups of spill registers.  This includes
   all fixed registers, registers that may be eliminated, and registers
   explicitly used in the rtl.

   (spill_reg_order prevents these registers from being used to start a
   group.)  */
static HARD_REG_SET bad_spill_regs;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `n_spills' is the number of
   elements that are actually valid; new ones are added at the end.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* Describes order of preference for putting regs into spill_regs.
   Contains the numbers of all the hard regs, in order most preferred first.
   This order is different for each function.
   It is set up by order_regs_for_reload.
   Empty elements at the end contain -1.  */
static short potential_reload_regs[FIRST_PSEUDO_REGISTER];

/* 1 for a hard register that appears explicitly in the rtl
   (for example, function value registers, special registers
   used by insns, structure value pointer registers).  */
static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];

/* Indicates if a register was counted against the need for
   groups.  0 means it can count against max_nongroup instead.  */
static HARD_REG_SET counted_for_groups;

/* Indicates if a register was counted against the need for
   non-groups.  0 means it can become part of a new group.
   During choose_reload_regs, 1 here means don't use this reg
   as part of a group, even if it seems to be otherwise ok.  */
static HARD_REG_SET counted_for_nongroups;

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */

static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */

char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */

char double_reg_address_ok;

/* Record the stack slot for each spilled hard register.  */

static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */

static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Indexed by register class and basic block number, nonzero if there is
   any need for a spill register of that class in that basic block.
   The pointer is 0 if we did stupid allocation and don't know
   the structure of basic blocks.  */

char *basic_block_needs[N_REG_CLASSES];

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */

int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */

int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  */

enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */

struct obstack reload_obstack;
char *reload_firstobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* List of labels that must never be deleted.  */
extern rtx forced_labels;
\f
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

static struct elim_table
{
  int from;                     /* Register number to be eliminated.  */
  int to;                       /* Register number used as replacement.  */
  int initial_offset;           /* Initial difference between values.  */
  int can_eliminate;            /* Non-zero if this elimination can be done.  */
  int can_eliminate_previous;   /* Value of CAN_ELIMINATE in previous scan over
                                   insns made by reload.  */
  int offset;                   /* Current offset between the two regs.  */
  int max_offset;               /* Maximum offset between the two regs.  */
  int previous_offset;          /* Offset at end of previous insn.  */
  int ref_outside_mem;          /* "to" has been referenced outside a MEM.  */
  rtx from_rtx;                 /* REG rtx for the register to be eliminated.
                                   We cannot simply compare the number since
                                   we might then spuriously replace a hard
                                   register corresponding to a pseudo
                                   assigned to the reg to be eliminated.  */
  rtx to_rtx;                   /* REG rtx for the replacement.  */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
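
/* For illustration only (not a definition used here): a target's
   ELIMINABLE_REGS typically looks something like

     #define ELIMINABLE_REGS                              \
       {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },   \
        { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM },   \
        { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}

   with the most preferred elimination for each "from" register listed
   first, as the comment above requires.  */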

/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If non-zero, we use a new copy of each
   replacement result in any insns encountered.  */
static int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the number of the label.
   The first table is an array of flags that records whether we have yet
   encountered a label and the second table is an array of arrays, one
   entry in the latter array for each elimination.  */

static char *offsets_known_at;
static int (*offsets_at)[NUM_ELIMINABLE_REGS];
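
/* (Added note: offsets_at is used as a two-dimensional table, so
   offsets_at[CODE_LABEL_NUMBER (label)][i] is the offset recorded for the
   I'th entry of reg_eliminate at that label, once both tables have been
   biased by get_first_label_num () in reload below.)  */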

/* Number of labels in the current function.  */

static int num_labels;
\f
void mark_home_live ();
static void count_possible_groups ();
static int possible_group_p ();
static void scan_paradoxical_subregs ();
static void reload_as_needed ();
static int modes_equiv_for_class_p ();
static void alter_reg ();
static void delete_dead_insn ();
static void spill_failure ();
static int new_spill_reg ();
static void set_label_offsets ();
static int eliminate_regs_in_insn ();
static void mark_not_eliminable ();
static int spill_hard_reg ();
static void choose_reload_regs ();
static void emit_reload_insns ();
static void delete_output_reload ();
static void forget_old_reloads_1 ();
static void order_regs_for_reload ();
static rtx inc_for_reload ();
static int constraint_accepts_reg_p ();
static int count_occurrences ();

extern void remove_death ();
extern rtx adj_offsettable_operand ();
extern rtx form_sum ();
\f
void
init_reload ()
{
  register int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  register rtx tem
    = gen_rtx (MEM, Pmode,
               gen_rtx (PLUS, Pmode,
                        gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
                        GEN_INT (4)));
  spill_indirect_levels = 0;

  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx (MEM, Pmode, tem);
    }
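
  /* (Added note: for example, spill_indirect_levels ends up 1 on a machine
     where (mem (plus (reg) (const_int 4))) is itself usable as an address,
     2 if a MEM of that address is also usable, and 0 if neither is.)  */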

  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx (PLUS, Pmode,
                     gen_rtx (REG, Pmode, FRAME_POINTER_REGNUM),
                     gen_rtx (REG, Pmode, i));
      /* This way, we make sure that reg+reg is an offsettable address.  */
      tem = plus_constant (tem, 4);

      if (memory_address_p (QImode, tem))
        {
          double_reg_address_ok = 1;
          break;
        }
    }

  /* Initialize obstack for our rtl allocation.  */
  gcc_obstack_init (&reload_obstack);
  reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);

#ifdef HAVE_SECONDARY_RELOADS

  /* Initialize the optabs for doing special input and output reloads.  */

  for (i = 0; i < NUM_MACHINE_MODES; i++)
    reload_in_optab[i] = reload_out_optab[i] = CODE_FOR_nothing;
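
  /* (Added note: a target that needs a scratch register for certain
     reloads can provide named patterns such as `reload_insi' or
     `reload_outsi'; the entries initialized above are then overridden
     below with the corresponding insn codes, and those patterns receive
     the reloaded value plus a scratch register as operands.)  */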
#ifdef HAVE_reload_inqi
  if (HAVE_reload_inqi)
    reload_in_optab[(int) QImode] = CODE_FOR_reload_inqi;
#endif
#ifdef HAVE_reload_inhi
  if (HAVE_reload_inhi)
    reload_in_optab[(int) HImode] = CODE_FOR_reload_inhi;
#endif
#ifdef HAVE_reload_insi
  if (HAVE_reload_insi)
    reload_in_optab[(int) SImode] = CODE_FOR_reload_insi;
#endif
#ifdef HAVE_reload_indi
  if (HAVE_reload_indi)
    reload_in_optab[(int) DImode] = CODE_FOR_reload_indi;
#endif
#ifdef HAVE_reload_inti
  if (HAVE_reload_inti)
    reload_in_optab[(int) TImode] = CODE_FOR_reload_inti;
#endif
#ifdef HAVE_reload_insf
  if (HAVE_reload_insf)
    reload_in_optab[(int) SFmode] = CODE_FOR_reload_insf;
#endif
#ifdef HAVE_reload_indf
  if (HAVE_reload_indf)
    reload_in_optab[(int) DFmode] = CODE_FOR_reload_indf;
#endif
#ifdef HAVE_reload_inxf
  if (HAVE_reload_inxf)
    reload_in_optab[(int) XFmode] = CODE_FOR_reload_inxf;
#endif
#ifdef HAVE_reload_intf
  if (HAVE_reload_intf)
    reload_in_optab[(int) TFmode] = CODE_FOR_reload_intf;
#endif

#ifdef HAVE_reload_outqi
  if (HAVE_reload_outqi)
    reload_out_optab[(int) QImode] = CODE_FOR_reload_outqi;
#endif
#ifdef HAVE_reload_outhi
  if (HAVE_reload_outhi)
    reload_out_optab[(int) HImode] = CODE_FOR_reload_outhi;
#endif
#ifdef HAVE_reload_outsi
  if (HAVE_reload_outsi)
    reload_out_optab[(int) SImode] = CODE_FOR_reload_outsi;
#endif
#ifdef HAVE_reload_outdi
  if (HAVE_reload_outdi)
    reload_out_optab[(int) DImode] = CODE_FOR_reload_outdi;
#endif
#ifdef HAVE_reload_outti
  if (HAVE_reload_outti)
    reload_out_optab[(int) TImode] = CODE_FOR_reload_outti;
#endif
#ifdef HAVE_reload_outsf
  if (HAVE_reload_outsf)
    reload_out_optab[(int) SFmode] = CODE_FOR_reload_outsf;
#endif
#ifdef HAVE_reload_outdf
  if (HAVE_reload_outdf)
    reload_out_optab[(int) DFmode] = CODE_FOR_reload_outdf;
#endif
#ifdef HAVE_reload_outxf
  if (HAVE_reload_outxf)
    reload_out_optab[(int) XFmode] = CODE_FOR_reload_outxf;
#endif
#ifdef HAVE_reload_outtf
  if (HAVE_reload_outtf)
    reload_out_optab[(int) TFmode] = CODE_FOR_reload_outtf;
#endif

#endif /* HAVE_SECONDARY_RELOADS */

}

/* Main entry point for the reload pass, and only entry point
   in this file.

   FIRST is the first insn of the function being compiled.

   GLOBAL nonzero means we were called from global_alloc
   and should attempt to reallocate any pseudoregs that we
   displace from hard regs we will use for reloads.
   If GLOBAL is zero, we do not have enough information to do that,
   so any pseudo reg that is spilled must go to the stack.

   DUMPFILE is the global-reg debugging dump file stream, or 0.
   If it is nonzero, messages are written to it to describe
   which registers are seized as reload regs, which pseudo regs
   are spilled from them, and where the pseudo regs are reallocated to.

   Return value is nonzero if reload failed
   and we must not do any more for this function.  */

int
reload (first, global, dumpfile)
     rtx first;
     int global;
     FILE *dumpfile;
{
  register int class;
  register int i;
  register rtx insn;
  register struct elim_table *ep;

  int something_changed;
  int something_needs_reloads;
  int something_needs_elimination;
  int new_basic_block_needs;
  enum reg_class caller_save_spill_class = NO_REGS;
  int caller_save_group_size = 1;

  /* Nonzero means we couldn't get enough spill regs.  */
  int failure = 0;

  /* The basic block number currently being processed for INSN.  */
  int this_block;

514 /* Make sure even insns with volatile mem refs are recognizable. */
515 init_recog ();
516
517 /* Enable find_equiv_reg to distinguish insns made by reload. */
518 reload_first_uid = get_max_uid ();
519
520 for (i = 0; i < N_REG_CLASSES; i++)
521 basic_block_needs[i] = 0;
522
#ifdef SECONDARY_MEMORY_NEEDED
  /* Initialize the secondary memory table.  */
  clear_secondary_mem ();
#endif

528 /* Remember which hard regs appear explicitly
529 before we merge into `regs_ever_live' the ones in which
530 pseudo regs have been allocated. */
531 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
532
533 /* We don't have a stack slot for any spill reg yet. */
534 bzero (spill_stack_slot, sizeof spill_stack_slot);
535 bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);
536
  /* Initialize the save area information for caller-save, in case some
     are needed.  */
  init_save_areas ();

541 /* Compute which hard registers are now in use
542 as homes for pseudo registers.
543 This is done here rather than (eg) in global_alloc
544 because this point is reached even if not optimizing. */
545
546 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
547 mark_home_live (i);
548
549 /* Make sure that the last insn in the chain
550 is not something that needs reloading. */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);
552
553 /* Find all the pseudo registers that didn't get hard regs
554 but do have known equivalent constants or memory slots.
555 These include parameters (known equivalent to parameter slots)
556 and cse'd or loop-moved constant memory addresses.
557
558 Record constant equivalents in reg_equiv_constant
559 so they will be substituted by find_reloads.
560 Record memory equivalents in reg_mem_equiv so they can
561 be substituted eventually by altering the REG-rtx's. */
562
563 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
564 bzero (reg_equiv_constant, max_regno * sizeof (rtx));
565 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
566 bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
567 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
568 bzero (reg_equiv_mem, max_regno * sizeof (rtx));
569 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
570 bzero (reg_equiv_init, max_regno * sizeof (rtx));
571 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
572 bzero (reg_equiv_address, max_regno * sizeof (rtx));
573 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
574 bzero (reg_max_ref_width, max_regno * sizeof (int));
575
576 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
577 Also find all paradoxical subregs
578 and find largest such for each pseudo. */
579
580 for (insn = first; insn; insn = NEXT_INSN (insn))
581 {
582 rtx set = single_set (insn);
583
584 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
585 {
          rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
          if (note
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
                  || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
#endif
              )
            {
594 rtx x = XEXP (note, 0);
595 i = REGNO (SET_DEST (set));
596 if (i > LAST_VIRTUAL_REGISTER)
597 {
598 if (GET_CODE (x) == MEM)
599 reg_equiv_memory_loc[i] = x;
600 else if (CONSTANT_P (x))
601 {
602 if (LEGITIMATE_CONSTANT_P (x))
603 reg_equiv_constant[i] = x;
604 else
                    reg_equiv_memory_loc[i]
                      = force_const_mem (GET_MODE (SET_DEST (set)), x);
607 }
608 else
609 continue;
610
611 /* If this register is being made equivalent to a MEM
612 and the MEM is not SET_SRC, the equivalencing insn
613 is one with the MEM as a SET_DEST and it occurs later.
614 So don't mark this insn now. */
615 if (GET_CODE (x) != MEM
616 || rtx_equal_p (SET_SRC (set), x))
617 reg_equiv_init[i] = insn;
618 }
619 }
620 }
621
622 /* If this insn is setting a MEM from a register equivalent to it,
623 this is the equivalencing insn. */
624 else if (set && GET_CODE (SET_DEST (set)) == MEM
625 && GET_CODE (SET_SRC (set)) == REG
626 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
627 && rtx_equal_p (SET_DEST (set),
628 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
629 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
630
631 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
632 scan_paradoxical_subregs (PATTERN (insn));
633 }
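
  /* Illustrative example (hypothetical pseudo number): an incoming argument
     living in pseudo 103 typically carries a REG_EQUIV note giving its
     parameter slot, e.g. (mem (plus (reg ap) (const_int 8))).  If pseudo 103
     fails to get a hard reg, the tables filled in above let find_reloads use
     that slot directly instead of allocating a fresh stack slot for it.  */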
634
635 /* Does this function require a frame pointer? */
636
637 frame_pointer_needed = (! flag_omit_frame_pointer
638#ifdef EXIT_IGNORE_STACK
639 /* ?? If EXIT_IGNORE_STACK is set, we will not save
640 and restore sp for alloca. So we can't eliminate
641 the frame pointer in that case. At some point,
642 we should improve this by emitting the
643 sp-adjusting insns for this case. */
644 || (current_function_calls_alloca
645 && EXIT_IGNORE_STACK)
646#endif
647 || FRAME_POINTER_REQUIRED);
648
649 num_eliminable = 0;
650
651 /* Initialize the table of registers to eliminate. The way we do this
652 depends on how the eliminable registers were defined. */
653#ifdef ELIMINABLE_REGS
654 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
655 {
656 ep->can_eliminate = ep->can_eliminate_previous
657 = (CAN_ELIMINATE (ep->from, ep->to)
658 && (ep->from != FRAME_POINTER_REGNUM || ! frame_pointer_needed));
659 }
660#else
661 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
662 = ! frame_pointer_needed;
663#endif
664
  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx will cause, e.g.,
     gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
669 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
670 {
671 num_eliminable += ep->can_eliminate;
672 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
673 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
674 }
675
676 num_labels = max_label_num () - get_first_label_num ();
677
678 /* Allocate the tables used to store offset information at labels. */
679 offsets_known_at = (char *) alloca (num_labels);
680 offsets_at
681 = (int (*)[NUM_ELIMINABLE_REGS])
682 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
683
684 offsets_known_at -= get_first_label_num ();
685 offsets_at -= get_first_label_num ();
686
687 /* Alter each pseudo-reg rtx to contain its hard reg number.
688 Assign stack slots to the pseudos that lack hard regs or equivalents.
689 Do not touch virtual registers. */
690
691 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
692 alter_reg (i, -1);
693
694 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
695 because the stack size may be a part of the offset computation for
696 register elimination. */
697 assign_stack_local (BLKmode, 0, 0);
698
699 /* If we have some registers we think can be eliminated, scan all insns to
700 see if there is an insn that sets one of these registers to something
701 other than itself plus a constant. If so, the register cannot be
702 eliminated. Doing this scan here eliminates an extra pass through the
703 main reload loop in the most common case where register elimination
704 cannot be done. */
705 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
706 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
707 || GET_CODE (insn) == CALL_INSN)
708 note_stores (PATTERN (insn), mark_not_eliminable);
709
710#ifndef REGISTER_CONSTRAINTS
711 /* If all the pseudo regs have hard regs,
712 except for those that are never referenced,
713 we know that no reloads are needed. */
714 /* But that is not true if there are register constraints, since
715 in that case some pseudos might be in the wrong kind of hard reg. */
716
717 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
718 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
719 break;
720
  if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
    return;
#endif
724
725 /* Compute the order of preference for hard registers to spill.
726 Store them by decreasing preference in potential_reload_regs. */
727
728 order_regs_for_reload ();
729
730 /* So far, no hard regs have been spilled. */
731 n_spills = 0;
732 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
733 spill_reg_order[i] = -1;
734
735 /* On most machines, we can't use any register explicitly used in the
736 rtl as a spill register. But on some, we have to. Those will have
737 taken care to keep the life of hard regs as short as possible. */
738
739#ifdef SMALL_REGISTER_CLASSES
740 CLEAR_HARD_REG_SET (forbidden_regs);
741#else
742 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
743#endif
744
745 /* Spill any hard regs that we know we can't eliminate. */
746 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
747 if (! ep->can_eliminate)
748 {
749 spill_hard_reg (ep->from, global, dumpfile, 1);
750 regs_ever_live[ep->from] = 1;
751 }
752
753 if (global)
754 for (i = 0; i < N_REG_CLASSES; i++)
755 {
756 basic_block_needs[i] = (char *)alloca (n_basic_blocks);
757 bzero (basic_block_needs[i], n_basic_blocks);
758 }
759
  /* From now on, we need to emit any moves without making new pseudos.  */
  reload_in_progress = 1;

763 /* This loop scans the entire function each go-round
764 and repeats until one repetition spills no additional hard regs. */
765
  /* This flag is set when a pseudo reg is spilled,
     to require another pass.  Note that getting an additional reload
     reg does not necessarily imply any pseudo reg was spilled;
     sometimes we find a reload reg that no pseudo reg was allocated in.  */
770 something_changed = 1;
771 /* This flag is set if there are any insns that require reloading. */
772 something_needs_reloads = 0;
773 /* This flag is set if there are any insns that require register
774 eliminations. */
775 something_needs_elimination = 0;
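  /* Rough sketch of one iteration of the loop below (added commentary,
     not original code): (1) reset elimination offsets and per-class needs;
     (2) run find_reloads on every insn to collect the maximum need for each
     register class, for groups, and for non-group registers; (3) subtract
     what the already-chosen spill regs provide; (4) if needs remain, spill
     more hard regs and set something_changed so the whole scan repeats.  */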
776 while (something_changed)
777 {
778 rtx after_call = 0;
779
780 /* For each class, number of reload regs needed in that class.
781 This is the maximum over all insns of the needs in that class
782 of the individual insn. */
783 int max_needs[N_REG_CLASSES];
784 /* For each class, size of group of consecutive regs
785 that is needed for the reloads of this class. */
786 int group_size[N_REG_CLASSES];
787 /* For each class, max number of consecutive groups needed.
788 (Each group contains group_size[CLASS] consecutive registers.) */
789 int max_groups[N_REG_CLASSES];
790 /* For each class, max number needed of regs that don't belong
791 to any of the groups. */
792 int max_nongroups[N_REG_CLASSES];
793 /* For each class, the machine mode which requires consecutive
794 groups of regs of that class.
795 If two different modes ever require groups of one class,
796 they must be the same size and equally restrictive for that class,
797 otherwise we can't handle the complexity. */
798 enum machine_mode group_mode[N_REG_CLASSES];
      /* Record the insn where each maximum need is first found.  */
      rtx max_needs_insn[N_REG_CLASSES];
      rtx max_groups_insn[N_REG_CLASSES];
      rtx max_nongroups_insn[N_REG_CLASSES];
      rtx x;
      int starting_frame_size = get_frame_size ();
      static char *reg_class_names[] = REG_CLASS_NAMES;

807 something_changed = 0;
808 bzero (max_needs, sizeof max_needs);
809 bzero (max_groups, sizeof max_groups);
810 bzero (max_nongroups, sizeof max_nongroups);
      bzero (max_needs_insn, sizeof max_needs_insn);
      bzero (max_groups_insn, sizeof max_groups_insn);
      bzero (max_nongroups_insn, sizeof max_nongroups_insn);
814 bzero (group_size, sizeof group_size);
815 for (i = 0; i < N_REG_CLASSES; i++)
816 group_mode[i] = VOIDmode;
817
818 /* Keep track of which basic blocks are needing the reloads. */
819 this_block = 0;
820
821 /* Remember whether any element of basic_block_needs
822 changes from 0 to 1 in this pass. */
823 new_basic_block_needs = 0;
824
825 /* Reset all offsets on eliminable registers to their initial values. */
826#ifdef ELIMINABLE_REGS
827 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
828 {
829 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
          ep->previous_offset = ep->offset
            = ep->max_offset = ep->initial_offset;
        }
833#else
834#ifdef INITIAL_FRAME_POINTER_OFFSET
835 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
836#else
837 if (!FRAME_POINTER_REQUIRED)
838 abort ();
839 reg_eliminate[0].initial_offset = 0;
840#endif
      reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
        = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif
844
845 num_not_at_initial_offset = 0;
846
847 bzero (&offsets_known_at[get_first_label_num ()], num_labels);
848
849 /* Set a known offset for each forced label to be at the initial offset
850 of each elimination. We do this because we assume that all
851 computed jumps occur from a location where each elimination is
852 at its initial offset. */
853
854 for (x = forced_labels; x; x = XEXP (x, 1))
855 if (XEXP (x, 0))
          set_label_offsets (XEXP (x, 0), NULL_RTX, 1);

858 /* For each pseudo register that has an equivalent location defined,
859 try to eliminate any eliminable registers (such as the frame pointer)
860 assuming initial offsets for the replacement register, which
861 is the normal case.
862
863 If the resulting location is directly addressable, substitute
864 the MEM we just got directly for the old REG.
865
866 If it is not addressable but is a constant or the sum of a hard reg
867 and constant, it is probably not addressable because the constant is
868 out of range, in that case record the address; we will generate
869 hairy code to compute the address in a register each time it is
         needed.
871
872 If the location is not addressable, but does not have one of the
873 above forms, assign a stack slot. We have to do this to avoid the
874 potential of producing lots of reloads if, e.g., a location involves
875 a pseudo that didn't get a hard register and has an equivalent memory
876 location that also involves a pseudo that didn't get a hard register.
877
878 Perhaps at some point we will improve reload_when_needed handling
879 so this problem goes away. But that's very hairy. */
880
881 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
882 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
883 {
            rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);

886 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
887 XEXP (x, 0)))
888 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
889 else if (CONSTANT_P (XEXP (x, 0))
890 || (GET_CODE (XEXP (x, 0)) == PLUS
891 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
892 && (REGNO (XEXP (XEXP (x, 0), 0))
893 < FIRST_PSEUDO_REGISTER)
894 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
895 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
896 else
897 {
              /* Make a new stack slot.  Then indicate that something
                 changed so we go back and recompute offsets for
                 eliminable registers because the allocation of memory
                 below might change some offset.  reg_equiv_{mem,address}
                 will be set up for this pseudo on the next pass around
                 the loop.  */
904 reg_equiv_memory_loc[i] = 0;
905 reg_equiv_init[i] = 0;
906 alter_reg (i, -1);
907 something_changed = 1;
908 }
909 }
      /* If we allocated another pseudo to the stack, redo elimination
         bookkeeping.  */
913 if (something_changed)
914 continue;
915
      /* If caller-saves needs a group, initialize the group to include
         the size and mode required for caller-saves.  */

      if (caller_save_group_size > 1)
        {
          group_mode[(int) caller_save_spill_class] = Pmode;
          group_size[(int) caller_save_spill_class] = caller_save_group_size;
        }

925 /* Compute the most additional registers needed by any instruction.
926 Collect information separately for each class of regs. */
927
928 for (insn = first; insn; insn = NEXT_INSN (insn))
929 {
930 if (global && this_block + 1 < n_basic_blocks
931 && insn == basic_block_head[this_block+1])
932 ++this_block;
933
934 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
935 might include REG_LABEL), we need to see what effects this
936 has on the known offsets at labels. */
937
938 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
939 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
940 && REG_NOTES (insn) != 0))
941 set_label_offsets (insn, insn, 0);
942
943 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
944 {
945 /* Nonzero means don't use a reload reg that overlaps
946 the place where a function value can be returned. */
947 rtx avoid_return_reg = 0;
948
949 rtx old_body = PATTERN (insn);
950 int old_code = INSN_CODE (insn);
951 rtx old_notes = REG_NOTES (insn);
952 int did_elimination = 0;
953
954 /* Initially, count RELOAD_OTHER reloads.
955 Later, merge in the other kinds. */
956 int insn_needs[N_REG_CLASSES];
957 int insn_groups[N_REG_CLASSES];
958 int insn_total_groups = 0;
959
960 /* Count RELOAD_FOR_INPUT_RELOAD_ADDRESS reloads. */
961 int insn_needs_for_inputs[N_REG_CLASSES];
962 int insn_groups_for_inputs[N_REG_CLASSES];
963 int insn_total_groups_for_inputs = 0;
964
965 /* Count RELOAD_FOR_OUTPUT_RELOAD_ADDRESS reloads. */
966 int insn_needs_for_outputs[N_REG_CLASSES];
967 int insn_groups_for_outputs[N_REG_CLASSES];
968 int insn_total_groups_for_outputs = 0;
969
970 /* Count RELOAD_FOR_OPERAND_ADDRESS reloads. */
971 int insn_needs_for_operands[N_REG_CLASSES];
972 int insn_groups_for_operands[N_REG_CLASSES];
973 int insn_total_groups_for_operands = 0;
974
#if 0  /* This wouldn't work nowadays, since optimize_bit_field
          looks for non-strict memory addresses.  */
977 /* Optimization: a bit-field instruction whose field
978 happens to be a byte or halfword in memory
979 can be changed to a move instruction. */
980
981 if (GET_CODE (PATTERN (insn)) == SET)
982 {
983 rtx dest = SET_DEST (PATTERN (insn));
984 rtx src = SET_SRC (PATTERN (insn));
985
986 if (GET_CODE (dest) == ZERO_EXTRACT
987 || GET_CODE (dest) == SIGN_EXTRACT)
988 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
989 if (GET_CODE (src) == ZERO_EXTRACT
990 || GET_CODE (src) == SIGN_EXTRACT)
991 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
992 }
993#endif
994
995 /* If needed, eliminate any eliminable registers. */
996 if (num_eliminable)
997 did_elimination = eliminate_regs_in_insn (insn, 0);
998
999#ifdef SMALL_REGISTER_CLASSES
1000 /* Set avoid_return_reg if this is an insn
1001 that might use the value of a function call. */
1002 if (GET_CODE (insn) == CALL_INSN)
1003 {
1004 if (GET_CODE (PATTERN (insn)) == SET)
1005 after_call = SET_DEST (PATTERN (insn));
1006 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1007 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1008 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1009 else
1010 after_call = 0;
1011 }
1012 else if (after_call != 0
1013 && !(GET_CODE (PATTERN (insn)) == SET
1014 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1015 {
1016 if (reg_mentioned_p (after_call, PATTERN (insn)))
1017 avoid_return_reg = after_call;
1018 after_call = 0;
1019 }
1020#endif /* SMALL_REGISTER_CLASSES */
1021
1022 /* Analyze the instruction. */
1023 find_reloads (insn, 0, spill_indirect_levels, global,
1024 spill_reg_order);
1025
1026 /* Remember for later shortcuts which insns had any reloads or
1027 register eliminations.
1028
1029 One might think that it would be worthwhile to mark insns
1030 that need register replacements but not reloads, but this is
1031 not safe because find_reloads may do some manipulation of
1032 the insn (such as swapping commutative operands), which would
1033 be lost when we restore the old pattern after register
1034 replacement. So the actions of find_reloads must be redone in
1035 subsequent passes or in reload_as_needed.
1036
1037 However, it is safe to mark insns that need reloads
1038 but not register replacement. */
1039
          PUT_MODE (insn, (did_elimination ? QImode
                           : n_reloads ? HImode
                           : VOIDmode));
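
          /* (Added commentary: the mode field of an INSN rtx is otherwise
             unused, so it is borrowed here as a per-insn flag.  Later scans
             in this file test GET_MODE (insn) to decide whether the insn
             needs its eliminations redone, needs reloads, or needs
             nothing.)  */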
1043
1044 /* Discard any register replacements done. */
1045 if (did_elimination)
1046 {
1047 obstack_free (&reload_obstack, reload_firstobj);
1048 PATTERN (insn) = old_body;
1049 INSN_CODE (insn) = old_code;
1050 REG_NOTES (insn) = old_notes;
1051 something_needs_elimination = 1;
1052 }
1053
          /* If this insn has no reloads, we need not do anything except
             in the case of a CALL_INSN when we have caller-saves and
             caller-save needs reloads.  */

          if (n_reloads == 0
              && ! (GET_CODE (insn) == CALL_INSN
                    && caller_save_spill_class != NO_REGS))
            continue;
1062
1063 something_needs_reloads = 1;
1064
          for (i = 0; i < N_REG_CLASSES; i++)
            {
              insn_needs[i] = 0, insn_groups[i] = 0;
              insn_needs_for_inputs[i] = 0, insn_groups_for_inputs[i] = 0;
              insn_needs_for_outputs[i] = 0, insn_groups_for_outputs[i] = 0;
              insn_needs_for_operands[i] = 0, insn_groups_for_operands[i] = 0;
            }

1073 /* Count each reload once in every class
1074 containing the reload's own class. */
1075
1076 for (i = 0; i < n_reloads; i++)
1077 {
1078 register enum reg_class *p;
              enum reg_class class = reload_reg_class[i];
1080 int size;
1081 enum machine_mode mode;
1082 int *this_groups;
1083 int *this_needs;
1084 int *this_total_groups;
1085
1086 /* Don't count the dummy reloads, for which one of the
1087 regs mentioned in the insn can be used for reloading.
1088 Don't count optional reloads.
1089 Don't count reloads that got combined with others. */
1090 if (reload_reg_rtx[i] != 0
1091 || reload_optional[i] != 0
1092 || (reload_out[i] == 0 && reload_in[i] == 0
1093 && ! reload_secondary_p[i]))
1094 continue;
1095
              /* Show that a reload register of this class is needed
                 in this basic block.  We do not use insn_needs and
                 insn_groups because they are overly conservative for
                 this purpose.  */
              if (global && ! basic_block_needs[(int) class][this_block])
                {
                  basic_block_needs[(int) class][this_block] = 1;
                  new_basic_block_needs = 1;
                }

1106 /* Decide which time-of-use to count this reload for. */
1107 switch (reload_when_needed[i])
1108 {
1109 case RELOAD_OTHER:
1110 case RELOAD_FOR_OUTPUT:
1111 case RELOAD_FOR_INPUT:
1112 this_needs = insn_needs;
1113 this_groups = insn_groups;
1114 this_total_groups = &insn_total_groups;
1115 break;
1116
1117 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
1118 this_needs = insn_needs_for_inputs;
1119 this_groups = insn_groups_for_inputs;
1120 this_total_groups = &insn_total_groups_for_inputs;
1121 break;
1122
1123 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
1124 this_needs = insn_needs_for_outputs;
1125 this_groups = insn_groups_for_outputs;
1126 this_total_groups = &insn_total_groups_for_outputs;
1127 break;
1128
1129 case RELOAD_FOR_OPERAND_ADDRESS:
1130 this_needs = insn_needs_for_operands;
1131 this_groups = insn_groups_for_operands;
1132 this_total_groups = &insn_total_groups_for_operands;
1133 break;
1134 }
1135
1136 mode = reload_inmode[i];
1137 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1138 mode = reload_outmode[i];
              size = CLASS_MAX_NREGS (class, mode);
1140 if (size > 1)
1141 {
1142 enum machine_mode other_mode, allocate_mode;
1143
1144 /* Count number of groups needed separately from
1145 number of individual regs needed. */
                  this_groups[(int) class]++;
                  p = reg_class_superclasses[(int) class];
1148 while (*p != LIM_REG_CLASSES)
1149 this_groups[(int) *p++]++;
1150 (*this_total_groups)++;
1151
1152 /* Record size and mode of a group of this class. */
1153 /* If more than one size group is needed,
1154 make all groups the largest needed size. */
                  if (group_size[(int) class] < size)
                    {
                      other_mode = group_mode[(int) class];
                      allocate_mode = mode;

                      group_size[(int) class] = size;
                      group_mode[(int) class] = mode;
                    }
1163 else
1164 {
1165 other_mode = mode;
                      allocate_mode = group_mode[(int) class];
                    }
1168
1169 /* Crash if two dissimilar machine modes both need
1170 groups of consecutive regs of the same class. */
1171
1172 if (other_mode != VOIDmode
1173 && other_mode != allocate_mode
1174 && ! modes_equiv_for_class_p (allocate_mode,
1175 other_mode,
                                                    class))
                    abort ();
                }
1179 else if (size == 1)
1180 {
                  this_needs[(int) class] += 1;
                  p = reg_class_superclasses[(int) class];
1183 while (*p != LIM_REG_CLASSES)
1184 this_needs[(int) *p++] += 1;
1185 }
1186 else
1187 abort ();
1188 }
1189
1190 /* All reloads have been counted for this insn;
1191 now merge the various times of use.
1192 This sets insn_needs, etc., to the maximum total number
1193 of registers needed at any point in this insn. */
1194
1195 for (i = 0; i < N_REG_CLASSES; i++)
1196 {
1197 int this_max;
1198 this_max = insn_needs_for_inputs[i];
1199 if (insn_needs_for_outputs[i] > this_max)
1200 this_max = insn_needs_for_outputs[i];
1201 if (insn_needs_for_operands[i] > this_max)
1202 this_max = insn_needs_for_operands[i];
1203 insn_needs[i] += this_max;
1204 this_max = insn_groups_for_inputs[i];
1205 if (insn_groups_for_outputs[i] > this_max)
1206 this_max = insn_groups_for_outputs[i];
1207 if (insn_groups_for_operands[i] > this_max)
1208 this_max = insn_groups_for_operands[i];
1209 insn_groups[i] += this_max;
            }

          insn_total_groups += MAX (insn_total_groups_for_inputs,
                                    MAX (insn_total_groups_for_outputs,
                                         insn_total_groups_for_operands));
1215
          /* If this is a CALL_INSN and caller-saves will need
             a spill register, act as if the spill register is
             needed for this insn.  However, the spill register
             can be used by any reload of this insn, so we only
             need do something if no need for that class has
             been recorded.

             The assumption that every CALL_INSN will trigger a
             caller-save is highly conservative; however, the number
             of cases where caller-saves will need a spill register but
             a block containing a CALL_INSN won't need a spill register
             of that class should be quite rare.

             If a group is needed, the size and mode of the group will
             have been set up at the beginning of this loop.  */
1231
1232 if (GET_CODE (insn) == CALL_INSN
1233 && caller_save_spill_class != NO_REGS)
1234 {
1235 int *caller_save_needs
1236 = (caller_save_group_size > 1 ? insn_groups : insn_needs);
1237
1238 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1239 {
1240 register enum reg_class *p
1241 = reg_class_superclasses[(int) caller_save_spill_class];
1242
1243 caller_save_needs[(int) caller_save_spill_class]++;
1244
1245 while (*p != LIM_REG_CLASSES)
0aaa6af8 1246 caller_save_needs[(int) *p++] += 1;
a8efe40d
RK
1247 }
1248
1249 if (caller_save_group_size > 1)
1250 insn_total_groups = MAX (insn_total_groups, 1);
d1c1397e
RS
1251
1252
1253 /* Show that this basic block will need a register of
1254 this class. */
1255
1256 if (global
1257 && ! (basic_block_needs[(int) caller_save_spill_class]
1258 [this_block]))
1259 {
1260 basic_block_needs[(int) caller_save_spill_class]
1261 [this_block] = 1;
1262 new_basic_block_needs = 1;
1263 }
            }

#ifdef SMALL_REGISTER_CLASSES
1267 /* If this insn stores the value of a function call,
1268 and that value is in a register that has been spilled,
1269 and if the insn needs a reload in a class
1270 that might use that register as the reload register,
             then add an extra need in that class.
1272 This makes sure we have a register available that does
1273 not overlap the return value. */
1274 if (avoid_return_reg)
1275 {
1276 int regno = REGNO (avoid_return_reg);
1277 int nregs
1278 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1279 int r;
1280 int inc_groups = 0;
1281 for (r = regno; r < regno + nregs; r++)
1282 if (spill_reg_order[r] >= 0)
1283 for (i = 0; i < N_REG_CLASSES; i++)
1284 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1285 {
1286 if (insn_needs[i] > 0)
1287 insn_needs[i]++;
1288 if (insn_groups[i] > 0
1289 && nregs > 1)
1290 inc_groups = 1;
1291 }
1292 if (inc_groups)
1293 insn_groups[i]++;
1294 }
1295#endif /* SMALL_REGISTER_CLASSES */
1296
1297 /* For each class, collect maximum need of any insn. */
1298
1299 for (i = 0; i < N_REG_CLASSES; i++)
1300 {
1301 if (max_needs[i] < insn_needs[i])
                {
                  max_needs[i] = insn_needs[i];
                  max_needs_insn[i] = insn;
                }
              if (max_groups[i] < insn_groups[i])
                {
                  max_groups[i] = insn_groups[i];
                  max_groups_insn[i] = insn;
                }
              if (insn_total_groups > 0)
                if (max_nongroups[i] < insn_needs[i])
                  {
                    max_nongroups[i] = insn_needs[i];
                    max_nongroups_insn[i] = insn;
                  }
            }
1318 }
1319 /* Note that there is a continue statement above. */
1320 }
1321
      /* If we allocated any new memory locations, make another pass
         since it might have changed elimination offsets.  */
      if (starting_frame_size != get_frame_size ())
        something_changed = 1;

      if (dumpfile)
1328 for (i = 0; i < N_REG_CLASSES; i++)
1329 {
1330 if (max_needs[i] > 0)
1331 fprintf (dumpfile,
1332 ";; Need %d reg%s of class %s (for insn %d).\n",
1333 max_needs[i], max_needs[i] == 1 ? "" : "s",
1334 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1335 if (max_nongroups[i] > 0)
1336 fprintf (dumpfile,
1337 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1338 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1339 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1340 if (max_groups[i] > 0)
1341 fprintf (dumpfile,
1342 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1343 max_groups[i], max_groups[i] == 1 ? "" : "s",
1344 mode_name[(int) group_mode[i]],
1345 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1346 }
1347
      /* If we have caller-saves, set up the save areas and see if caller-save
         will need a spill register.  */

      if (caller_save_needed
          && ! setup_save_areas (&something_changed)
          && caller_save_spill_class == NO_REGS)
        {
          /* The class we will need depends on whether the machine
             supports the sum of two registers for an address; see
             find_address_reloads for details.  */

          caller_save_spill_class
            = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
          caller_save_group_size
            = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
          something_changed = 1;
        }
1365
1366 /* Now deduct from the needs for the registers already
1367 available (already spilled). */
1368
1369 CLEAR_HARD_REG_SET (counted_for_groups);
1370 CLEAR_HARD_REG_SET (counted_for_nongroups);
1371
1372 /* First find all regs alone in their class
1373 and count them (if desired) for non-groups.
1374 We would be screwed if a group took the only reg in a class
         for which a non-group reload is needed.
1376 (Note there is still a bug; if a class has 2 regs,
1377 both could be stolen by groups and we would lose the same way.
1378 With luck, no machine will need a nongroup in a 2-reg class.) */
1379
1380 for (i = 0; i < n_spills; i++)
1381 {
1382 register enum reg_class *p;
1383 class = (int) REGNO_REG_CLASS (spill_regs[i]);
1384
1385 if (reg_class_size[class] == 1 && max_nongroups[class] > 0)
1386 {
1387 max_needs[class]--;
1388 p = reg_class_superclasses[class];
1389 while (*p != LIM_REG_CLASSES)
1390 max_needs[(int) *p++]--;
1391
1392 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1393 max_nongroups[class]--;
1394 p = reg_class_superclasses[class];
1395 while (*p != LIM_REG_CLASSES)
1396 {
1397 if (max_nongroups[(int) *p] > 0)
1398 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1399 max_nongroups[(int) *p++]--;
1400 }
1401 }
1402 }
1403
1404 /* Now find all consecutive groups of spilled registers
1405 and mark each group off against the need for such groups.
1406 But don't count them against ordinary need, yet. */
1407
1408 count_possible_groups (group_size, group_mode, max_groups);
1409
      /* Now count all spill regs against the individual need.
         This includes those counted above for groups,
         but not those previously counted for nongroups.
1413
1414 Those that weren't counted_for_groups can also count against
1415 the not-in-group need. */
1416
1417 for (i = 0; i < n_spills; i++)
1418 {
1419 register enum reg_class *p;
1420 class = (int) REGNO_REG_CLASS (spill_regs[i]);
1421
1422 /* Those counted at the beginning shouldn't be counted twice. */
1423 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
1424 {
1425 max_needs[class]--;
1426 p = reg_class_superclasses[class];
1427 while (*p != LIM_REG_CLASSES)
1428 max_needs[(int) *p++]--;
1429
1430 if (! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i]))
1431 {
1432 if (max_nongroups[class] > 0)
1433 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1434 max_nongroups[class]--;
1435 p = reg_class_superclasses[class];
1436 while (*p != LIM_REG_CLASSES)
1437 {
1438 if (max_nongroups[(int) *p] > 0)
1439 SET_HARD_REG_BIT (counted_for_nongroups,
1440 spill_regs[i]);
1441 max_nongroups[(int) *p++]--;
1442 }
1443 }
1444 }
1445 }
1446
      /* See if anything that happened changes which eliminations are valid.
         For example, on the Sparc, whether or not the frame pointer can
         be eliminated can depend on what registers have been used.  We need
         not check some conditions again (such as flag_omit_frame_pointer)
         since they can't have changed.  */
1452
1453 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1454 if ((ep->from == FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1455#ifdef ELIMINABLE_REGS
1456 || ! CAN_ELIMINATE (ep->from, ep->to)
1457#endif
1458 )
1459 ep->can_eliminate = 0;
1460
1461 /* Look for the case where we have discovered that we can't replace
1462 register A with register B and that means that we will now be
1463 trying to replace register A with register C. This means we can
1464 no longer replace register C with register B and we need to disable
1465 such an elimination, if it exists. This occurs often with A == ap,
1466 B == sp, and C == fp. */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1469 {
1470 struct elim_table *op;
1471 register int new_to = -1;
1472
1473 if (! ep->can_eliminate && ep->can_eliminate_previous)
1474 {
1475 /* Find the current elimination for ep->from, if there is a
1476 new one. */
1477 for (op = reg_eliminate;
1478 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1479 if (op->from == ep->from && op->can_eliminate)
1480 {
1481 new_to = op->to;
1482 break;
1483 }
1484
1485 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1486 disable it. */
1487 for (op = reg_eliminate;
1488 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1489 if (op->from == new_to && op->to == ep->to)
1490 op->can_eliminate = 0;
1491 }
1492 }
1493
1494 /* See if any registers that we thought we could eliminate the previous
1495 time are no longer eliminable. If so, something has changed and we
1496 must spill the register. Also, recompute the number of eliminable
1497 registers and see if the frame pointer is needed; it is if there is
1498 no elimination of the frame pointer that we can perform. */
1499
1500 frame_pointer_needed = 1;
1501 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1502 {
1503 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM)
1504 frame_pointer_needed = 0;
1505
1506 if (! ep->can_eliminate && ep->can_eliminate_previous)
1507 {
1508 ep->can_eliminate_previous = 0;
1509 spill_hard_reg (ep->from, global, dumpfile, 1);
1510 regs_ever_live[ep->from] = 1;
1511 something_changed = 1;
1512 num_eliminable--;
1513 }
1514 }
1515
1516 /* If all needs are met, we win. */
1517
1518 for (i = 0; i < N_REG_CLASSES; i++)
1519 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1520 break;
1521 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1522 break;
1523
1524 /* Not all needs are met; must spill more hard regs. */
1525
1526 /* If any element of basic_block_needs changed from 0 to 1,
1527 re-spill all the regs already spilled. This may spill
1528 additional pseudos that didn't spill before. */
1529
1530 if (new_basic_block_needs)
1531 for (i = 0; i < n_spills; i++)
1532 something_changed
1533 |= spill_hard_reg (spill_regs[i], global, dumpfile, 0);
1534
1535 /* Now find more reload regs to satisfy the remaining need
1536 Do it by ascending class number, since otherwise a reg
1537 might be spilled for a big class and might fail to count
1538 for a smaller class even though it belongs to that class.
1539
1540 Count spilled regs in `spills', and add entries to
1541 `spill_regs' and `spill_reg_order'.
1542
1543 ??? Note there is a problem here.
1544 When there is a need for a group in a high-numbered class,
1545 and also need for non-group regs that come from a lower class,
1546 the non-group regs are chosen first. If there aren't many regs,
1547 they might leave no room for a group.
1548
1549 This was happening on the 386. To fix it, we added the code
1550 that calls possible_group_p, so that the lower class won't
1551 break up the last possible group.
1552
1553 Really fixing the problem would require changes above
1554 in counting the regs already spilled, and in choose_reload_regs.
1555 It might be hard to avoid introducing bugs there. */
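	/* Illustrative scenario (not in the original source): suppose
	   GENERAL_REGS still needs one 2-register group for a DImode reload
	   while a smaller class needs one single register.  If the
	   single-register spill grabbed the middle register of the only
	   remaining consecutive pair, no group could be formed at all;
	   possible_group_p, used below, is what keeps the single-register
	   choice from doing that.  */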
1556
1557 for (class = 0; class < N_REG_CLASSES; class++)
1558 {
1559 /* First get the groups of registers.
1560 If we got single registers first, we might fragment
1561 possible groups. */
1562 while (max_groups[class] > 0)
1563 {
1564 /* If any single spilled regs happen to form groups,
1565 count them now. Maybe we don't really need
1566 to spill another group. */
1567 count_possible_groups (group_size, group_mode, max_groups);
1568
1569 /* Groups of size 2 (the only groups used on most machines)
1570 are treated specially. */
1571 if (group_size[class] == 2)
1572 {
1573 /* First, look for a register that will complete a group. */
1574 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1575 {
1576 int j = potential_reload_regs[i];
1577 int other;
1578 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1579 &&
1580 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1581 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1582 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1583 && HARD_REGNO_MODE_OK (other, group_mode[class])
1584 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1585 other)
1586 /* We don't want one part of another group.
1587 We could get "two groups" that overlap! */
1588 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1589 ||
1590 (j < FIRST_PSEUDO_REGISTER - 1
1591 && (other = j + 1, spill_reg_order[other] >= 0)
1592 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1593 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1594 && HARD_REGNO_MODE_OK (j, group_mode[class])
1595 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1596 other)
1597 && ! TEST_HARD_REG_BIT (counted_for_groups,
1598 other))))
1599 {
1600 register enum reg_class *p;
1601
1602 /* We have found one that will complete a group,
1603 so count off one group as provided. */
1604 max_groups[class]--;
1605 p = reg_class_superclasses[class];
1606 while (*p != LIM_REG_CLASSES)
1607 max_groups[(int) *p++]--;
1608
1609 /* Indicate both these regs are part of a group. */
1610 SET_HARD_REG_BIT (counted_for_groups, j);
1611 SET_HARD_REG_BIT (counted_for_groups, other);
1612 break;
1613 }
1614 }
1615 /* We can't complete a group, so start one. */
1616 if (i == FIRST_PSEUDO_REGISTER)
1617 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1618 {
1619 int j = potential_reload_regs[i];
1620 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1621 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1622 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1623 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1624 && HARD_REGNO_MODE_OK (j, group_mode[class])
1625 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1626 j + 1))
1627 break;
1628 }
1629
1630 /* I should be the index in potential_reload_regs
1631 of the new reload reg we have found. */
1632
1633 if (i >= FIRST_PSEUDO_REGISTER)
1634 {
1635 /* There are no groups left to spill. */
1636 spill_failure (max_groups_insn[class]);
1637 failure = 1;
1638 goto failed;
1639 }
1640 else
1641 something_changed
 1642		  |= new_spill_reg (i, class, max_needs, NULL_PTR,
 1643				    global, dumpfile);
1644 }
1645 else
1646 {
1647 /* For groups of more than 2 registers,
1648 look for a sufficient sequence of unspilled registers,
1649 and spill them all at once. */
1650 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1651 {
1652 int j = potential_reload_regs[i];
1653 int k;
1654 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1655 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1656 {
1657 /* Check each reg in the sequence. */
1658 for (k = 0; k < group_size[class]; k++)
1659 if (! (spill_reg_order[j + k] < 0
1660 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1661 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1662 break;
1663 /* We got a full sequence, so spill them all. */
1664 if (k == group_size[class])
1665 {
1666 register enum reg_class *p;
1667 for (k = 0; k < group_size[class]; k++)
1668 {
1669 int idx;
1670 SET_HARD_REG_BIT (counted_for_groups, j + k);
1671 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1672 if (potential_reload_regs[idx] == j + k)
1673 break;
 1674			    if (idx >= FIRST_PSEUDO_REGISTER)
1675 {
1676 /* There are no groups left. */
1677 spill_failure (max_groups_insn[class]);
1678 failure = 1;
1679 goto failed;
1680 }
1681 else
1682 something_changed
1683 |= new_spill_reg (idx, class,
1684 max_needs, NULL_PTR,
 1685					       global, dumpfile);
1686 }
1687
1688 /* We have found one that will complete a group,
1689 so count off one group as provided. */
1690 max_groups[class]--;
1691 p = reg_class_superclasses[class];
1692 while (*p != LIM_REG_CLASSES)
1693 max_groups[(int) *p++]--;
1694
1695 break;
1696 }
1697 }
1698 }
1699 /* We couldn't find any registers for this reload.
1700 Abort to avoid going into an infinite loop. */
1701 if (i == FIRST_PSEUDO_REGISTER)
1702 abort ();
1703 }
1704 }
1705
1706 /* Now similarly satisfy all need for single registers. */
1707
1708 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1709 {
1710 /* Consider the potential reload regs that aren't
1711 yet in use as reload regs, in order of preference.
1712 Find the most preferred one that's in this class. */
1713
1714 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1715 if (potential_reload_regs[i] >= 0
1716 && TEST_HARD_REG_BIT (reg_class_contents[class],
1717 potential_reload_regs[i])
1718 /* If this reg will not be available for groups,
1719 pick one that does not foreclose possible groups.
1720 This is a kludge, and not very general,
1721 but it should be sufficient to make the 386 work,
1722 and the problem should not occur on machines with
1723 more registers. */
1724 && (max_nongroups[class] == 0
1725 || possible_group_p (potential_reload_regs[i], max_groups)))
1726 break;
1727
1728 /* If we couldn't get a register, try to get one even if we
1729 might foreclose possible groups. This may cause problems
1730 later, but that's better than aborting now, since it is
1731 possible that we will, in fact, be able to form the needed
1732 group even with this allocation. */
1733
1734 if (i >= FIRST_PSEUDO_REGISTER
1735 && (asm_noperands (max_needs[class] > 0
1736 ? max_needs_insn[class]
1737 : max_nongroups_insn[class])
1738 < 0))
1739 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1740 if (potential_reload_regs[i] >= 0
1741 && TEST_HARD_REG_BIT (reg_class_contents[class],
1742 potential_reload_regs[i]))
1743 break;
1744
1745 /* I should be the index in potential_reload_regs
1746 of the new reload reg we have found. */
1747
1748 if (i >= FIRST_PSEUDO_REGISTER)
1749 {
1750 /* There are no possible registers left to spill. */
1751 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1752 : max_nongroups_insn[class]);
1753 failure = 1;
1754 goto failed;
1755 }
1756 else
1757 something_changed
1758 |= new_spill_reg (i, class, max_needs, max_nongroups,
1759 global, dumpfile);
1760 }
1761 }
1762 }
1763
1764 /* If global-alloc was run, notify it of any register eliminations we have
1765 done. */
1766 if (global)
1767 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1768 if (ep->can_eliminate)
1769 mark_elimination (ep->from, ep->to);
1770
 1771 /* Insert code to save and restore call-clobbered hard regs
 1772    around calls.  Tell what mode to use so that we will process
 1773    those insns in reload_as_needed if we have to.  */
1774
1775 if (caller_save_needed)
1776 save_call_clobbered_regs (num_eliminable ? QImode
1777 : caller_save_spill_class != NO_REGS ? HImode
1778 : VOIDmode);
1779
1780 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1781 If that insn didn't set the register (i.e., it copied the register to
1782 memory), just delete that insn instead of the equivalencing insn plus
1783 anything now dead. If we call delete_dead_insn on that insn, we may
 1784      delete the insn that actually sets the register if the register dies
 1785      there, which would be incorrect.  */
1786
1787 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1788 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1789 && GET_CODE (reg_equiv_init[i]) != NOTE)
1790 {
1791 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1792 delete_dead_insn (reg_equiv_init[i]);
1793 else
1794 {
1795 PUT_CODE (reg_equiv_init[i], NOTE);
1796 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1797 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1798 }
1799 }
1800
1801 /* Use the reload registers where necessary
1802 by generating move instructions to move the must-be-register
1803 values into or out of the reload registers. */
1804
1805 if (something_needs_reloads || something_needs_elimination
1806 || (caller_save_needed && num_eliminable)
1807 || caller_save_spill_class != NO_REGS)
1808 reload_as_needed (first, global);
1809
1810 reload_in_progress = 0;
1811
1812 /* Come here (with failure set nonzero) if we can't get enough spill regs
1813 and we decide not to abort about it. */
1814 failed:
1815
1816 /* Now eliminate all pseudo regs by modifying them into
1817 their equivalent memory references.
1818 The REG-rtx's for the pseudos are modified in place,
1819 so all insns that used to refer to them now refer to memory.
1820
1821 For a reg that has a reg_equiv_address, all those insns
1822 were changed by reloading so that no insns refer to it any longer;
1823 but the DECL_RTL of a variable decl may refer to it,
1824 and if so this causes the debugging info to mention the variable. */
1825
1826 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1827 {
1828 rtx addr = 0;
 1829       int in_struct = 0;
 1830       if (reg_equiv_mem[i])
1831 {
1832 addr = XEXP (reg_equiv_mem[i], 0);
1833 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1834 }
1835 if (reg_equiv_address[i])
1836 addr = reg_equiv_address[i];
1837 if (addr)
1838 {
1839 if (reg_renumber[i] < 0)
1840 {
1841 rtx reg = regno_reg_rtx[i];
1842 XEXP (reg, 0) = addr;
1843 REG_USERVAR_P (reg) = 0;
 1844 	      MEM_IN_STRUCT_P (reg) = in_struct;
1845 PUT_CODE (reg, MEM);
1846 }
1847 else if (reg_equiv_mem[i])
1848 XEXP (reg_equiv_mem[i], 0) = addr;
1849 }
1850 }
1851
1852#ifdef PRESERVE_DEATH_INFO_REGNO_P
1853 /* Make a pass over all the insns and remove death notes for things that
1854 are no longer registers or no longer die in the insn (e.g., an input
1855 and output pseudo being tied). */
1856
1857 for (insn = first; insn; insn = NEXT_INSN (insn))
1858 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1859 {
1860 rtx note, next;
1861
1862 for (note = REG_NOTES (insn); note; note = next)
1863 {
1864 next = XEXP (note, 1);
1865 if (REG_NOTE_KIND (note) == REG_DEAD
1866 && (GET_CODE (XEXP (note, 0)) != REG
1867 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1868 remove_note (insn, note);
1869 }
1870 }
1871#endif
1872
1873 /* Indicate that we no longer have known memory locations or constants. */
1874 reg_equiv_constant = 0;
1875 reg_equiv_memory_loc = 0;
1876
1877 return failure;
1878}
1879\f
1880/* Nonzero if, after spilling reg REGNO for non-groups,
1881 it will still be possible to find a group if we still need one. */
1882
1883static int
1884possible_group_p (regno, max_groups)
1885 int regno;
1886 int *max_groups;
1887{
1888 int i;
1889 int class = (int) NO_REGS;
1890
1891 for (i = 0; i < (int) N_REG_CLASSES; i++)
1892 if (max_groups[i] > 0)
1893 {
1894 class = i;
1895 break;
1896 }
1897
1898 if (class == (int) NO_REGS)
1899 return 1;
1900
1901 /* Consider each pair of consecutive registers. */
1902 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
1903 {
1904 /* Ignore pairs that include reg REGNO. */
1905 if (i == regno || i + 1 == regno)
1906 continue;
1907
1908 /* Ignore pairs that are outside the class that needs the group.
1909 ??? Here we fail to handle the case where two different classes
1910 independently need groups. But this never happens with our
1911 current machine descriptions. */
1912 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
1913 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
1914 continue;
1915
1916 /* A pair of consecutive regs we can still spill does the trick. */
1917 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
1918 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
1919 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
1920 return 1;
1921
1922 /* A pair of one already spilled and one we can spill does it
1923 provided the one already spilled is not otherwise reserved. */
1924 if (spill_reg_order[i] < 0
1925 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
1926 && spill_reg_order[i + 1] >= 0
1927 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
1928 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
1929 return 1;
1930 if (spill_reg_order[i + 1] < 0
1931 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
1932 && spill_reg_order[i] >= 0
1933 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
1934 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
1935 return 1;
1936 }
1937
1938 return 0;
1939}
1940\f
1941/* Count any groups that can be formed from the registers recently spilled.
1942 This is done class by class, in order of ascending class number. */
1943
1944static void
1945count_possible_groups (group_size, group_mode, max_groups)
1946 int *group_size, *max_groups;
1947 enum machine_mode *group_mode;
1948{
1949 int i;
1950 /* Now find all consecutive groups of spilled registers
1951 and mark each group off against the need for such groups.
1952 But don't count them against ordinary need, yet. */
1953
1954 for (i = 0; i < N_REG_CLASSES; i++)
1955 if (group_size[i] > 1)
1956 {
1957 char regmask[FIRST_PSEUDO_REGISTER];
1958 int j;
1959
1960 bzero (regmask, sizeof regmask);
1961 /* Make a mask of all the regs that are spill regs in class I. */
1962 for (j = 0; j < n_spills; j++)
1963 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
1964 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
1965 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1966 spill_regs[j]))
1967 regmask[spill_regs[j]] = 1;
1968 /* Find each consecutive group of them. */
1969 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
1970 if (regmask[j] && j + group_size[i] <= FIRST_PSEUDO_REGISTER
1971 /* Next line in case group-mode for this class
1972 demands an even-odd pair. */
1973 && HARD_REGNO_MODE_OK (j, group_mode[i]))
1974 {
1975 int k;
1976 for (k = 1; k < group_size[i]; k++)
1977 if (! regmask[j + k])
1978 break;
1979 if (k == group_size[i])
1980 {
1981 /* We found a group. Mark it off against this class's
1982 need for groups, and against each superclass too. */
1983 register enum reg_class *p;
1984 max_groups[i]--;
1985 p = reg_class_superclasses[i];
1986 while (*p != LIM_REG_CLASSES)
1987 max_groups[(int) *p++]--;
 1988	      /* Don't count these registers again.  */
1989 for (k = 0; k < group_size[i]; k++)
1990 SET_HARD_REG_BIT (counted_for_groups, j + k);
1991 }
1992 /* Skip to the last reg in this group. When j is incremented
1993 above, it will then point to the first reg of the next
1994 possible group. */
1995 j += k - 1;
1996 }
1997 }
1998
1999}
2000\f
2001/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2002 another mode that needs to be reloaded for the same register class CLASS.
2003 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2004 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2005
2006 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2007 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2008 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2009 causes unnecessary failures on machines requiring alignment of register
2010 groups when the two modes are different sizes, because the larger mode has
2011 more strict alignment rules than the smaller mode. */
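/* Hypothetical example (not part of the original source): if CLASS is
   FLOAT_REGS, ALLOCATE_MODE is DFmode and OTHER_MODE is SFmode, then any
   register in the class that accepts DFmode but rejects SFmode makes this
   function return 0; if every DFmode-capable register in the class also
   accepts SFmode, it returns 1.  */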
2012
2013static int
2014modes_equiv_for_class_p (allocate_mode, other_mode, class)
2015 enum machine_mode allocate_mode, other_mode;
2016 enum reg_class class;
2017{
2018 register int regno;
2019 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2020 {
2021 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2022 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2023 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2024 return 0;
2025 }
2026 return 1;
2027}
2028
2029/* Handle the failure to find a register to spill.
2030 INSN should be one of the insns which needed this particular spill reg. */
2031
2032static void
2033spill_failure (insn)
2034 rtx insn;
2035{
2036 if (asm_noperands (PATTERN (insn)) >= 0)
2037 error_for_asm (insn, "`asm' needs too many reloads");
2038 else
2039 abort ();
2040}
2041
2042/* Add a new register to the tables of available spill-registers
2043 (as well as spilling all pseudos allocated to the register).
2044 I is the index of this register in potential_reload_regs.
2045 CLASS is the regclass whose need is being satisfied.
2046 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2047 so that this register can count off against them.
2048 MAX_NONGROUPS is 0 if this register is part of a group.
2049 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
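/* Worked example (illustrative only, not from the original source): on a
   386-like target where GENERAL_REGS is a superclass of AREG, spilling an
   AREG register decrements max_needs[AREG] and, via the
   reg_class_superclasses walk below, max_needs[GENERAL_REGS] as well; the
   same walk is applied to MAX_NONGROUPS when the register is not being
   counted as part of a group.  */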
2050
2051static int
2052new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2053 int i;
2054 int class;
2055 int *max_needs;
2056 int *max_nongroups;
2057 int global;
2058 FILE *dumpfile;
2059{
2060 register enum reg_class *p;
2061 int val;
2062 int regno = potential_reload_regs[i];
2063
2064 if (i >= FIRST_PSEUDO_REGISTER)
2065 abort (); /* Caller failed to find any register. */
2066
2067 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2068 fatal ("fixed or forbidden register was spilled.\n\
2069This may be due to a compiler bug or to impossible asm statements.");
2070
2071 /* Make reg REGNO an additional reload reg. */
2072
2073 potential_reload_regs[i] = -1;
2074 spill_regs[n_spills] = regno;
2075 spill_reg_order[regno] = n_spills;
2076 if (dumpfile)
2077 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2078
2079 /* Clear off the needs we just satisfied. */
2080
2081 max_needs[class]--;
2082 p = reg_class_superclasses[class];
2083 while (*p != LIM_REG_CLASSES)
2084 max_needs[(int) *p++]--;
2085
2086 if (max_nongroups && max_nongroups[class] > 0)
2087 {
2088 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2089 max_nongroups[class]--;
2090 p = reg_class_superclasses[class];
2091 while (*p != LIM_REG_CLASSES)
2092 max_nongroups[(int) *p++]--;
2093 }
2094
2095 /* Spill every pseudo reg that was allocated to this reg
2096 or to something that overlaps this reg. */
2097
2098 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2099
2100 /* If there are some registers still to eliminate and this register
2101 wasn't ever used before, additional stack space may have to be
2102 allocated to store this register. Thus, we may have changed the offset
2103 between the stack and frame pointers, so mark that something has changed.
2104 (If new pseudos were spilled, thus requiring more space, VAL would have
2105 been set non-zero by the call to spill_hard_reg above since additional
 2106    reloads may be needed in that case.)
2107
2108 One might think that we need only set VAL to 1 if this is a call-used
2109 register. However, the set of registers that must be saved by the
2110 prologue is not identical to the call-used set. For example, the
2111 register used by the call insn for the return PC is a call-used register,
2112 but must be saved by the prologue. */
2113 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2114 val = 1;
2115
2116 regs_ever_live[spill_regs[n_spills]] = 1;
2117 n_spills++;
2118
2119 return val;
2120}
2121\f
 2122/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2123 data that is dead in INSN. */
2124
2125static void
2126delete_dead_insn (insn)
2127 rtx insn;
2128{
2129 rtx prev = prev_real_insn (insn);
2130 rtx prev_dest;
2131
2132 /* If the previous insn sets a register that dies in our insn, delete it
2133 too. */
2134 if (prev && GET_CODE (PATTERN (prev)) == SET
2135 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2136 && reg_mentioned_p (prev_dest, PATTERN (insn))
2137 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2138 delete_dead_insn (prev);
2139
2140 PUT_CODE (insn, NOTE);
2141 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2142 NOTE_SOURCE_FILE (insn) = 0;
2143}
2144
2145/* Modify the home of pseudo-reg I.
2146 The new home is present in reg_renumber[I].
2147
2148 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2149 or it may be -1, meaning there is none or it is not relevant.
2150 This is used so that all pseudos spilled from a given hard reg
2151 can share one stack slot. */
2152
2153static void
2154alter_reg (i, from_reg)
2155 register int i;
2156 int from_reg;
2157{
2158 /* When outputting an inline function, this can happen
2159 for a reg that isn't actually used. */
2160 if (regno_reg_rtx[i] == 0)
2161 return;
2162
2163 /* If the reg got changed to a MEM at rtl-generation time,
2164 ignore it. */
2165 if (GET_CODE (regno_reg_rtx[i]) != REG)
2166 return;
2167
2168 /* Modify the reg-rtx to contain the new hard reg
2169 number or else to contain its pseudo reg number. */
2170 REGNO (regno_reg_rtx[i])
2171 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2172
2173 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2174 allocate a stack slot for it. */
2175
2176 if (reg_renumber[i] < 0
2177 && reg_n_refs[i] > 0
2178 && reg_equiv_constant[i] == 0
2179 && reg_equiv_memory_loc[i] == 0)
2180 {
2181 register rtx x;
2182 int inherent_size = PSEUDO_REGNO_BYTES (i);
2183 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2184 int adjust = 0;
2185
2186 /* Each pseudo reg has an inherent size which comes from its own mode,
2187 and a total size which provides room for paradoxical subregs
2188 which refer to the pseudo reg in wider modes.
2189
2190 We can use a slot already allocated if it provides both
2191 enough inherent space and enough total space.
2192 Otherwise, we allocate a new slot, making sure that it has no less
 2193	 inherent space, and no less total space, than the previous slot.  */
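      /* Illustrative example (not part of the original source): an SImode
	 pseudo (inherent size 4) that is also referenced through a
	 paradoxical DImode subreg has a total size of 8, so a 4-byte slot
	 previously used for FROM_REG cannot be reused; a new 8-byte slot is
	 allocated and remembered in spill_stack_slot.  */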
2194 if (from_reg == -1)
2195 {
2196 /* No known place to spill from => no slot to reuse. */
2197 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2198#if BYTES_BIG_ENDIAN
2199 /* Cancel the big-endian correction done in assign_stack_local.
2200 Get the address of the beginning of the slot.
2201 This is so we can do a big-endian correction unconditionally
2202 below. */
2203 adjust = inherent_size - total_size;
2204#endif
2205 }
2206 /* Reuse a stack slot if possible. */
2207 else if (spill_stack_slot[from_reg] != 0
2208 && spill_stack_slot_width[from_reg] >= total_size
2209 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2210 >= inherent_size))
2211 x = spill_stack_slot[from_reg];
2212 /* Allocate a bigger slot. */
2213 else
2214 {
2215 /* Compute maximum size needed, both for inherent size
2216 and for total size. */
2217 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2218 if (spill_stack_slot[from_reg])
2219 {
2220 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2221 > inherent_size)
2222 mode = GET_MODE (spill_stack_slot[from_reg]);
2223 if (spill_stack_slot_width[from_reg] > total_size)
2224 total_size = spill_stack_slot_width[from_reg];
2225 }
2226 /* Make a slot with that size. */
2227 x = assign_stack_local (mode, total_size, -1);
2228#if BYTES_BIG_ENDIAN
2229 /* Cancel the big-endian correction done in assign_stack_local.
2230 Get the address of the beginning of the slot.
2231 This is so we can do a big-endian correction unconditionally
2232 below. */
2233 adjust = GET_MODE_SIZE (mode) - total_size;
2234#endif
2235 spill_stack_slot[from_reg] = x;
2236 spill_stack_slot_width[from_reg] = total_size;
2237 }
2238
2239#if BYTES_BIG_ENDIAN
2240 /* On a big endian machine, the "address" of the slot
2241 is the address of the low part that fits its inherent mode. */
2242 if (inherent_size < total_size)
2243 adjust += (total_size - inherent_size);
2244#endif /* BYTES_BIG_ENDIAN */
2245
2246 /* If we have any adjustment to make, or if the stack slot is the
2247 wrong mode, make a new stack slot. */
2248 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2249 {
2250 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2251 plus_constant (XEXP (x, 0), adjust));
2252 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2253 }
2254
2255 /* Save the stack slot for later. */
2256 reg_equiv_memory_loc[i] = x;
2257 }
2258}
2259
2260/* Mark the slots in regs_ever_live for the hard regs
2261 used by pseudo-reg number REGNO. */
2262
2263void
2264mark_home_live (regno)
2265 int regno;
2266{
2267 register int i, lim;
2268 i = reg_renumber[regno];
2269 if (i < 0)
2270 return;
2271 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2272 while (i < lim)
2273 regs_ever_live[i++] = 1;
2274}
2275\f
2276/* This function handles the tracking of elimination offsets around branches.
2277
2278 X is a piece of RTL being scanned.
2279
2280 INSN is the insn that it came from, if any.
2281
2282 INITIAL_P is non-zero if we are to set the offset to be the initial
2283 offset and zero if we are setting the offset of the label to be the
2284 current offset. */
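/* Illustrative note (not in the original source): if a label can be reached
   from two places where an elimination's offset differs -- say the fp-to-sp
   offset is 8 along one path and 16 along the other -- the offsets recorded
   at the label cannot match both, and the code below disables that
   elimination, since no single replacement offset would be valid there.  */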
2285
2286static void
2287set_label_offsets (x, insn, initial_p)
2288 rtx x;
2289 rtx insn;
2290 int initial_p;
2291{
2292 enum rtx_code code = GET_CODE (x);
2293 rtx tem;
2294 int i;
2295 struct elim_table *p;
2296
2297 switch (code)
2298 {
2299 case LABEL_REF:
2300 if (LABEL_REF_NONLOCAL_P (x))
2301 return;
2302
2303 x = XEXP (x, 0);
2304
2305 /* ... fall through ... */
2306
2307 case CODE_LABEL:
2308 /* If we know nothing about this label, set the desired offsets. Note
2309 that this sets the offset at a label to be the offset before a label
2310 if we don't know anything about the label. This is not correct for
2311 the label after a BARRIER, but is the best guess we can make. If
2312 we guessed wrong, we will suppress an elimination that might have
2313 been possible had we been able to guess correctly. */
2314
2315 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2316 {
2317 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2318 offsets_at[CODE_LABEL_NUMBER (x)][i]
2319 = (initial_p ? reg_eliminate[i].initial_offset
2320 : reg_eliminate[i].offset);
2321 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2322 }
2323
2324 /* Otherwise, if this is the definition of a label and it is
 2325	 preceded by a BARRIER, set our offsets to the known offset of
2326 that label. */
2327
2328 else if (x == insn
2329 && (tem = prev_nonnote_insn (insn)) != 0
2330 && GET_CODE (tem) == BARRIER)
2331 {
2332 num_not_at_initial_offset = 0;
2333 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2334 {
2335 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2336 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2337 if (reg_eliminate[i].can_eliminate
2338 && (reg_eliminate[i].offset
2339 != reg_eliminate[i].initial_offset))
2340 num_not_at_initial_offset++;
2341 }
2342 }
2343
2344 else
2345 /* If neither of the above cases is true, compare each offset
2346 with those previously recorded and suppress any eliminations
2347 where the offsets disagree. */
 2348
2349 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2350 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2351 != (initial_p ? reg_eliminate[i].initial_offset
2352 : reg_eliminate[i].offset))
2353 reg_eliminate[i].can_eliminate = 0;
2354
2355 return;
2356
2357 case JUMP_INSN:
2358 set_label_offsets (PATTERN (insn), insn, initial_p);
2359
2360 /* ... fall through ... */
2361
2362 case INSN:
2363 case CALL_INSN:
2364 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2365 and hence must have all eliminations at their initial offsets. */
2366 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2367 if (REG_NOTE_KIND (tem) == REG_LABEL)
2368 set_label_offsets (XEXP (tem, 0), insn, 1);
2369 return;
2370
2371 case ADDR_VEC:
2372 case ADDR_DIFF_VEC:
2373 /* Each of the labels in the address vector must be at their initial
 2374	 offsets.  We want the first field for ADDR_VEC and the second
2375 field for ADDR_DIFF_VEC. */
2376
2377 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2378 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2379 insn, initial_p);
2380 return;
2381
2382 case SET:
2383 /* We only care about setting PC. If the source is not RETURN,
2384 IF_THEN_ELSE, or a label, disable any eliminations not at
2385 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2386 isn't one of those possibilities. For branches to a label,
2387 call ourselves recursively.
2388
2389 Note that this can disable elimination unnecessarily when we have
2390 a non-local goto since it will look like a non-constant jump to
2391 someplace in the current function. This isn't a significant
2392 problem since such jumps will normally be when all elimination
2393 pairs are back to their initial offsets. */
2394
2395 if (SET_DEST (x) != pc_rtx)
2396 return;
2397
2398 switch (GET_CODE (SET_SRC (x)))
2399 {
2400 case PC:
2401 case RETURN:
2402 return;
2403
2404 case LABEL_REF:
2405 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2406 return;
2407
2408 case IF_THEN_ELSE:
2409 tem = XEXP (SET_SRC (x), 1);
2410 if (GET_CODE (tem) == LABEL_REF)
2411 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2412 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2413 break;
2414
2415 tem = XEXP (SET_SRC (x), 2);
2416 if (GET_CODE (tem) == LABEL_REF)
2417 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2418 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2419 break;
2420 return;
2421 }
2422
2423 /* If we reach here, all eliminations must be at their initial
2424 offset because we are doing a jump to a variable address. */
2425 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2426 if (p->offset != p->initial_offset)
2427 p->can_eliminate = 0;
2428 }
2429}
2430\f
 2431/* Used for communication between the next two functions to properly share
2432 the vector for an ASM_OPERANDS. */
2433
2434static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2435
 2436/* Scan X and replace any eliminable registers (such as fp) with a
2437 replacement (such as sp), plus an offset.
2438
2439 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2440 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2441 MEM, we are allowed to replace a sum of a register and the constant zero
2442 with the register, which we cannot do outside a MEM. In addition, we need
2443 to record the fact that a register is referenced outside a MEM.
2444
2445 If INSN is nonzero, it is the insn containing X. If we replace a REG
2446 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
 2447   CLOBBER of the pseudo after INSN so find_equiv_regs will know
 2448   that the REG is being modified.
2449
2450 If we see a modification to a register we know about, take the
2451 appropriate action (see case SET, below).
2452
 2453   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2454 replacements done assuming all offsets are at their initial values. If
2455 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2456 encounter, return the actual location so that find_reloads will do
2457 the proper thing. */
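/* A minimal illustration (not part of the original source), assuming the
   frame pointer is being eliminated in favor of the stack pointer and the
   current offset between them is 8: a bare (reg fp) outside a MEM becomes
   (plus (reg sp) (const_int 8)) and ref_outside_mem is noted for that
   elimination, while (mem:SI (reg fp)) becomes
   (mem:SI (plus:SI (reg sp) (const_int 8))).  */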
2458
2459rtx
2460eliminate_regs (x, mem_mode, insn)
2461 rtx x;
2462 enum machine_mode mem_mode;
2463 rtx insn;
2464{
2465 enum rtx_code code = GET_CODE (x);
2466 struct elim_table *ep;
2467 int regno;
2468 rtx new;
2469 int i, j;
2470 char *fmt;
2471 int copied = 0;
2472
2473 switch (code)
2474 {
2475 case CONST_INT:
2476 case CONST_DOUBLE:
2477 case CONST:
2478 case SYMBOL_REF:
2479 case CODE_LABEL:
2480 case PC:
2481 case CC0:
2482 case ASM_INPUT:
2483 case ADDR_VEC:
2484 case ADDR_DIFF_VEC:
2485 case RETURN:
2486 return x;
2487
2488 case REG:
2489 regno = REGNO (x);
2490
2491 /* First handle the case where we encounter a bare register that
2492 is eliminable. Replace it with a PLUS. */
2493 if (regno < FIRST_PSEUDO_REGISTER)
2494 {
2495 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2496 ep++)
2497 if (ep->from_rtx == x && ep->can_eliminate)
2498 {
2499 if (! mem_mode)
2500 ep->ref_outside_mem = 1;
2501 return plus_constant (ep->to_rtx, ep->previous_offset);
2502 }
2503
2504 }
2505 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2506 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2507 {
2508 /* In this case, find_reloads would attempt to either use an
2509 incorrect address (if something is not at its initial offset)
 2510	     or substitute a replaced address into an insn (which loses
2511 if the offset is changed by some later action). So we simply
2512 return the replaced stack slot (assuming it is changed by
2513 elimination) and ignore the fact that this is actually a
2514 reference to the pseudo. Ensure we make a copy of the
2515 address in case it is shared. */
2516 new = eliminate_regs (reg_equiv_memory_loc[regno],
2517 mem_mode, NULL_RTX);
2518 if (new != reg_equiv_memory_loc[regno])
2519 return copy_rtx (new);
2520 }
2521 return x;
2522
2523 case PLUS:
2524 /* If this is the sum of an eliminable register and a constant, rework
2525 the sum. */
2526 if (GET_CODE (XEXP (x, 0)) == REG
2527 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2528 && CONSTANT_P (XEXP (x, 1)))
2529 {
2530 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2531 ep++)
2532 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2533 {
2534 if (! mem_mode)
2535 ep->ref_outside_mem = 1;
2536
2537 /* The only time we want to replace a PLUS with a REG (this
2538 occurs when the constant operand of the PLUS is the negative
2539 of the offset) is when we are inside a MEM. We won't want
2540 to do so at other times because that would change the
2541 structure of the insn in a way that reload can't handle.
2542 We special-case the commonest situation in
2543 eliminate_regs_in_insn, so just replace a PLUS with a
2544 PLUS here, unless inside a MEM. */
 2545	    if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2546 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2547 return ep->to_rtx;
2548 else
2549 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2550 plus_constant (XEXP (x, 1),
2551 ep->previous_offset));
2552 }
2553
2554 /* If the register is not eliminable, we are done since the other
2555 operand is a constant. */
2556 return x;
2557 }
2558
2559 /* If this is part of an address, we want to bring any constant to the
2560 outermost PLUS. We will do this by doing register replacement in
2561 our operands and seeing if a constant shows up in one of them.
2562
2563 We assume here this is part of an address (or a "load address" insn)
2564 since an eliminable register is not likely to appear in any other
2565 context.
2566
2567 If we have (plus (eliminable) (reg)), we want to produce
 2568       (plus (plus (replacement) (reg)) (const)).  If this was part of a
2569 normal add insn, (plus (replacement) (reg)) will be pushed as a
2570 reload. This is the desired action. */
2571
2572 {
2573 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2574 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX);
2575
2576 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2577 {
2578 /* If one side is a PLUS and the other side is a pseudo that
 2579	     didn't get a hard register but has a reg_equiv_constant,
2580 we must replace the constant here since it may no longer
2581 be in the position of any operand. */
2582 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2583 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2584 && reg_renumber[REGNO (new1)] < 0
2585 && reg_equiv_constant != 0
2586 && reg_equiv_constant[REGNO (new1)] != 0)
2587 new1 = reg_equiv_constant[REGNO (new1)];
2588 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2589 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2590 && reg_renumber[REGNO (new0)] < 0
2591 && reg_equiv_constant[REGNO (new0)] != 0)
2592 new0 = reg_equiv_constant[REGNO (new0)];
2593
2594 new = form_sum (new0, new1);
2595
2596 /* As above, if we are not inside a MEM we do not want to
2597 turn a PLUS into something else. We might try to do so here
2598 for an addition of 0 if we aren't optimizing. */
2599 if (! mem_mode && GET_CODE (new) != PLUS)
2600 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2601 else
2602 return new;
2603 }
2604 }
2605 return x;
2606
2607 case EXPR_LIST:
2608 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2609 if (XEXP (x, 0))
2610 {
 2611	  new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2612 if (new != XEXP (x, 0))
2613 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2614 }
2615
2616 /* ... fall through ... */
2617
2618 case INSN_LIST:
2619 /* Now do eliminations in the rest of the chain. If this was
2620 an EXPR_LIST, this might result in allocating more memory than is
2621 strictly needed, but it simplifies the code. */
2622 if (XEXP (x, 1))
2623 {
 2624	  new = eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX);
2625 if (new != XEXP (x, 1))
2626 return gen_rtx (INSN_LIST, GET_MODE (x), XEXP (x, 0), new);
2627 }
2628 return x;
2629
2630 case CALL:
2631 case COMPARE:
2632 case MINUS:
2633 case MULT:
2634 case DIV: case UDIV:
2635 case MOD: case UMOD:
2636 case AND: case IOR: case XOR:
2637 case LSHIFT: case ASHIFT: case ROTATE:
2638 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2639 case NE: case EQ:
2640 case GE: case GT: case GEU: case GTU:
2641 case LE: case LT: case LEU: case LTU:
2642 {
2643 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2644 rtx new1
2645 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX) : 0;
2646
2647 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2648 return gen_rtx (code, GET_MODE (x), new0, new1);
2649 }
2650 return x;
2651
2652 case PRE_INC:
2653 case POST_INC:
2654 case PRE_DEC:
2655 case POST_DEC:
2656 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2657 if (ep->to_rtx == XEXP (x, 0))
2658 {
2659 if (code == PRE_DEC || code == POST_DEC)
2660 ep->offset += GET_MODE_SIZE (mem_mode);
2661 else
2662 ep->offset -= GET_MODE_SIZE (mem_mode);
2663 }
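	  /* Illustrative note (not in the original source): a
	     (post_dec (reg sp)) inside an SImode MEM moves sp by 4 bytes,
	     so the running offset of any elimination whose replacement
	     register is sp is adjusted above by GET_MODE_SIZE (SImode).  */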
2664
2665 /* Fall through to generic unary operation case. */
2666 case USE:
2667 case STRICT_LOW_PART:
2668 case NEG: case NOT:
2669 case SIGN_EXTEND: case ZERO_EXTEND:
2670 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2671 case FLOAT: case FIX:
2672 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2673 case ABS:
2674 case SQRT:
2675 case FFS:
 2676      new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2677 if (new != XEXP (x, 0))
2678 return gen_rtx (code, GET_MODE (x), new);
2679 return x;
2680
2681 case SUBREG:
2682 /* Similar to above processing, but preserve SUBREG_WORD.
2683 Convert (subreg (mem)) to (mem) if not paradoxical.
2684 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2685 pseudo didn't get a hard reg, we must replace this with the
2686 eliminated version of the memory location because push_reloads
2687 may do the replacement in certain circumstances. */
2688 if (GET_CODE (SUBREG_REG (x)) == REG
2689 && (GET_MODE_SIZE (GET_MODE (x))
2690 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2691 && reg_equiv_memory_loc != 0
2692 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2693 {
2694 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
 2695				mem_mode, NULL_RTX);
2696
2697 /* If we didn't change anything, we must retain the pseudo. */
2698 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2699 new = XEXP (x, 0);
2700 else
2701 /* Otherwise, ensure NEW isn't shared in case we have to reload
2702 it. */
2703 new = copy_rtx (new);
2704 }
2705 else
 2706	new = eliminate_regs (SUBREG_REG (x), mem_mode, NULL_RTX);
2707
2708 if (new != XEXP (x, 0))
2709 {
2710 if (GET_CODE (new) == MEM
2711 && (GET_MODE_SIZE (GET_MODE (x))
2712 <= GET_MODE_SIZE (GET_MODE (new))))
2713 {
2714 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2715 enum machine_mode mode = GET_MODE (x);
2716
2717#if BYTES_BIG_ENDIAN
2718 offset += (MIN (UNITS_PER_WORD,
2719 GET_MODE_SIZE (GET_MODE (new)))
2720 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2721#endif
2722
2723 PUT_MODE (new, mode);
2724 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2725 return new;
2726 }
2727 else
2728 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2729 }
2730
2731 return x;
2732
2733 case CLOBBER:
2734 /* If clobbering a register that is the replacement register for an
 2735	 elimination we still think can be performed, note that it cannot
2736 be performed. Otherwise, we need not be concerned about it. */
2737 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2738 if (ep->to_rtx == XEXP (x, 0))
2739 ep->can_eliminate = 0;
2740
2741 return x;
2742
2743 case ASM_OPERANDS:
2744 {
2745 rtx *temp_vec;
2746 /* Properly handle sharing input and constraint vectors. */
2747 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2748 {
2749 /* When we come to a new vector not seen before,
2750 scan all its elements; keep the old vector if none
2751 of them changes; otherwise, make a copy. */
2752 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2753 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2754 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2755 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
 2756					  mem_mode, NULL_RTX);
2757
2758 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2759 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2760 break;
2761
2762 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2763 new_asm_operands_vec = old_asm_operands_vec;
2764 else
2765 new_asm_operands_vec
2766 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2767 }
2768
2769 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2770 if (new_asm_operands_vec == old_asm_operands_vec)
2771 return x;
2772
2773 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2774 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2775 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2776 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2777 ASM_OPERANDS_SOURCE_FILE (x),
2778 ASM_OPERANDS_SOURCE_LINE (x));
2779 new->volatil = x->volatil;
2780 return new;
2781 }
2782
2783 case SET:
2784 /* Check for setting a register that we know about. */
2785 if (GET_CODE (SET_DEST (x)) == REG)
2786 {
2787 /* See if this is setting the replacement register for an
 2788	     elimination.
2789
2790 If DEST is the frame pointer, we do nothing because we assume that
2791 all assignments to the frame pointer are for non-local gotos and
2792 are being done at a time when they are valid and do not disturb
2793 anything else. Some machines want to eliminate a fake argument
2794 pointer with either the frame or stack pointer. Assignments to
2795 the frame pointer must not prevent this elimination. */
2796
2797 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2798 ep++)
2799 if (ep->to_rtx == SET_DEST (x)
2800 && SET_DEST (x) != frame_pointer_rtx)
2801 {
 2802	    /* If it is being incremented, adjust the offset.  Otherwise,
2803 this elimination can't be done. */
2804 rtx src = SET_SRC (x);
2805
2806 if (GET_CODE (src) == PLUS
2807 && XEXP (src, 0) == SET_DEST (x)
2808 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2809 ep->offset -= INTVAL (XEXP (src, 1));
2810 else
2811 ep->can_eliminate = 0;
2812 }
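	  /* Illustrative example (not part of the original source): for
	     (set (reg sp) (plus (reg sp) (const_int -16))), the code above
	     adds 16 to the offset of every elimination whose replacement
	     register is sp; a set of sp from anything other than sp plus a
	     constant instead turns those eliminations off.  */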
2813
 2814       /* Now check to see if we are assigning to a register that can be
2815 eliminated. If so, it must be as part of a PARALLEL, since we
2816 will not have been called if this is a single SET. So indicate
2817 that we can no longer eliminate this reg. */
2818 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2819 ep++)
2820 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
2821 ep->can_eliminate = 0;
2822 }
2823
2824 /* Now avoid the loop below in this common case. */
2825 {
2826 rtx new0 = eliminate_regs (SET_DEST (x), 0, NULL_RTX);
2827 rtx new1 = eliminate_regs (SET_SRC (x), 0, NULL_RTX);
2828
2829 /* If SET_DEST changed from a REG to a MEM and INSN is non-zero,
2830 write a CLOBBER insn. */
2831 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
2832 && insn != 0)
2833 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
2834
2835 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
2836 return gen_rtx (SET, VOIDmode, new0, new1);
2837 }
2838
2839 return x;
2840
2841 case MEM:
2842 /* Our only special processing is to pass the mode of the MEM to our
2843 recursive call and copy the flags. While we are here, handle this
2844 case more efficiently. */
 2845     new = eliminate_regs (XEXP (x, 0), GET_MODE (x), NULL_RTX);
2846 if (new != XEXP (x, 0))
2847 {
2848 new = gen_rtx (MEM, GET_MODE (x), new);
2849 new->volatil = x->volatil;
2850 new->unchanging = x->unchanging;
2851 new->in_struct = x->in_struct;
2852 return new;
2853 }
2854 else
2855 return x;
2856 }
2857
2858 /* Process each of our operands recursively. If any have changed, make a
2859 copy of the rtx. */
2860 fmt = GET_RTX_FORMAT (code);
2861 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2862 {
2863 if (*fmt == 'e')
2864 {
 2865	  new = eliminate_regs (XEXP (x, i), mem_mode, NULL_RTX);
2866 if (new != XEXP (x, i) && ! copied)
2867 {
2868 rtx new_x = rtx_alloc (code);
2869 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
2870 + (sizeof (new_x->fld[0])
2871 * GET_RTX_LENGTH (code))));
2872 x = new_x;
2873 copied = 1;
2874 }
2875 XEXP (x, i) = new;
2876 }
2877 else if (*fmt == 'E')
2878 {
2879 int copied_vec = 0;
2880 for (j = 0; j < XVECLEN (x, i); j++)
2881 {
2882 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
2883 if (new != XVECEXP (x, i, j) && ! copied_vec)
2884 {
2885 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2886 &XVECEXP (x, i, 0));
2887 if (! copied)
2888 {
2889 rtx new_x = rtx_alloc (code);
2890 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
2891 + (sizeof (new_x->fld[0])
2892 * GET_RTX_LENGTH (code))));
2893 x = new_x;
2894 copied = 1;
2895 }
2896 XVEC (x, i) = new_v;
2897 copied_vec = 1;
2898 }
2899 XVECEXP (x, i, j) = new;
2900 }
2901 }
2902 }
2903
2904 return x;
2905}
2906\f
2907/* Scan INSN and eliminate all eliminable registers in it.
2908
2909 If REPLACE is nonzero, do the replacement destructively. Also
 2910   delete the insn as dead if it is setting an eliminable register.
2911
2912 If REPLACE is zero, do all our allocations in reload_obstack.
2913
2914 If no eliminations were done and this insn doesn't require any elimination
2915 processing (these are not identical conditions: it might be updating sp,
2916 but not referencing fp; this needs to be seen during reload_as_needed so
2917 that the offset between fp and sp can be taken into consideration), zero
2918 is returned. Otherwise, 1 is returned. */
2919
2920static int
2921eliminate_regs_in_insn (insn, replace)
2922 rtx insn;
2923 int replace;
2924{
2925 rtx old_body = PATTERN (insn);
2926 rtx new_body;
2927 int val = 0;
2928 struct elim_table *ep;
2929
2930 if (! replace)
2931 push_obstacks (&reload_obstack, &reload_obstack);
2932
2933 if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
2934 && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
2935 {
2936 /* Check for setting an eliminable register. */
2937 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2938 if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
2939 {
2940 /* In this case this insn isn't serving a useful purpose. We
2941 will delete it in reload_as_needed once we know that this
2942 elimination is, in fact, being done.
2943
 2944	     If REPLACE isn't set, we can't delete this insn, but needn't
2945 process it since it won't be used unless something changes. */
2946 if (replace)
2947 delete_dead_insn (insn);
2948 val = 1;
2949 goto done;
2950 }
2951
2952 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
2953 in the insn is the negative of the offset in FROM. Substitute
2954 (set (reg) (reg to)) for the insn and change its code.
2955
 2956      We have to do this here, rather than in eliminate_regs, so that we can
2957 change the insn code. */
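  /* Hypothetical example (not from the original source): with an fp-to-sp
     elimination whose current offset is 16, the insn
     (set (reg R) (plus (reg fp) (const_int -16))) computes the same value
     as sp, so it is rewritten here as (set (reg R) (reg sp)) and its insn
     code is reset so it will be re-recognized.  */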
2958
2959 if (GET_CODE (SET_SRC (old_body)) == PLUS
2960 && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
2961 && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
2962 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2963 ep++)
2964 if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
2965 && ep->can_eliminate
2966 && ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
2967 {
2968 PATTERN (insn) = gen_rtx (SET, VOIDmode,
2969 SET_DEST (old_body), ep->to_rtx);
2970 INSN_CODE (insn) = -1;
2971 val = 1;
2972 goto done;
2973 }
2974 }
2975
2976 old_asm_operands_vec = 0;
2977
2978 /* Replace the body of this insn with a substituted form. If we changed
2979 something, return non-zero. If this is the final call for this
2980 insn (REPLACE is non-zero), do the elimination in REG_NOTES as well.
2981
2982 If we are replacing a body that was a (set X (plus Y Z)), try to
2983 re-recognize the insn. We do this in case we had a simple addition
2984 but now can do this as a load-address. This saves an insn in this
2985 common case. */
2986
 2987  new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
2988 if (new_body != old_body)
2989 {
 2990      /* If we had a move insn but now we don't, rerecognize it.  */
2991 if ((GET_CODE (old_body) == SET && GET_CODE (SET_SRC (old_body)) == REG
2992 && (GET_CODE (new_body) != SET
2993 || GET_CODE (SET_SRC (new_body)) != REG))
2994 /* If this was an add insn before, rerecognize. */
2995 ||
2996 (GET_CODE (old_body) == SET
2997 && GET_CODE (SET_SRC (old_body)) == PLUS))
2998 {
2999 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3000 /* If recognition fails, store the new body anyway.
3001 It's normal to have recognition failures here
3002 due to bizarre memory addresses; reloading will fix them. */
3003 PATTERN (insn) = new_body;
 3004	}
 3005      else
3006 PATTERN (insn) = new_body;
3007
3008 if (replace && REG_NOTES (insn))
 3009	REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, NULL_RTX);
3010 val = 1;
3011 }
 3012
3013 /* Loop through all elimination pairs. See if any have changed and
3014 recalculate the number not at initial offset.
3015
3016 Compute the maximum offset (minimum offset if the stack does not
3017 grow downward) for each elimination pair.
3018
 3019     We also detect cases where register elimination cannot be done,
3020 namely, if a register would be both changed and referenced outside a MEM
3021 in the resulting insn since such an insn is often undefined and, even if
3022 not, we cannot know what meaning will be given to it. Note that it is
3023 valid to have a register used in an address in an insn that changes it
3024 (presumably with a pre- or post-increment or decrement).
3025
3026 If anything changes, return nonzero. */
3027
3028 num_not_at_initial_offset = 0;
3029 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3030 {
3031 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3032 ep->can_eliminate = 0;
3033
3034 ep->ref_outside_mem = 0;
3035
3036 if (ep->previous_offset != ep->offset)
3037 val = 1;
3038
3039 ep->previous_offset = ep->offset;
3040 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3041 num_not_at_initial_offset++;
3042
3043#ifdef STACK_GROWS_DOWNWARD
3044 ep->max_offset = MAX (ep->max_offset, ep->offset);
3045#else
3046 ep->max_offset = MIN (ep->max_offset, ep->offset);
3047#endif
3048 }
3049
3050 done:
3051 if (! replace)
3052 pop_obstacks ();
3053
3054 return val;
3055}
3056
3057/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3058 replacement we currently believe is valid, mark it as not eliminable if X
3059 modifies DEST in any way other than by adding a constant integer to it.
3060
3061 If DEST is the frame pointer, we do nothing because we assume that
3062 all assignments to the frame pointer are nonlocal gotos and are being done
3063 at a time when they are valid and do not disturb anything else.
3064 Some machines want to eliminate a fake argument pointer with either the
3065 frame or stack pointer. Assignments to the frame pointer must not prevent
3066 this elimination.
3067
3068 Called via note_stores from reload before starting its passes to scan
3069 the insns of the function. */
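/* Illustrative example (not in the original source): if sp is the
   replacement register of a live elimination, then an insn such as
   (set (reg sp) (reg R1)) -- anything other than adding a constant to sp
   itself -- causes the code below to clear can_eliminate for that
   elimination and decrement num_eliminable.  */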
3070
3071static void
3072mark_not_eliminable (dest, x)
3073 rtx dest;
3074 rtx x;
3075{
3076 register int i;
3077
3078 /* A SUBREG of a hard register here is just changing its mode. We should
3079 not see a SUBREG of an eliminable hard register, but check just in
3080 case. */
3081 if (GET_CODE (dest) == SUBREG)
3082 dest = SUBREG_REG (dest);
3083
3084 if (dest == frame_pointer_rtx)
3085 return;
3086
3087 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3088 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3089 && (GET_CODE (x) != SET
3090 || GET_CODE (SET_SRC (x)) != PLUS
3091 || XEXP (SET_SRC (x), 0) != dest
3092 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3093 {
3094 reg_eliminate[i].can_eliminate_previous
3095 = reg_eliminate[i].can_eliminate = 0;
3096 num_eliminable--;
3097 }
3098}
3099\f
3100/* Kick all pseudos out of hard register REGNO.
3101 If GLOBAL is nonzero, try to find someplace else to put them.
3102 If DUMPFILE is nonzero, log actions taken on that file.
3103
3104 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
 3105   because we found we can't eliminate some register.  In that case, no pseudos
3106 are allowed to be in the register, even if they are only in a block that
3107 doesn't require spill registers, unlike the case when we are spilling this
3108 hard reg to produce another spill register.
3109
3110 Return nonzero if any pseudos needed to be kicked out. */
3111
3112static int
3113spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3114 register int regno;
3115 int global;
3116 FILE *dumpfile;
3117 int cant_eliminate;
3118{
3119 int something_changed = 0;
3120 register int i;
3121
3122 SET_HARD_REG_BIT (forbidden_regs, regno);
3123
3124 /* Spill every pseudo reg that was allocated to this reg
3125 or to something that overlaps this reg. */
3126
3127 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3128 if (reg_renumber[i] >= 0
3129 && reg_renumber[i] <= regno
 3130 	&& (reg_renumber[i]
3131 + HARD_REGNO_NREGS (reg_renumber[i],
3132 PSEUDO_REGNO_MODE (i))
3133 > regno))
3134 {
3135 enum reg_class class = REGNO_REG_CLASS (regno);
3136
3137 /* If this register belongs solely to a basic block which needed no
3138 spilling of any class that this register is contained in,
3139 leave it be, unless we are spilling this register because
3140 it was a hard register that can't be eliminated. */
3141
3142 if (! cant_eliminate
3143 && basic_block_needs[0]
3144 && reg_basic_block[i] >= 0
3145 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3146 {
3147 enum reg_class *p;
3148
3149 for (p = reg_class_superclasses[(int) class];
3150 *p != LIM_REG_CLASSES; p++)
3151 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3152 break;
 3153
3154 if (*p == LIM_REG_CLASSES)
3155 continue;
3156 }
3157
3158 /* Mark it as no longer having a hard register home. */
3159 reg_renumber[i] = -1;
3160 /* We will need to scan everything again. */
3161 something_changed = 1;
3162 if (global)
3163 retry_global_alloc (i, forbidden_regs);
3164
3165 alter_reg (i, regno);
3166 if (dumpfile)
3167 {
3168 if (reg_renumber[i] == -1)
3169 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3170 else
3171 fprintf (dumpfile, " Register %d now in %d.\n\n",
3172 i, reg_renumber[i]);
3173 }
3174 }
3175
3176 return something_changed;
3177}
3178\f
3179/* Find all paradoxical subregs within X and update reg_max_ref_width. */
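/* (A paradoxical SUBREG is one whose mode is wider than the mode of the
   register it contains; the widest such reference to each pseudo is
   recorded so that its memory home can be made big enough.)  */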
3180
3181static void
3182scan_paradoxical_subregs (x)
3183 register rtx x;
3184{
3185 register int i;
3186 register char *fmt;
3187 register enum rtx_code code = GET_CODE (x);
3188
3189 switch (code)
3190 {
3191 case CONST_INT:
3192 case CONST:
3193 case SYMBOL_REF:
3194 case LABEL_REF:
3195 case CONST_DOUBLE:
3196 case CC0:
3197 case PC:
3198 case REG:
3199 case USE:
3200 case CLOBBER:
3201 return;
3202
3203 case SUBREG:
3204 if (GET_CODE (SUBREG_REG (x)) == REG
3205 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3206 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3207 = GET_MODE_SIZE (GET_MODE (x));
3208 return;
3209 }
3210
3211 fmt = GET_RTX_FORMAT (code);
3212 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3213 {
3214 if (fmt[i] == 'e')
3215 scan_paradoxical_subregs (XEXP (x, i));
3216 else if (fmt[i] == 'E')
3217 {
3218 register int j;
3219 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3220 scan_paradoxical_subregs (XVECEXP (x, i, j));
3221 }
3222 }
3223}
3224\f
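/* Pair of a hard register number and the total reference count of the
   pseudos allocated to it; used below to order candidate spill registers.  */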
3225struct hard_reg_n_uses { int regno; int uses; };
3226
3227static int
3228hard_reg_use_compare (p1, p2)
3229 struct hard_reg_n_uses *p1, *p2;
3230{
3231 int tem = p1->uses - p2->uses;
3232 if (tem != 0) return tem;
3233 /* If regs are equally good, sort by regno,
3234 so that the results of qsort leave nothing to chance. */
3235 return p1->regno - p2->regno;
3236}
3237
3238/* Choose the order to consider regs for use as reload registers
3239 based on how much trouble would be caused by spilling one.
3240 Store them in order of decreasing preference in potential_reload_regs. */
3241
3242static void
3243order_regs_for_reload ()
3244{
3245 register int i;
3246 register int o = 0;
3247 int large = 0;
3248
3249 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3250
3251 CLEAR_HARD_REG_SET (bad_spill_regs);
3252
3253 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3254 potential_reload_regs[i] = -1;
3255
3256 /* Count number of uses of each hard reg by pseudo regs allocated to it
3257 and then order them by decreasing use. */
3258
3259 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3260 {
3261 hard_reg_n_uses[i].uses = 0;
3262 hard_reg_n_uses[i].regno = i;
3263 }
3264
3265 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3266 {
3267 int regno = reg_renumber[i];
3268 if (regno >= 0)
3269 {
3270 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3271 while (regno < lim)
3272 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3273 }
3274 large += reg_n_refs[i];
3275 }
3276
3277 /* Now fixed registers (which cannot safely be used for reloading)
3278 get a very high use count so they will be considered least desirable.
3279 Registers used explicitly in the rtl code are almost as bad. */
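   /* (LARGE is the sum of all pseudo reference counts, so no hard reg can
      legitimately accumulate more than LARGE uses; adding LARGE + 1 or
      2 * LARGE + 2 therefore sorts these registers after every
      normally-used register.)  */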
3280
3281 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3282 {
3283 if (fixed_regs[i])
3284 {
3285 hard_reg_n_uses[i].uses += 2 * large + 2;
3286 SET_HARD_REG_BIT (bad_spill_regs, i);
3287 }
3288 else if (regs_explicitly_used[i])
3289 {
3290 hard_reg_n_uses[i].uses += large + 1;
3291 /* ??? We are doing this here because of the potential that
3292 bad code may be generated if a register explicitly used in
3293 an insn was used as a spill register for that insn. But
 3294 	     not using these as spill registers may lose on some machines.
3295 We'll have to see how this works out. */
3296 SET_HARD_REG_BIT (bad_spill_regs, i);
3297 }
3298 }
3299 hard_reg_n_uses[FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3300 SET_HARD_REG_BIT (bad_spill_regs, FRAME_POINTER_REGNUM);
3301
3302#ifdef ELIMINABLE_REGS
3303 /* If registers other than the frame pointer are eliminable, mark them as
3304 poor choices. */
3305 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3306 {
3307 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3308 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3309 }
3310#endif
3311
3312 /* Prefer registers not so far used, for use in temporary loading.
3313 Among them, if REG_ALLOC_ORDER is defined, use that order.
3314 Otherwise, prefer registers not preserved by calls. */
3315
3316#ifdef REG_ALLOC_ORDER
3317 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3318 {
3319 int regno = reg_alloc_order[i];
3320
3321 if (hard_reg_n_uses[regno].uses == 0)
3322 potential_reload_regs[o++] = regno;
3323 }
3324#else
3325 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3326 {
3327 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3328 potential_reload_regs[o++] = i;
3329 }
3330 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3331 {
3332 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3333 potential_reload_regs[o++] = i;
3334 }
3335#endif
3336
3337 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3338 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3339
3340 /* Now add the regs that are already used,
3341 preferring those used less often. The fixed and otherwise forbidden
3342 registers will be at the end of this list. */
3343
3344 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3345 if (hard_reg_n_uses[i].uses != 0)
3346 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3347}
3348\f
3349/* Reload pseudo-registers into hard regs around each insn as needed.
3350 Additional register load insns are output before the insn that needs it
3351 and perhaps store insns after insns that modify the reloaded pseudo reg.
3352
3353 reg_last_reload_reg and reg_reloaded_contents keep track of
3354 which pseudo-registers are already available in reload registers.
3355 We update these for the reloads that we perform,
3356 as the insns are scanned. */
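/* (The per-insn steps below are: track the current basic block and the
   elimination offsets, call find_reloads to record what must be reloaded,
   choose_reload_regs to assign reload registers, emit_reload_insns to emit
   the copy insns, and subst_reloads to rewrite the insn body.)  */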
3357
3358static void
3359reload_as_needed (first, live_known)
3360 rtx first;
3361 int live_known;
3362{
3363 register rtx insn;
3364 register int i;
3365 int this_block = 0;
3366 rtx x;
3367 rtx after_call = 0;
3368
3369 bzero (spill_reg_rtx, sizeof spill_reg_rtx);
3370 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3371 bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
3372 reg_has_output_reload = (char *) alloca (max_regno);
3373 for (i = 0; i < n_spills; i++)
3374 {
3375 reg_reloaded_contents[i] = -1;
3376 reg_reloaded_insn[i] = 0;
3377 }
3378
3379 /* Reset all offsets on eliminable registers to their initial values. */
3380#ifdef ELIMINABLE_REGS
3381 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3382 {
3383 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
 3384 				reg_eliminate[i].initial_offset);
3385 reg_eliminate[i].previous_offset
3386 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3387 }
3388#else
3389 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3390 reg_eliminate[0].previous_offset
3391 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3392#endif
3393
3394 num_not_at_initial_offset = 0;
3395
3396 for (insn = first; insn;)
3397 {
3398 register rtx next = NEXT_INSN (insn);
3399
3400 /* Notice when we move to a new basic block. */
 3401       if (live_known && this_block + 1 < n_basic_blocks
3402 && insn == basic_block_head[this_block+1])
3403 ++this_block;
3404
3405 /* If we pass a label, copy the offsets from the label information
3406 into the current offsets of each elimination. */
3407 if (GET_CODE (insn) == CODE_LABEL)
3408 {
3409 num_not_at_initial_offset = 0;
3410 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3411 {
3412 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3413 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3414 if (reg_eliminate[i].can_eliminate
3415 && (reg_eliminate[i].offset
3416 != reg_eliminate[i].initial_offset))
3417 num_not_at_initial_offset++;
3418 }
3419 }
3420
3421 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3422 {
3423 rtx avoid_return_reg = 0;
3424
3425#ifdef SMALL_REGISTER_CLASSES
3426 /* Set avoid_return_reg if this is an insn
3427 that might use the value of a function call. */
3428 if (GET_CODE (insn) == CALL_INSN)
3429 {
3430 if (GET_CODE (PATTERN (insn)) == SET)
3431 after_call = SET_DEST (PATTERN (insn));
3432 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3433 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3434 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3435 else
3436 after_call = 0;
3437 }
3438 else if (after_call != 0
3439 && !(GET_CODE (PATTERN (insn)) == SET
3440 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3441 {
3442 if (reg_mentioned_p (after_call, PATTERN (insn)))
3443 avoid_return_reg = after_call;
3444 after_call = 0;
3445 }
3446#endif /* SMALL_REGISTER_CLASSES */
3447
3448 /* If this is a USE and CLOBBER of a MEM, ensure that any
3449 references to eliminable registers have been removed. */
3450
3451 if ((GET_CODE (PATTERN (insn)) == USE
3452 || GET_CODE (PATTERN (insn)) == CLOBBER)
3453 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3454 XEXP (XEXP (PATTERN (insn), 0), 0)
3455 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
 3456 			      GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
 3457
3458 /* If we need to do register elimination processing, do so.
3459 This might delete the insn, in which case we are done. */
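	  /* (The mode of an insn is used here as a flag: QImode asks for
	     elimination processing, and VOIDmode, tested below, means the
	     insn needs no reloads at all.)  */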
3460 if (num_eliminable && GET_MODE (insn) == QImode)
3461 {
3462 eliminate_regs_in_insn (insn, 1);
3463 if (GET_CODE (insn) == NOTE)
3464 {
3465 insn = next;
3466 continue;
3467 }
3468 }
3469
3470 if (GET_MODE (insn) == VOIDmode)
3471 n_reloads = 0;
3472 /* First find the pseudo regs that must be reloaded for this insn.
3473 This info is returned in the tables reload_... (see reload.h).
3474 Also modify the body of INSN by substituting RELOAD
3475 rtx's for those pseudo regs. */
3476 else
3477 {
3478 bzero (reg_has_output_reload, max_regno);
3479 CLEAR_HARD_REG_SET (reg_is_output_reload);
3480
3481 find_reloads (insn, 1, spill_indirect_levels, live_known,
3482 spill_reg_order);
3483 }
3484
3485 if (n_reloads > 0)
3486 {
3487 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3488 rtx p;
3489 int class;
3490
3491 /* If this block has not had spilling done for a
 3492 	     particular class, deactivate any optional reloads
3493 of that class lest they try to use a spill-reg which isn't
3494 available here. If we have any non-optionals that need a
3495 spill reg, abort. */
3496
3497 for (class = 0; class < N_REG_CLASSES; class++)
3498 if (basic_block_needs[class] != 0
3499 && basic_block_needs[class][this_block] == 0)
3500 for (i = 0; i < n_reloads; i++)
3501 if (class == (int) reload_reg_class[i])
3502 {
3503 if (reload_optional[i])
3504 {
3505 reload_in[i] = reload_out[i] = 0;
3506 reload_secondary_p[i] = 0;
3507 }
3508 else if (reload_reg_rtx[i] == 0
3509 && (reload_in[i] != 0 || reload_out[i] != 0
3510 || reload_secondary_p[i] != 0))
3511 abort ();
3512 }
3513
3514 /* Now compute which reload regs to reload them into. Perhaps
3515 reusing reload regs from previous insns, or else output
3516 load insns to reload them. Maybe output store insns too.
3517 Record the choices of reload reg in reload_reg_rtx. */
3518 choose_reload_regs (insn, avoid_return_reg);
3519
3520 /* Generate the insns to reload operands into or out of
3521 their reload regs. */
3522 emit_reload_insns (insn);
3523
3524 /* Substitute the chosen reload regs from reload_reg_rtx
3525 into the insn's body (or perhaps into the bodies of other
3526 load and store insn that we just made for reloading
3527 and that we moved the structure into). */
3528 subst_reloads ();
3529
3530 /* If this was an ASM, make sure that all the reload insns
3531 we have generated are valid. If not, give an error
3532 and delete them. */
3533
3534 if (asm_noperands (PATTERN (insn)) >= 0)
3535 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3536 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3537 && (recog_memoized (p) < 0
3538 || (insn_extract (p),
3539 ! constrain_operands (INSN_CODE (p), 1))))
3540 {
3541 error_for_asm (insn,
3542 "`asm' operand requires impossible reload");
3543 PUT_CODE (p, NOTE);
3544 NOTE_SOURCE_FILE (p) = 0;
3545 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3546 }
3547 }
3548 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3549 is no longer validly lying around to save a future reload.
3550 Note that this does not detect pseudos that were reloaded
3551 for this insn in order to be stored in
3552 (obeying register constraints). That is correct; such reload
3553 registers ARE still valid. */
3554 note_stores (PATTERN (insn), forget_old_reloads_1);
3555
3556 /* There may have been CLOBBER insns placed after INSN. So scan
3557 between INSN and NEXT and use them to forget old reloads. */
3558 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3559 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3560 note_stores (PATTERN (x), forget_old_reloads_1);
3561
3562#ifdef AUTO_INC_DEC
3563 /* Likewise for regs altered by auto-increment in this insn.
3564 But note that the reg-notes are not changed by reloading:
3565 they still contain the pseudo-regs, not the spill regs. */
3566 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3567 if (REG_NOTE_KIND (x) == REG_INC)
3568 {
3569 /* See if this pseudo reg was reloaded in this insn.
3570 If so, its last-reload info is still valid
3571 because it is based on this insn's reload. */
3572 for (i = 0; i < n_reloads; i++)
3573 if (reload_out[i] == XEXP (x, 0))
3574 break;
3575
3576 if (i != n_reloads)
3577 forget_old_reloads_1 (XEXP (x, 0));
3578 }
3579#endif
3580 }
3581 /* A reload reg's contents are unknown after a label. */
3582 if (GET_CODE (insn) == CODE_LABEL)
3583 for (i = 0; i < n_spills; i++)
3584 {
3585 reg_reloaded_contents[i] = -1;
3586 reg_reloaded_insn[i] = 0;
3587 }
3588
3589 /* Don't assume a reload reg is still good after a call insn
3590 if it is a call-used reg. */
3591 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == CALL_INSN)
3592 for (i = 0; i < n_spills; i++)
3593 if (call_used_regs[spill_regs[i]])
3594 {
3595 reg_reloaded_contents[i] = -1;
3596 reg_reloaded_insn[i] = 0;
3597 }
3598
3599 /* In case registers overlap, allow certain insns to invalidate
3600 particular hard registers. */
3601
3602#ifdef INSN_CLOBBERS_REGNO_P
3603 for (i = 0 ; i < n_spills ; i++)
3604 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3605 {
3606 reg_reloaded_contents[i] = -1;
3607 reg_reloaded_insn[i] = 0;
3608 }
3609#endif
3610
3611 insn = next;
3612
3613#ifdef USE_C_ALLOCA
3614 alloca (0);
3615#endif
3616 }
3617}
3618
3619/* Discard all record of any value reloaded from X,
3620 or reloaded in X from someplace else;
3621 unless X is an output reload reg of the current insn.
3622
3623 X may be a hard reg (the reload reg)
3624 or it may be a pseudo reg that was reloaded from. */
3625
3626static void
3627forget_old_reloads_1 (x)
3628 rtx x;
3629{
3630 register int regno;
3631 int nr;
3632 int offset = 0;
3633
3634 /* note_stores does give us subregs of hard regs. */
3635 while (GET_CODE (x) == SUBREG)
3636 {
3637 offset += SUBREG_WORD (x);
3638 x = SUBREG_REG (x);
3639 }
3640
3641 if (GET_CODE (x) != REG)
3642 return;
3643
 3644   regno = REGNO (x) + offset;
3645
3646 if (regno >= FIRST_PSEUDO_REGISTER)
3647 nr = 1;
3648 else
3649 {
3650 int i;
3651 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3652 /* Storing into a spilled-reg invalidates its contents.
3653 This can happen if a block-local pseudo is allocated to that reg
3654 and it wasn't spilled because this block's total need is 0.
3655 Then some insn might have an optional reload and use this reg. */
3656 for (i = 0; i < nr; i++)
3657 if (spill_reg_order[regno + i] >= 0
3658 /* But don't do this if the reg actually serves as an output
3659 reload reg in the current instruction. */
3660 && (n_reloads == 0
3661 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3662 {
3663 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3664 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3665 }
3666 }
3667
3668 /* Since value of X has changed,
3669 forget any value previously copied from it. */
3670
3671 while (nr-- > 0)
3672 /* But don't forget a copy if this is the output reload
3673 that establishes the copy's validity. */
3674 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3675 reg_last_reload_reg[regno + nr] = 0;
3676}
3677\f
3678/* For each reload, the mode of the reload register. */
3679static enum machine_mode reload_mode[MAX_RELOADS];
3680
3681/* For each reload, the largest number of registers it will require. */
3682static int reload_nregs[MAX_RELOADS];
3683
3684/* Comparison function for qsort to decide which of two reloads
3685 should be handled first. *P1 and *P2 are the reload numbers. */
3686
3687static int
3688reload_reg_class_lower (p1, p2)
3689 short *p1, *p2;
3690{
3691 register int r1 = *p1, r2 = *p2;
3692 register int t;
 3693
3694 /* Consider required reloads before optional ones. */
3695 t = reload_optional[r1] - reload_optional[r2];
3696 if (t != 0)
3697 return t;
3698
3699 /* Count all solitary classes before non-solitary ones. */
3700 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3701 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3702 if (t != 0)
3703 return t;
3704
3705 /* Aside from solitaires, consider all multi-reg groups first. */
3706 t = reload_nregs[r2] - reload_nregs[r1];
3707 if (t != 0)
3708 return t;
3709
3710 /* Consider reloads in order of increasing reg-class number. */
3711 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3712 if (t != 0)
3713 return t;
3714
3715 /* If reloads are equally urgent, sort by reload number,
3716 so that the results of qsort leave nothing to chance. */
3717 return r1 - r2;
3718}
3719\f
3720/* The following HARD_REG_SETs indicate when each hard register is
3721 used for a reload of various parts of the current insn. */
3722
3723/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
3724static HARD_REG_SET reload_reg_used;
3725/* If reg is in use for a RELOAD_FOR_INPUT_RELOAD_ADDRESS reload. */
3726static HARD_REG_SET reload_reg_used_in_input_addr;
3727/* If reg is in use for a RELOAD_FOR_OUTPUT_RELOAD_ADDRESS reload. */
3728static HARD_REG_SET reload_reg_used_in_output_addr;
3729/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
3730static HARD_REG_SET reload_reg_used_in_op_addr;
3731/* If reg is in use for a RELOAD_FOR_INPUT reload. */
3732static HARD_REG_SET reload_reg_used_in_input;
3733/* If reg is in use for a RELOAD_FOR_OUTPUT reload. */
3734static HARD_REG_SET reload_reg_used_in_output;
3735
3736/* If reg is in use as a reload reg for any sort of reload. */
3737static HARD_REG_SET reload_reg_used_at_all;
3738
3739/* Mark reg REGNO as in use for a reload of the sort spec'd by WHEN_NEEDED.
3740 MODE is used to indicate how many consecutive regs are actually used. */
3741
3742static void
3743mark_reload_reg_in_use (regno, when_needed, mode)
3744 int regno;
3745 enum reload_when_needed when_needed;
3746 enum machine_mode mode;
3747{
3748 int nregs = HARD_REGNO_NREGS (regno, mode);
3749 int i;
3750
3751 for (i = regno; i < nregs + regno; i++)
3752 {
3753 switch (when_needed)
3754 {
3755 case RELOAD_OTHER:
3756 SET_HARD_REG_BIT (reload_reg_used, i);
3757 break;
3758
3759 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3760 SET_HARD_REG_BIT (reload_reg_used_in_input_addr, i);
3761 break;
3762
3763 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3764 SET_HARD_REG_BIT (reload_reg_used_in_output_addr, i);
3765 break;
3766
3767 case RELOAD_FOR_OPERAND_ADDRESS:
3768 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
3769 break;
3770
3771 case RELOAD_FOR_INPUT:
3772 SET_HARD_REG_BIT (reload_reg_used_in_input, i);
3773 break;
3774
3775 case RELOAD_FOR_OUTPUT:
3776 SET_HARD_REG_BIT (reload_reg_used_in_output, i);
3777 break;
3778 }
3779
3780 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
3781 }
3782}
3783
3784/* 1 if reg REGNO is free as a reload reg for a reload of the sort
3785 specified by WHEN_NEEDED. */
3786
3787static int
3788reload_reg_free_p (regno, when_needed)
3789 int regno;
3790 enum reload_when_needed when_needed;
3791{
3792 /* In use for a RELOAD_OTHER means it's not available for anything. */
3793 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
3794 return 0;
3795 switch (when_needed)
3796 {
3797 case RELOAD_OTHER:
3798 /* In use for anything means not available for a RELOAD_OTHER. */
3799 return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);
3800
3801 /* The other kinds of use can sometimes share a register. */
3802 case RELOAD_FOR_INPUT:
3803 return (! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno)
3804 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3805 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno));
3806 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3807 return (! TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno)
3808 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno));
3809 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3810 return (! TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno)
3811 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3812 case RELOAD_FOR_OPERAND_ADDRESS:
3813 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3814 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno)
3815 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3816 case RELOAD_FOR_OUTPUT:
3817 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3818 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno)
3819 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3820 }
3821 abort ();
3822}
3823
3824/* Return 1 if the value in reload reg REGNO, as used by a reload
3825 needed for the part of the insn specified by WHEN_NEEDED,
3826 is not in use for a reload in any prior part of the insn.
3827
3828 We can assume that the reload reg was already tested for availability
3829 at the time it is needed, and we should not check this again,
3830 in case the reg has already been marked in use. */
3831
3832static int
3833reload_reg_free_before_p (regno, when_needed)
3834 int regno;
3835 enum reload_when_needed when_needed;
3836{
3837 switch (when_needed)
3838 {
3839 case RELOAD_OTHER:
3840 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
3841 its use starts from the beginning, so nothing can use it earlier. */
3842 return 1;
3843
3844 /* If this use is for part of the insn,
3845 check the reg is not in use for any prior part. */
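      /* (The cases below fall through deliberately: each later part of the
	 insn adds its own conflict test and then inherits the tests for all
	 earlier parts.)  */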
3846 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3847 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
3848 return 0;
3849 case RELOAD_FOR_OUTPUT:
3850 if (TEST_HARD_REG_BIT (reload_reg_used_in_input, regno))
3851 return 0;
3852 case RELOAD_FOR_OPERAND_ADDRESS:
3853 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno))
3854 return 0;
3855 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3856 case RELOAD_FOR_INPUT:
3857 return 1;
3858 }
3859 abort ();
3860}
3861
3862/* Return 1 if the value in reload reg REGNO, as used by a reload
3863 needed for the part of the insn specified by WHEN_NEEDED,
3864 is still available in REGNO at the end of the insn.
3865
3866 We can assume that the reload reg was already tested for availability
3867 at the time it is needed, and we should not check this again,
3868 in case the reg has already been marked in use. */
3869
3870static int
3871reload_reg_reaches_end_p (regno, when_needed)
3872 int regno;
3873 enum reload_when_needed when_needed;
3874{
3875 switch (when_needed)
3876 {
3877 case RELOAD_OTHER:
3878 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
3879 its value must reach the end. */
3880 return 1;
3881
3882 /* If this use is for part of the insn,
3883 its value reaches if no subsequent part uses the same register. */
3884 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3885 case RELOAD_FOR_INPUT:
3886 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3887 || TEST_HARD_REG_BIT (reload_reg_used_in_output, regno))
3888 return 0;
3889 case RELOAD_FOR_OPERAND_ADDRESS:
3890 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno))
3891 return 0;
3892 case RELOAD_FOR_OUTPUT:
3893 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3894 return 1;
3895 }
3896 abort ();
3897}
3898\f
3899/* Vector of reload-numbers showing the order in which the reloads should
3900 be processed. */
3901short reload_order[MAX_RELOADS];
3902
3903/* Indexed by reload number, 1 if incoming value
3904 inherited from previous insns. */
3905char reload_inherited[MAX_RELOADS];
3906
3907/* For an inherited reload, this is the insn the reload was inherited from,
3908 if we know it. Otherwise, this is 0. */
3909rtx reload_inheritance_insn[MAX_RELOADS];
3910
3911/* If non-zero, this is a place to get the value of the reload,
3912 rather than using reload_in. */
3913rtx reload_override_in[MAX_RELOADS];
3914
3915/* For each reload, the index in spill_regs of the spill register used,
3916 or -1 if we did not need one of the spill registers for this reload. */
3917int reload_spill_index[MAX_RELOADS];
3918
3919/* Index of last register assigned as a spill register. We allocate in
 3920    a round-robin fashion.  */
3921
 3922 static int last_spill_reg = 0;
3923
3924/* Find a spill register to use as a reload register for reload R.
3925 LAST_RELOAD is non-zero if this is the last reload for the insn being
3926 processed.
3927
3928 Set reload_reg_rtx[R] to the register allocated.
3929
3930 If NOERROR is nonzero, we return 1 if successful,
3931 or 0 if we couldn't find a spill reg and we didn't change anything. */
3932
3933static int
3934allocate_reload_reg (r, insn, last_reload, noerror)
3935 int r;
3936 rtx insn;
3937 int last_reload;
3938 int noerror;
3939{
3940 int i;
3941 int pass;
3942 int count;
3943 rtx new;
3944 int regno;
3945
3946 /* If we put this reload ahead, thinking it is a group,
3947 then insist on finding a group. Otherwise we can grab a
 3948      reg that some other reload needs.
3949 (That can happen when we have a 68000 DATA_OR_FP_REG
3950 which is a group of data regs or one fp reg.)
3951 We need not be so restrictive if there are no more reloads
3952 for this insn.
3953
3954 ??? Really it would be nicer to have smarter handling
3955 for that kind of reg class, where a problem like this is normal.
3956 Perhaps those classes should be avoided for reloading
3957 by use of more alternatives. */
3958
3959 int force_group = reload_nregs[r] > 1 && ! last_reload;
3960
3961 /* If we want a single register and haven't yet found one,
3962 take any reg in the right class and not in use.
3963 If we want a consecutive group, here is where we look for it.
3964
3965 We use two passes so we can first look for reload regs to
3966 reuse, which are already in use for other reloads in this insn,
3967 and only then use additional registers.
3968 I think that maximizing reuse is needed to make sure we don't
3969 run out of reload regs. Suppose we have three reloads, and
3970 reloads A and B can share regs. These need two regs.
3971 Suppose A and B are given different regs.
3972 That leaves none for C. */
3973 for (pass = 0; pass < 2; pass++)
3974 {
3975 /* I is the index in spill_regs.
3976 We advance it round-robin between insns to use all spill regs
3977 equally, so that inherited reloads have a chance
3978 of leapfrogging each other. */
3979
3980 for (count = 0, i = last_spill_reg; count < n_spills; count++)
3981 {
3982 int class = (int) reload_reg_class[r];
3983
3984 i = (i + 1) % n_spills;
3985
3986 if (reload_reg_free_p (spill_regs[i], reload_when_needed[r])
3987 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
3988 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
3989 /* Look first for regs to share, then for unshared. */
3990 && (pass || TEST_HARD_REG_BIT (reload_reg_used_at_all,
3991 spill_regs[i])))
3992 {
3993 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
3994 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
3995 (on 68000) got us two FP regs. If NR is 1,
3996 we would reject both of them. */
3997 if (force_group)
3998 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
3999 /* If we need only one reg, we have already won. */
4000 if (nr == 1)
4001 {
4002 /* But reject a single reg if we demand a group. */
4003 if (force_group)
4004 continue;
4005 break;
4006 }
4007 /* Otherwise check that as many consecutive regs as we need
4008 are available here.
4009 Also, don't use for a group registers that are
4010 needed for nongroups. */
4011 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4012 while (nr > 1)
4013 {
4014 regno = spill_regs[i] + nr - 1;
4015 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4016 && spill_reg_order[regno] >= 0
4017 && reload_reg_free_p (regno, reload_when_needed[r])
4018 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4019 regno)))
4020 break;
4021 nr--;
4022 }
4023 if (nr == 1)
4024 break;
4025 }
4026 }
4027
4028 /* If we found something on pass 1, omit pass 2. */
4029 if (count < n_spills)
4030 break;
4031 }
4032
4033 /* We should have found a spill register by now. */
4034 if (count == n_spills)
4035 {
4036 if (noerror)
4037 return 0;
4038 abort ();
4039 }
4040
4041 last_spill_reg = i;
4042
4043 /* Mark as in use for this insn the reload regs we use for this. */
4044 mark_reload_reg_in_use (spill_regs[i], reload_when_needed[r],
4045 reload_mode[r]);
4046
4047 new = spill_reg_rtx[i];
4048
4049 if (new == 0 || GET_MODE (new) != reload_mode[r])
4050 spill_reg_rtx[i] = new = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4051
4052 reload_reg_rtx[r] = new;
4053 reload_spill_index[r] = i;
4054 regno = true_regnum (new);
4055
4056 /* Detect when the reload reg can't hold the reload mode.
 4057      This used to be one `if', but the Sequent compiler can't handle that.  */
4058 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4059 {
4060 enum machine_mode test_mode = VOIDmode;
4061 if (reload_in[r])
4062 test_mode = GET_MODE (reload_in[r]);
4063 /* If reload_in[r] has VOIDmode, it means we will load it
4064 in whatever mode the reload reg has: to wit, reload_mode[r].
4065 We have already tested that for validity. */
4066 /* Aside from that, we need to test that the expressions
4067 to reload from or into have modes which are valid for this
4068 reload register. Otherwise the reload insns would be invalid. */
4069 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4070 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4071 if (! (reload_out[r] != 0
4072 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4073 /* The reg is OK. */
4074 return 1;
4075 }
4076
4077 /* The reg is not OK. */
4078 if (noerror)
4079 return 0;
4080
4081 if (asm_noperands (PATTERN (insn)) < 0)
4082 /* It's the compiler's fault. */
4083 abort ();
4084
4085 /* It's the user's fault; the operand's mode and constraint
4086 don't match. Disable this reload so we don't crash in final. */
4087 error_for_asm (insn,
4088 "`asm' operand constraint incompatible with operand size");
4089 reload_in[r] = 0;
4090 reload_out[r] = 0;
4091 reload_reg_rtx[r] = 0;
4092 reload_optional[r] = 1;
4093 reload_secondary_p[r] = 1;
4094
4095 return 1;
4096}
4097\f
4098/* Assign hard reg targets for the pseudo-registers we must reload
4099 into hard regs for this insn.
4100 Also output the instructions to copy them in and out of the hard regs.
4101
4102 For machines with register classes, we are responsible for
4103 finding a reload reg in the proper class. */
4104
4105static void
4106choose_reload_regs (insn, avoid_return_reg)
4107 rtx insn;
4108 /* This argument is currently ignored. */
4109 rtx avoid_return_reg;
4110{
4111 register int i, j;
4112 int max_group_size = 1;
4113 enum reg_class group_class = NO_REGS;
4114 int inheritance;
4115
4116 rtx save_reload_reg_rtx[MAX_RELOADS];
4117 char save_reload_inherited[MAX_RELOADS];
4118 rtx save_reload_inheritance_insn[MAX_RELOADS];
4119 rtx save_reload_override_in[MAX_RELOADS];
4120 int save_reload_spill_index[MAX_RELOADS];
4121 HARD_REG_SET save_reload_reg_used;
4122 HARD_REG_SET save_reload_reg_used_in_input_addr;
4123 HARD_REG_SET save_reload_reg_used_in_output_addr;
4124 HARD_REG_SET save_reload_reg_used_in_op_addr;
4125 HARD_REG_SET save_reload_reg_used_in_input;
4126 HARD_REG_SET save_reload_reg_used_in_output;
4127 HARD_REG_SET save_reload_reg_used_at_all;
4128
4129 bzero (reload_inherited, MAX_RELOADS);
4130 bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4131 bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));
4132
4133 CLEAR_HARD_REG_SET (reload_reg_used);
4134 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4135 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr);
4136 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr);
4137 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4138 CLEAR_HARD_REG_SET (reload_reg_used_in_output);
4139 CLEAR_HARD_REG_SET (reload_reg_used_in_input);
4140
4141 /* Distinguish output-only and input-only reloads
4142 because they can overlap with other things. */
4143 for (j = 0; j < n_reloads; j++)
4144 if (reload_when_needed[j] == RELOAD_OTHER
4145 && ! reload_needed_for_multiple[j])
4146 {
4147 if (reload_in[j] == 0)
4148 {
4149 /* But earlyclobber operands must stay as RELOAD_OTHER. */
4150 for (i = 0; i < n_earlyclobbers; i++)
4151 if (rtx_equal_p (reload_out[j], reload_earlyclobbers[i]))
4152 break;
4153 if (i == n_earlyclobbers)
4154 reload_when_needed[j] = RELOAD_FOR_OUTPUT;
4155 }
4156 if (reload_out[j] == 0)
4157 reload_when_needed[j] = RELOAD_FOR_INPUT;
4158
4159 if (reload_secondary_reload[j] >= 0
4160 && ! reload_needed_for_multiple[reload_secondary_reload[j]])
4161 reload_when_needed[reload_secondary_reload[j]]
4162 = reload_when_needed[j];
4163 }
4164
4165#ifdef SMALL_REGISTER_CLASSES
4166 /* Don't bother with avoiding the return reg
4167 if we have no mandatory reload that could use it. */
4168 if (avoid_return_reg)
4169 {
4170 int do_avoid = 0;
4171 int regno = REGNO (avoid_return_reg);
4172 int nregs
4173 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4174 int r;
4175
4176 for (r = regno; r < regno + nregs; r++)
4177 if (spill_reg_order[r] >= 0)
4178 for (j = 0; j < n_reloads; j++)
4179 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4180 && (reload_in[j] != 0 || reload_out[j] != 0
4181 || reload_secondary_p[j])
4182 &&
4183 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4184 do_avoid = 1;
4185 if (!do_avoid)
4186 avoid_return_reg = 0;
4187 }
4188#endif /* SMALL_REGISTER_CLASSES */
4189
4190#if 0 /* Not needed, now that we can always retry without inheritance. */
4191 /* See if we have more mandatory reloads than spill regs.
4192 If so, then we cannot risk optimizations that could prevent
 4193      reloads from sharing one spill register.
4194
4195 Since we will try finding a better register than reload_reg_rtx
4196 unless it is equal to reload_in or reload_out, count such reloads. */
4197
4198 {
4199 int tem = 0;
4200#ifdef SMALL_REGISTER_CLASSES
4201 int tem = (avoid_return_reg != 0);
 4202#endif
4203 for (j = 0; j < n_reloads; j++)
4204 if (! reload_optional[j]
4205 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4206 && (reload_reg_rtx[j] == 0
4207 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4208 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4209 tem++;
4210 if (tem > n_spills)
4211 must_reuse = 1;
4212 }
4213#endif
4214
4215#ifdef SMALL_REGISTER_CLASSES
4216 /* Don't use the subroutine call return reg for a reload
4217 if we are supposed to avoid it. */
4218 if (avoid_return_reg)
4219 {
4220 int regno = REGNO (avoid_return_reg);
4221 int nregs
4222 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4223 int r;
4224
4225 for (r = regno; r < regno + nregs; r++)
4226 if (spill_reg_order[r] >= 0)
4227 SET_HARD_REG_BIT (reload_reg_used, r);
4228 }
4229#endif /* SMALL_REGISTER_CLASSES */
4230
4231 /* In order to be certain of getting the registers we need,
4232 we must sort the reloads into order of increasing register class.
4233 Then our grabbing of reload registers will parallel the process
 4234      that provided the reload registers.
4235
4236 Also note whether any of the reloads wants a consecutive group of regs.
4237 If so, record the maximum size of the group desired and what
4238 register class contains all the groups needed by this insn. */
4239
4240 for (j = 0; j < n_reloads; j++)
4241 {
4242 reload_order[j] = j;
4243 reload_spill_index[j] = -1;
4244
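      /* Choose the widest mode this reload must handle: for a strict-low-part
	 output reload, the mode of the register inside the SUBREG; otherwise
	 whichever of the input and output modes is wider.  */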
4245 reload_mode[j]
4246 = (reload_strict_low[j] && reload_out[j]
4247 ? GET_MODE (SUBREG_REG (reload_out[j]))
4248 : (reload_inmode[j] == VOIDmode
4249 || (GET_MODE_SIZE (reload_outmode[j])
4250 > GET_MODE_SIZE (reload_inmode[j])))
4251 ? reload_outmode[j] : reload_inmode[j]);
4252
4253 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4254
4255 if (reload_nregs[j] > 1)
4256 {
4257 max_group_size = MAX (reload_nregs[j], max_group_size);
4258 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4259 }
4260
4261 /* If we have already decided to use a certain register,
4262 don't use it in another way. */
4263 if (reload_reg_rtx[j])
4264 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]),
4265 reload_when_needed[j], reload_mode[j]);
4266 }
4267
4268 if (n_reloads > 1)
4269 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
4270
4271 bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
4272 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4273 bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
4274 sizeof reload_inheritance_insn);
4275 bcopy (reload_override_in, save_reload_override_in,
4276 sizeof reload_override_in);
4277 bcopy (reload_spill_index, save_reload_spill_index,
4278 sizeof reload_spill_index);
4279 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4280 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
4281 COPY_HARD_REG_SET (save_reload_reg_used_in_output,
4282 reload_reg_used_in_output);
4283 COPY_HARD_REG_SET (save_reload_reg_used_in_input,
4284 reload_reg_used_in_input);
4285 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr,
4286 reload_reg_used_in_input_addr);
4287 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr,
4288 reload_reg_used_in_output_addr);
4289 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4290 reload_reg_used_in_op_addr);
4291
4292 /* Try first with inheritance, then turning it off. */
4293
4294 for (inheritance = 1; inheritance >= 0; inheritance--)
4295 {
4296 /* Process the reloads in order of preference just found.
4297 Beyond this point, subregs can be found in reload_reg_rtx.
4298
4299 This used to look for an existing reloaded home for all
4300 of the reloads, and only then perform any new reloads.
4301 But that could lose if the reloads were done out of reg-class order
4302 because a later reload with a looser constraint might have an old
4303 home in a register needed by an earlier reload with a tighter constraint.
4304
4305 To solve this, we make two passes over the reloads, in the order
4306 described above. In the first pass we try to inherit a reload
4307 from a previous insn. If there is a later reload that needs a
4308 class that is a proper subset of the class being processed, we must
4309 also allocate a spill register during the first pass.
4310
4311 Then make a second pass over the reloads to allocate any reloads
4312 that haven't been given registers yet. */
4313
4314 for (j = 0; j < n_reloads; j++)
4315 {
4316 register int r = reload_order[j];
4317
4318 /* Ignore reloads that got marked inoperative. */
4319 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4320 continue;
4321
 4322       /* If find_reloads chose to use reload_in or reload_out as a reload
 4323 	 register, we don't need to choose one.  Otherwise, try even if it found
4324 one since we might save an insn if we find the value lying around. */
4325 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
4326 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
4327 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
4328 continue;
4329
4330#if 0 /* No longer needed for correct operation.
4331 It might give better code, or might not; worth an experiment? */
4332 /* If this is an optional reload, we can't inherit from earlier insns
4333 until we are sure that any non-optional reloads have been allocated.
4334 The following code takes advantage of the fact that optional reloads
4335 are at the end of reload_order. */
4336 if (reload_optional[r] != 0)
4337 for (i = 0; i < j; i++)
4338 if ((reload_out[reload_order[i]] != 0
4339 || reload_in[reload_order[i]] != 0
4340 || reload_secondary_p[reload_order[i]])
4341 && ! reload_optional[reload_order[i]]
4342 && reload_reg_rtx[reload_order[i]] == 0)
4343 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
4344#endif
4345
4346 /* First see if this pseudo is already available as reloaded
4347 for a previous insn. We cannot try to inherit for reloads
4348 that are smaller than the maximum number of registers needed
4349 for groups unless the register we would allocate cannot be used
4350 for the groups.
4351
4352 We could check here to see if this is a secondary reload for
4353 an object that is already in a register of the desired class.
4354 This would avoid the need for the secondary reload register.
4355 But this is complex because we can't easily determine what
4356 objects might want to be loaded via this reload. So let a register
4357 be allocated here. In `emit_reload_insns' we suppress one of the
4358 loads in the case described above. */
4359
4360 if (inheritance)
4361 {
4362 register int regno = -1;
4363
4364 if (reload_in[r] == 0)
4365 ;
4366 else if (GET_CODE (reload_in[r]) == REG)
4367 regno = REGNO (reload_in[r]);
4368 else if (GET_CODE (reload_in_reg[r]) == REG)
4369 regno = REGNO (reload_in_reg[r]);
4370#if 0
4371 /* This won't work, since REGNO can be a pseudo reg number.
4372 Also, it takes much more hair to keep track of all the things
4373 that can invalidate an inherited reload of part of a pseudoreg. */
4374 else if (GET_CODE (reload_in[r]) == SUBREG
4375 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
4376 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
4377#endif
4378
4379 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
4380 {
4381 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
4382
4383 if (reg_reloaded_contents[i] == regno
4384 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4385 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4386 spill_regs[i])
4387 && (reload_nregs[r] == max_group_size
4388 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
4389 spill_regs[i]))
4390 && reload_reg_free_p (spill_regs[i], reload_when_needed[r])
4391 && reload_reg_free_before_p (spill_regs[i],
4392 reload_when_needed[r]))
4393 {
4394 /* If a group is needed, verify that all the subsequent
4395 registers still have their values intact. */
4396 int nr
4397 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4398 int k;
4399
4400 for (k = 1; k < nr; k++)
4401 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
4402 != regno)
4403 break;
4404
4405 if (k == nr)
4406 {
4407 /* Mark the register as in use for this part of
4408 the insn. */
4409 mark_reload_reg_in_use (spill_regs[i],
4410 reload_when_needed[r],
4411 reload_mode[r]);
4412 reload_reg_rtx[r] = reg_last_reload_reg[regno];
4413 reload_inherited[r] = 1;
4414 reload_inheritance_insn[r] = reg_reloaded_insn[i];
4415 reload_spill_index[r] = i;
4416 }
4417 }
4418 }
4419 }
4420
4421 /* Here's another way to see if the value is already lying around. */
4422 if (inheritance
4423 && reload_in[r] != 0
4424 && ! reload_inherited[r]
4425 && reload_out[r] == 0
4426 && (CONSTANT_P (reload_in[r])
4427 || GET_CODE (reload_in[r]) == PLUS
4428 || GET_CODE (reload_in[r]) == REG
4429 || GET_CODE (reload_in[r]) == MEM)
4430 && (reload_nregs[r] == max_group_size
4431 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
4432 {
4433 register rtx equiv
4434 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
 4435 			     -1, NULL_PTR, 0, reload_mode[r]);
4436 int regno;
4437
4438 if (equiv != 0)
4439 {
4440 if (GET_CODE (equiv) == REG)
4441 regno = REGNO (equiv);
4442 else if (GET_CODE (equiv) == SUBREG)
4443 {
4444 regno = REGNO (SUBREG_REG (equiv));
4445 if (regno < FIRST_PSEUDO_REGISTER)
4446 regno += SUBREG_WORD (equiv);
4447 }
4448 else
4449 abort ();
4450 }
4451
4452 /* If we found a spill reg, reject it unless it is free
4453 and of the desired class. */
4454 if (equiv != 0
4455 && ((spill_reg_order[regno] >= 0
4456 && ! reload_reg_free_before_p (regno,
4457 reload_when_needed[r]))
4458 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4459 regno)))
4460 equiv = 0;
4461
4462 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
4463 equiv = 0;
4464
4465 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4466 equiv = 0;
4467
4468 /* We found a register that contains the value we need.
4469 If this register is the same as an `earlyclobber' operand
4470 of the current insn, just mark it as a place to reload from
4471 since we can't use it as the reload register itself. */
4472
4473 if (equiv != 0)
4474 for (i = 0; i < n_earlyclobbers; i++)
4475 if (reg_overlap_mentioned_for_reload_p (equiv,
4476 reload_earlyclobbers[i]))
4477 {
4478 reload_override_in[r] = equiv;
4479 equiv = 0;
4480 break;
4481 }
4482
4483 /* JRV: If the equiv register we have found is explicitly
4484 clobbered in the current insn, mark but don't use, as above. */
4485
4486 if (equiv != 0 && regno_clobbered_p (regno, insn))
4487 {
4488 reload_override_in[r] = equiv;
4489 equiv = 0;
4490 }
4491
4492 /* If we found an equivalent reg, say no code need be generated
4493 to load it, and use it as our reload reg. */
4494 if (equiv != 0 && regno != FRAME_POINTER_REGNUM)
4495 {
4496 reload_reg_rtx[r] = equiv;
4497 reload_inherited[r] = 1;
4498 /* If it is a spill reg,
4499 mark the spill reg as in use for this insn. */
4500 i = spill_reg_order[regno];
4501 if (i >= 0)
4502 mark_reload_reg_in_use (regno, reload_when_needed[r],
4503 reload_mode[r]);
4504 }
4505 }
4506
4507 /* If we found a register to use already, or if this is an optional
4508 reload, we are done. */
4509 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
4510 continue;
4511
4512#if 0 /* No longer needed for correct operation. Might or might not
4513 give better code on the average. Want to experiment? */
4514
4515 /* See if there is a later reload that has a class different from our
4516 class that intersects our class or that requires less register
4517 than our reload. If so, we must allocate a register to this
4518 reload now, since that reload might inherit a previous reload
4519 and take the only available register in our class. Don't do this
4520 for optional reloads since they will force all previous reloads
4521 to be allocated. Also don't do this for reloads that have been
4522 turned off. */
4523
4524 for (i = j + 1; i < n_reloads; i++)
4525 {
4526 int s = reload_order[i];
4527
4528 if ((reload_in[s] == 0 && reload_out[s] == 0
4529 && ! reload_secondary_p[s])
4530 || reload_optional[s])
4531 continue;
4532
4533 if ((reload_reg_class[s] != reload_reg_class[r]
4534 && reg_classes_intersect_p (reload_reg_class[r],
4535 reload_reg_class[s]))
4536 || reload_nregs[s] < reload_nregs[r])
4537 break;
4538 }
4539
4540 if (i == n_reloads)
4541 continue;
4542
4543 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
4544#endif
4545 }
4546
4547 /* Now allocate reload registers for anything non-optional that
4548 didn't get one yet. */
4549 for (j = 0; j < n_reloads; j++)
4550 {
4551 register int r = reload_order[j];
4552
4553 /* Ignore reloads that got marked inoperative. */
4554 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4555 continue;
4556
4557 /* Skip reloads that already have a register allocated or are
4558 optional. */
4559 if (reload_reg_rtx[r] != 0 || reload_optional[r])
4560 continue;
4561
4562 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
4563 break;
4564 }
4565
4566 /* If that loop got all the way, we have won. */
4567 if (j == n_reloads)
4568 break;
4569
4570 fail:
4571 /* Loop around and try without any inheritance. */
4572 /* First undo everything done by the failed attempt
4573 to allocate with inheritance. */
4574 bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
4575 bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
4576 bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
4577 sizeof reload_inheritance_insn);
4578 bcopy (save_reload_override_in, reload_override_in,
4579 sizeof reload_override_in);
4580 bcopy (save_reload_spill_index, reload_spill_index,
4581 sizeof reload_spill_index);
4582 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
4583 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
4584 COPY_HARD_REG_SET (reload_reg_used_in_input,
4585 save_reload_reg_used_in_input);
4586 COPY_HARD_REG_SET (reload_reg_used_in_output,
4587 save_reload_reg_used_in_output);
4588 COPY_HARD_REG_SET (reload_reg_used_in_input_addr,
4589 save_reload_reg_used_in_input_addr);
4590 COPY_HARD_REG_SET (reload_reg_used_in_output_addr,
4591 save_reload_reg_used_in_output_addr);
4592 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
4593 save_reload_reg_used_in_op_addr);
4594 }
4595
4596 /* If we thought we could inherit a reload, because it seemed that
4597 nothing else wanted the same reload register earlier in the insn,
4598 verify that assumption, now that all reloads have been assigned. */
4599
4600 for (j = 0; j < n_reloads; j++)
4601 {
4602 register int r = reload_order[j];
4603
4604 if (reload_inherited[r] && reload_reg_rtx[r] != 0
4605 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
4606 reload_when_needed[r]))
4607 reload_inherited[r] = 0;
4608
4609 /* If we found a better place to reload from,
4610 validate it in the same fashion, if it is a reload reg. */
4611 if (reload_override_in[r]
4612 && (GET_CODE (reload_override_in[r]) == REG
4613 || GET_CODE (reload_override_in[r]) == SUBREG))
4614 {
4615 int regno = true_regnum (reload_override_in[r]);
4616 if (spill_reg_order[regno] >= 0
4617 && ! reload_reg_free_before_p (regno, reload_when_needed[r]))
4618 reload_override_in[r] = 0;
4619 }
4620 }
4621
4622 /* Now that reload_override_in is known valid,
4623 actually override reload_in. */
4624 for (j = 0; j < n_reloads; j++)
4625 if (reload_override_in[j])
4626 reload_in[j] = reload_override_in[j];
4627
4628 /* If this reload won't be done because it has been cancelled or is
4629 optional and not inherited, clear reload_reg_rtx so other
4630 routines (such as subst_reloads) don't get confused. */
4631 for (j = 0; j < n_reloads; j++)
4632 if ((reload_optional[j] && ! reload_inherited[j])
4633 || (reload_in[j] == 0 && reload_out[j] == 0
4634 && ! reload_secondary_p[j]))
4635 reload_reg_rtx[j] = 0;
4636
4637 /* Record which pseudos and which spill regs have output reloads. */
4638 for (j = 0; j < n_reloads; j++)
4639 {
4640 register int r = reload_order[j];
4641
4642 i = reload_spill_index[r];
4643
4644 /* I is nonneg if this reload used one of the spill regs.
4645 If reload_reg_rtx[r] is 0, this is an optional reload
4646 that we opted to ignore. */
4647 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
4648 && reload_reg_rtx[r] != 0)
4649 {
4650 register int nregno = REGNO (reload_out[r]);
4651 int nr = 1;
4652
4653 if (nregno < FIRST_PSEUDO_REGISTER)
4654 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
4655
4656 while (--nr >= 0)
4657 reg_has_output_reload[nregno + nr] = 1;
4658
4659 if (i >= 0)
 4660 	    {
4661 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4662 while (--nr >= 0)
4663 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
4664 }
4665
4666 if (reload_when_needed[r] != RELOAD_OTHER
4667 && reload_when_needed[r] != RELOAD_FOR_OUTPUT)
4668 abort ();
4669 }
4670 }
4671}
4672\f
4673/* Output insns to reload values in and out of the chosen reload regs. */
4674
4675static void
4676emit_reload_insns (insn)
4677 rtx insn;
4678{
4679 register int j;
4680 rtx following_insn = NEXT_INSN (insn);
 4681   rtx before_insn = insn;
4682 rtx first_output_reload_insn = NEXT_INSN (insn);
4683 rtx first_other_reload_insn = insn;
4684 rtx first_operand_address_reload_insn = insn;
4685 int special;
4686 /* Values to be put in spill_reg_store are put here first. */
4687 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
4688
 4689   /* If this is a CALL_INSN preceded by USE insns, any reload insns
4690 must go in front of the first USE insn, not in front of INSN. */
4691
4692 if (GET_CODE (insn) == CALL_INSN && GET_CODE (PREV_INSN (insn)) == INSN
4693 && GET_CODE (PATTERN (PREV_INSN (insn))) == USE)
4694 while (GET_CODE (PREV_INSN (before_insn)) == INSN
4695 && GET_CODE (PATTERN (PREV_INSN (before_insn))) == USE)
4696 first_other_reload_insn = first_operand_address_reload_insn
4697 = before_insn = PREV_INSN (before_insn);
4698
32131a9c
RK
4699 /* Now output the instructions to copy the data into and out of the
4700 reload registers. Do these in the order that the reloads were reported,
4701 since reloads of base and index registers precede reloads of operands
4702 and the operands may need the base and index registers reloaded. */
4703
4704 for (j = 0; j < n_reloads; j++)
4705 {
4706 register rtx old;
4707 rtx oldequiv_reg = 0;
4708 rtx this_reload_insn = 0;
4709 rtx store_insn = 0;
4710
4711 old = reload_in[j];
4712 if (old != 0 && ! reload_inherited[j]
4713 && ! rtx_equal_p (reload_reg_rtx[j], old)
4714 && reload_reg_rtx[j] != 0)
4715 {
4716 register rtx reloadreg = reload_reg_rtx[j];
4717 rtx oldequiv = 0;
4718 enum machine_mode mode;
4719 rtx where;
 4720 	  rtx reload_insn;
4721
4722 /* Determine the mode to reload in.
4723 This is very tricky because we have three to choose from.
4724 There is the mode the insn operand wants (reload_inmode[J]).
4725 There is the mode of the reload register RELOADREG.
4726 There is the intrinsic mode of the operand, which we could find
4727 by stripping some SUBREGs.
4728 It turns out that RELOADREG's mode is irrelevant:
4729 we can change that arbitrarily.
4730
4731 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
4732 then the reload reg may not support QImode moves, so use SImode.
4733 If foo is in memory due to spilling a pseudo reg, this is safe,
4734 because the QImode value is in the least significant part of a
4735 slot big enough for a SImode. If foo is some other sort of
4736 memory reference, then it is impossible to reload this case,
4737 so previous passes had better make sure this never happens.
4738
4739 Then consider a one-word union which has SImode and one of its
4740 members is a float, being fetched as (SUBREG:SF union:SI).
4741 We must fetch that as SFmode because we could be loading into
4742 a float-only register. In this case OLD's mode is correct.
4743
4744 Consider an immediate integer: it has VOIDmode. Here we need
4745 to get a mode from something else.
4746
4747 In some cases, there is a fourth mode, the operand's
4748 containing mode. If the insn specifies a containing mode for
4749 this operand, it overrides all others.
4750
4751 I am not sure whether the algorithm here is always right,
4752 but it does the right things in those cases. */
4753
4754 mode = GET_MODE (old);
4755 if (mode == VOIDmode)
4756 mode = reload_inmode[j];
4757 if (reload_strict_low[j])
4758 mode = GET_MODE (SUBREG_REG (reload_in[j]));
4759
4760#ifdef SECONDARY_INPUT_RELOAD_CLASS
4761 /* If we need a secondary register for this operation, see if
4762 the value is already in a register in that class. Don't
4763 do this if the secondary register will be used as a scratch
4764 register. */
4765
4766 if (reload_secondary_reload[j] >= 0
4767 && reload_secondary_icode[j] == CODE_FOR_nothing)
4768 oldequiv
4769 = find_equiv_reg (old, insn,
4770 reload_reg_class[reload_secondary_reload[j]],
fb3821f7 4771 -1, NULL_PTR, 0, mode);
32131a9c
RK
4772#endif
4773
4774 /* If reloading from memory, see if there is a register
4775 that already holds the same value. If so, reload from there.
4776 We can pass 0 as the reload_reg_p argument because
4777 any other reload has either already been emitted,
4778 in which case find_equiv_reg will see the reload-insn,
4779 or has yet to be emitted, in which case it doesn't matter
4780 because we will use this equiv reg right away. */
4781
4782 if (oldequiv == 0
4783 && (GET_CODE (old) == MEM
4784 || (GET_CODE (old) == REG
4785 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4786 && reg_renumber[REGNO (old)] < 0)))
4787 oldequiv = find_equiv_reg (old, insn, GENERAL_REGS,
fb3821f7 4788 -1, NULL_PTR, 0, mode);
32131a9c
RK
4789
4790 if (oldequiv)
4791 {
4792 int regno = true_regnum (oldequiv);
4793
4794 /* If OLDEQUIV is a spill register, don't use it for this
4795 if any other reload needs it at an earlier stage of this insn
a8fdc208 4796 or at this stage. */
32131a9c
RK
4797 if (spill_reg_order[regno] >= 0
4798 && (! reload_reg_free_p (regno, reload_when_needed[j])
4799 || ! reload_reg_free_before_p (regno,
4800 reload_when_needed[j])))
4801 oldequiv = 0;
4802
4803 /* If OLDEQUIV is not a spill register,
4804 don't use it if any other reload wants it. */
4805 if (spill_reg_order[regno] < 0)
4806 {
4807 int k;
4808 for (k = 0; k < n_reloads; k++)
4809 if (reload_reg_rtx[k] != 0 && k != j
bfa30b22
RK
4810 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
4811 oldequiv))
32131a9c
RK
4812 {
4813 oldequiv = 0;
4814 break;
4815 }
4816 }
4817 }
4818
4819 if (oldequiv == 0)
4820 oldequiv = old;
4821 else if (GET_CODE (oldequiv) == REG)
4822 oldequiv_reg = oldequiv;
4823 else if (GET_CODE (oldequiv) == SUBREG)
4824 oldequiv_reg = SUBREG_REG (oldequiv);
4825
4826 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
4827 then load RELOADREG from OLDEQUIV. */
4828
4829 if (GET_MODE (reloadreg) != mode)
4830 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
4831 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
4832 oldequiv = SUBREG_REG (oldequiv);
4833 if (GET_MODE (oldequiv) != VOIDmode
4834 && mode != GET_MODE (oldequiv))
4835 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
4836
4837 /* Decide where to put reload insn for this reload. */
4838 switch (reload_when_needed[j])
4839 {
4840 case RELOAD_FOR_INPUT:
4841 case RELOAD_OTHER:
4842 where = first_operand_address_reload_insn;
4843 break;
4844 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
4845 where = first_other_reload_insn;
4846 break;
4847 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
4848 where = first_output_reload_insn;
4849 break;
4850 case RELOAD_FOR_OPERAND_ADDRESS:
a8efe40d 4851 where = before_insn;
32131a9c
RK
4852 }
4853
4854 special = 0;
4855
4856 /* Auto-increment addresses must be reloaded in a special way. */
4857 if (GET_CODE (oldequiv) == POST_INC
4858 || GET_CODE (oldequiv) == POST_DEC
4859 || GET_CODE (oldequiv) == PRE_INC
4860 || GET_CODE (oldequiv) == PRE_DEC)
4861 {
 4862			/* We are not going to bother supporting the case where an
4863 incremented register can't be copied directly from
4864 OLDEQUIV since this seems highly unlikely. */
4865 if (reload_secondary_reload[j] >= 0)
4866 abort ();
4867 /* Prevent normal processing of this reload. */
4868 special = 1;
4869 /* Output a special code sequence for this case. */
4870 this_reload_insn
4871 = inc_for_reload (reloadreg, oldequiv, reload_inc[j], where);
4872 }
4873
4874 /* If we are reloading a pseudo-register that was set by the previous
4875 insn, see if we can get rid of that pseudo-register entirely
4876 by redirecting the previous insn into our reload register. */
4877
4878 else if (optimize && GET_CODE (old) == REG
4879 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4880 && dead_or_set_p (insn, old)
4881 /* This is unsafe if some other reload
4882 uses the same reg first. */
4883 && (reload_when_needed[j] == RELOAD_OTHER
4884 || reload_when_needed[j] == RELOAD_FOR_INPUT
4885 || reload_when_needed[j] == RELOAD_FOR_INPUT_RELOAD_ADDRESS))
4886 {
4887 rtx temp = PREV_INSN (insn);
4888 while (temp && GET_CODE (temp) == NOTE)
4889 temp = PREV_INSN (temp);
4890 if (temp
4891 && GET_CODE (temp) == INSN
4892 && GET_CODE (PATTERN (temp)) == SET
4893 && SET_DEST (PATTERN (temp)) == old
4894 /* Make sure we can access insn_operand_constraint. */
4895 && asm_noperands (PATTERN (temp)) < 0
4896 /* This is unsafe if prev insn rejects our reload reg. */
4897 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
4898 reloadreg)
4899 /* This is unsafe if operand occurs more than once in current
4900 insn. Perhaps some occurrences aren't reloaded. */
4901 && count_occurrences (PATTERN (insn), old) == 1
4902 /* Don't risk splitting a matching pair of operands. */
4903 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
4904 {
4905 /* Store into the reload register instead of the pseudo. */
4906 SET_DEST (PATTERN (temp)) = reloadreg;
4907 /* If these are the only uses of the pseudo reg,
4908 pretend for GDB it lives in the reload reg we used. */
4909 if (reg_n_deaths[REGNO (old)] == 1
4910 && reg_n_sets[REGNO (old)] == 1)
4911 {
4912 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
4913 alter_reg (REGNO (old), -1);
4914 }
4915 special = 1;
4916 }
4917 }
4918
4919 /* We can't do that, so output an insn to load RELOADREG.
4920 Keep them in the following order:
4921 all reloads for input reload addresses,
4922 all reloads for ordinary input operands,
4923 all reloads for addresses of non-reloaded operands,
4924 the insn being reloaded,
4925 all reloads for addresses of output reloads,
4926 the output reloads. */
4927 if (! special)
4928 {
4929#ifdef SECONDARY_INPUT_RELOAD_CLASS
4930 rtx second_reload_reg = 0;
4931 enum insn_code icode;
4932
4933 /* If we have a secondary reload, pick up the secondary register
d445b551
RK
4934 and icode, if any. If OLDEQUIV and OLD are different or
4935 if this is an in-out reload, recompute whether or not we
4936 still need a secondary register and what the icode should
4937 be. If we still need a secondary register and the class or
4938 icode is different, go back to reloading from OLD if using
4939 OLDEQUIV means that we got the wrong type of register. We
4940 cannot have different class or icode due to an in-out reload
4941 because we don't make such reloads when both the input and
4942 output need secondary reload registers. */
32131a9c
RK
4943
4944 if (reload_secondary_reload[j] >= 0)
4945 {
4946 int secondary_reload = reload_secondary_reload[j];
1554c2c6
RK
4947 rtx real_oldequiv = oldequiv;
4948 rtx real_old = old;
4949
4950 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
4951 and similarly for OLD.
4952 See comments in find_secondary_reload in reload.c. */
4953 if (GET_CODE (oldequiv) == REG
4954 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
4955 && reg_equiv_mem[REGNO (oldequiv)] != 0)
4956 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
4957
4958 if (GET_CODE (old) == REG
4959 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4960 && reg_equiv_mem[REGNO (old)] != 0)
4961 real_old = reg_equiv_mem[REGNO (old)];
4962
32131a9c
RK
4963 second_reload_reg = reload_reg_rtx[secondary_reload];
4964 icode = reload_secondary_icode[j];
4965
d445b551
RK
4966 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
4967 || (reload_in[j] != 0 && reload_out[j] != 0))
32131a9c
RK
4968 {
4969 enum reg_class new_class
4970 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
1554c2c6 4971 mode, real_oldequiv);
32131a9c
RK
4972
4973 if (new_class == NO_REGS)
4974 second_reload_reg = 0;
4975 else
4976 {
4977 enum insn_code new_icode;
4978 enum machine_mode new_mode;
4979
4980 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
4981 REGNO (second_reload_reg)))
1554c2c6 4982 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
4983 else
4984 {
4985 new_icode = reload_in_optab[(int) mode];
4986 if (new_icode != CODE_FOR_nothing
4987 && ((insn_operand_predicate[(int) new_icode][0]
a8fdc208 4988 && ! ((*insn_operand_predicate[(int) new_icode][0])
32131a9c 4989 (reloadreg, mode)))
a8fdc208
RS
4990 || (insn_operand_predicate[(int) new_icode][1]
4991 && ! ((*insn_operand_predicate[(int) new_icode][1])
1554c2c6 4992 (real_oldequiv, mode)))))
32131a9c
RK
4993 new_icode = CODE_FOR_nothing;
4994
4995 if (new_icode == CODE_FOR_nothing)
4996 new_mode = mode;
4997 else
4998 new_mode = insn_operand_mode[new_icode][2];
4999
5000 if (GET_MODE (second_reload_reg) != new_mode)
5001 {
5002 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5003 new_mode))
1554c2c6 5004 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
5005 else
5006 second_reload_reg
3aaa90c7
MM
5007 = gen_rtx (REG, new_mode,
5008 REGNO (second_reload_reg));
32131a9c
RK
5009 }
5010 }
5011 }
5012 }
5013
5014 /* If we still need a secondary reload register, check
5015 to see if it is being used as a scratch or intermediate
1554c2c6
RK
5016 register and generate code appropriately. If we need
5017 a scratch register, use REAL_OLDEQUIV since the form of
5018 the insn may depend on the actual address if it is
5019 a MEM. */
32131a9c
RK
5020
5021 if (second_reload_reg)
5022 {
5023 if (icode != CODE_FOR_nothing)
5024 {
d445b551 5025 reload_insn = emit_insn_before (GEN_FCN (icode)
1554c2c6
RK
5026 (reloadreg,
5027 real_oldequiv,
d445b551
RK
5028 second_reload_reg),
5029 where);
5030 if (this_reload_insn == 0)
5031 this_reload_insn = reload_insn;
32131a9c
RK
5032 special = 1;
5033 }
5034 else
5035 {
5036 /* See if we need a scratch register to load the
5037 intermediate register (a tertiary reload). */
5038 enum insn_code tertiary_icode
5039 = reload_secondary_icode[secondary_reload];
5040
5041 if (tertiary_icode != CODE_FOR_nothing)
5042 {
5043 rtx third_reload_reg
5044 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5045
d445b551
RK
5046 reload_insn
5047 = emit_insn_before ((GEN_FCN (tertiary_icode)
5048 (second_reload_reg,
1554c2c6 5049 real_oldequiv,
d445b551
RK
5050 third_reload_reg)),
5051 where);
5052 if (this_reload_insn == 0)
5053 this_reload_insn = reload_insn;
32131a9c
RK
5054 }
5055 else
5056 {
d445b551
RK
5057 reload_insn
5058 = gen_input_reload (second_reload_reg,
fe751ebf 5059 oldequiv, where);
d445b551
RK
5060 if (this_reload_insn == 0)
5061 this_reload_insn = reload_insn;
32131a9c
RK
5062 oldequiv = second_reload_reg;
5063 }
5064 }
5065 }
5066 }
5067#endif
5068
5069 if (! special)
d445b551 5070 {
3c3eeea6 5071 reload_insn = gen_input_reload (reloadreg, oldequiv, where);
d445b551
RK
5072 if (this_reload_insn == 0)
5073 this_reload_insn = reload_insn;
5074 }
32131a9c
RK
5075
5076#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5077 /* We may have to make a REG_DEAD note for the secondary reload
5078 register in the insns we just made. Find the last insn that
5079 mentioned the register. */
5080 if (! special && second_reload_reg
5081 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5082 {
5083 rtx prev;
5084
5085 for (prev = where;
5086 prev != PREV_INSN (this_reload_insn);
5087 prev = PREV_INSN (prev))
 5088	      if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
bfa30b22
RK
5089 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5090 PATTERN (prev)))
32131a9c
RK
5091 {
5092 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5093 second_reload_reg,
5094 REG_NOTES (prev));
5095 break;
5096 }
5097 }
5098#endif
5099 }
5100
5101 /* Update where to put other reload insns. */
5102 if (this_reload_insn)
5103 switch (reload_when_needed[j])
5104 {
5105 case RELOAD_FOR_INPUT:
5106 case RELOAD_OTHER:
5107 if (first_other_reload_insn == first_operand_address_reload_insn)
5108 first_other_reload_insn = this_reload_insn;
5109 break;
5110 case RELOAD_FOR_OPERAND_ADDRESS:
a8efe40d 5111 if (first_operand_address_reload_insn == before_insn)
32131a9c 5112 first_operand_address_reload_insn = this_reload_insn;
a8efe40d 5113 if (first_other_reload_insn == before_insn)
32131a9c
RK
5114 first_other_reload_insn = this_reload_insn;
5115 }
5116
5117 /* reload_inc[j] was formerly processed here. */
5118 }
5119
5120 /* Add a note saying the input reload reg
5121 dies in this insn, if anyone cares. */
5122#ifdef PRESERVE_DEATH_INFO_REGNO_P
5123 if (old != 0
5124 && reload_reg_rtx[j] != old
5125 && reload_reg_rtx[j] != 0
5126 && reload_out[j] == 0
5127 && ! reload_inherited[j]
5128 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
5129 {
5130 register rtx reloadreg = reload_reg_rtx[j];
5131
a8fdc208 5132#if 0
32131a9c
RK
5133 /* We can't abort here because we need to support this for sched.c.
5134 It's not terrible to miss a REG_DEAD note, but we should try
5135 to figure out how to do this correctly. */
5136 /* The code below is incorrect for address-only reloads. */
5137 if (reload_when_needed[j] != RELOAD_OTHER
5138 && reload_when_needed[j] != RELOAD_FOR_INPUT)
5139 abort ();
5140#endif
5141
5142 /* Add a death note to this insn, for an input reload. */
5143
5144 if ((reload_when_needed[j] == RELOAD_OTHER
5145 || reload_when_needed[j] == RELOAD_FOR_INPUT)
5146 && ! dead_or_set_p (insn, reloadreg))
5147 REG_NOTES (insn)
5148 = gen_rtx (EXPR_LIST, REG_DEAD,
5149 reloadreg, REG_NOTES (insn));
5150 }
5151
5152 /* When we inherit a reload, the last marked death of the reload reg
5153 may no longer really be a death. */
5154 if (reload_reg_rtx[j] != 0
5155 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
5156 && reload_inherited[j])
5157 {
5158 /* Handle inheriting an output reload.
5159 Remove the death note from the output reload insn. */
5160 if (reload_spill_index[j] >= 0
5161 && GET_CODE (reload_in[j]) == REG
5162 && spill_reg_store[reload_spill_index[j]] != 0
5163 && find_regno_note (spill_reg_store[reload_spill_index[j]],
5164 REG_DEAD, REGNO (reload_reg_rtx[j])))
5165 remove_death (REGNO (reload_reg_rtx[j]),
5166 spill_reg_store[reload_spill_index[j]]);
5167 /* Likewise for input reloads that were inherited. */
5168 else if (reload_spill_index[j] >= 0
5169 && GET_CODE (reload_in[j]) == REG
5170 && spill_reg_store[reload_spill_index[j]] == 0
5171 && reload_inheritance_insn[j] != 0
a8fdc208 5172 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
32131a9c
RK
5173 REGNO (reload_reg_rtx[j])))
5174 remove_death (REGNO (reload_reg_rtx[j]),
5175 reload_inheritance_insn[j]);
5176 else
5177 {
5178 rtx prev;
5179
5180 /* We got this register from find_equiv_reg.
5181 Search back for its last death note and get rid of it.
5182 But don't search back too far.
5183 Don't go past a place where this reg is set,
5184 since a death note before that remains valid. */
5185 for (prev = PREV_INSN (insn);
5186 prev && GET_CODE (prev) != CODE_LABEL;
5187 prev = PREV_INSN (prev))
5188 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5189 && dead_or_set_p (prev, reload_reg_rtx[j]))
5190 {
5191 if (find_regno_note (prev, REG_DEAD,
5192 REGNO (reload_reg_rtx[j])))
5193 remove_death (REGNO (reload_reg_rtx[j]), prev);
5194 break;
5195 }
5196 }
5197 }
5198
5199 /* We might have used find_equiv_reg above to choose an alternate
5200 place from which to reload. If so, and it died, we need to remove
5201 that death and move it to one of the insns we just made. */
5202
5203 if (oldequiv_reg != 0
5204 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
5205 {
5206 rtx prev, prev1;
5207
5208 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
5209 prev = PREV_INSN (prev))
5210 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5211 && dead_or_set_p (prev, oldequiv_reg))
5212 {
5213 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
5214 {
5215 for (prev1 = this_reload_insn;
5216 prev1; prev1 = PREV_INSN (prev1))
 5217		      if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
bfa30b22
RK
5218 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
5219 PATTERN (prev1)))
32131a9c
RK
5220 {
5221 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
5222 oldequiv_reg,
5223 REG_NOTES (prev1));
5224 break;
5225 }
5226 remove_death (REGNO (oldequiv_reg), prev);
5227 }
5228 break;
5229 }
5230 }
5231#endif
5232
5233 /* If we are reloading a register that was recently stored in with an
5234 output-reload, see if we can prove there was
5235 actually no need to store the old value in it. */
5236
5237 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
5238 /* This is unsafe if some other reload uses the same reg first. */
5239 && (reload_when_needed[j] == RELOAD_OTHER
5240 || reload_when_needed[j] == RELOAD_FOR_INPUT
5241 || reload_when_needed[j] == RELOAD_FOR_INPUT_RELOAD_ADDRESS)
5242 && GET_CODE (reload_in[j]) == REG
5243#if 0
5244 /* There doesn't seem to be any reason to restrict this to pseudos
5245 and doing so loses in the case where we are copying from a
5246 register of the wrong class. */
5247 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
5248#endif
5249 && spill_reg_store[reload_spill_index[j]] != 0
5250 && dead_or_set_p (insn, reload_in[j])
5251 /* This is unsafe if operand occurs more than once in current
5252 insn. Perhaps some occurrences weren't reloaded. */
5253 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5254 delete_output_reload (insn, j,
5255 spill_reg_store[reload_spill_index[j]]);
5256
5257 /* Input-reloading is done. Now do output-reloading,
5258 storing the value from the reload-register after the main insn
5259 if reload_out[j] is nonzero.
5260
5261 ??? At some point we need to support handling output reloads of
5262 JUMP_INSNs or insns that set cc0. */
5263 old = reload_out[j];
5264 if (old != 0
5265 && reload_reg_rtx[j] != old
5266 && reload_reg_rtx[j] != 0)
5267 {
5268 register rtx reloadreg = reload_reg_rtx[j];
5269 register rtx second_reloadreg = 0;
5270 rtx prev_insn = PREV_INSN (first_output_reload_insn);
5271 rtx note, p;
5272 enum machine_mode mode;
5273 int special = 0;
5274
5275 /* An output operand that dies right away does need a reload,
5276 but need not be copied from it. Show the new location in the
5277 REG_UNUSED note. */
5278 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
5279 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
5280 {
5281 XEXP (note, 0) = reload_reg_rtx[j];
5282 continue;
5283 }
5284 else if (GET_CODE (old) == SCRATCH)
5285 /* If we aren't optimizing, there won't be a REG_UNUSED note,
5286 but we don't want to make an output reload. */
5287 continue;
5288
5289#if 0
5290 /* Strip off of OLD any size-increasing SUBREGs such as
5291 (SUBREG:SI foo:QI 0). */
5292
5293 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
5294 && (GET_MODE_SIZE (GET_MODE (old))
5295 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
5296 old = SUBREG_REG (old);
5297#endif
5298
 5299	  /* If this is a JUMP_INSN, we can't support output reloads yet.  */
5300 if (GET_CODE (insn) == JUMP_INSN)
5301 abort ();
5302
5303 /* Determine the mode to reload in.
5304 See comments above (for input reloading). */
5305
5306 mode = GET_MODE (old);
5307 if (mode == VOIDmode)
5308 abort (); /* Should never happen for an output. */
5309
5310 /* A strict-low-part output operand needs to be reloaded
5311 in the mode of the entire value. */
5312 if (reload_strict_low[j])
5313 {
5314 mode = GET_MODE (SUBREG_REG (reload_out[j]));
5315 /* Encapsulate OLD into that mode. */
5316 /* If OLD is a subreg, then strip it, since the subreg will
5317 be altered by this very reload. */
5318 while (GET_CODE (old) == SUBREG && GET_MODE (old) != mode)
5319 old = SUBREG_REG (old);
5320 if (GET_MODE (old) != VOIDmode
5321 && mode != GET_MODE (old))
5322 old = gen_rtx (SUBREG, mode, old, 0);
5323 }
5324
5325 if (GET_MODE (reloadreg) != mode)
5326 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5327
5328#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5329
5330 /* If we need two reload regs, set RELOADREG to the intermediate
5331 one, since it will be stored into OUT. We might need a secondary
5332 register only for an input reload, so check again here. */
5333
1554c2c6 5334 if (reload_secondary_reload[j] >= 0)
32131a9c 5335 {
1554c2c6 5336 rtx real_old = old;
32131a9c 5337
1554c2c6
RK
5338 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
5339 && reg_equiv_mem[REGNO (old)] != 0)
5340 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 5341
1554c2c6
RK
 5342	      if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
5343 mode, real_old)
5344 != NO_REGS))
5345 {
5346 second_reloadreg = reloadreg;
5347 reloadreg = reload_reg_rtx[reload_secondary_reload[j]];
32131a9c 5348
1554c2c6
RK
5349 /* See if RELOADREG is to be used as a scratch register
5350 or as an intermediate register. */
5351 if (reload_secondary_icode[j] != CODE_FOR_nothing)
32131a9c 5352 {
1554c2c6
RK
5353 emit_insn_before ((GEN_FCN (reload_secondary_icode[j])
5354 (real_old, second_reloadreg,
5355 reloadreg)),
5356 first_output_reload_insn);
5357 special = 1;
32131a9c
RK
5358 }
5359 else
1554c2c6
RK
5360 {
5361 /* See if we need both a scratch and intermediate reload
5362 register. */
5363 int secondary_reload = reload_secondary_reload[j];
5364 enum insn_code tertiary_icode
5365 = reload_secondary_icode[secondary_reload];
5366 rtx pat;
32131a9c 5367
1554c2c6
RK
5368 if (GET_MODE (reloadreg) != mode)
5369 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5370
5371 if (tertiary_icode != CODE_FOR_nothing)
5372 {
5373 rtx third_reloadreg
5374 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5375 pat = (GEN_FCN (tertiary_icode)
5376 (reloadreg, second_reloadreg, third_reloadreg));
5377 }
9ad5f9f6
JW
5378#ifdef SECONDARY_MEMORY_NEEDED
5379 /* If we need a memory location to do the move, do it that way. */
5380 else if (GET_CODE (reloadreg) == REG
5381 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
5382 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
5383 REGNO_REG_CLASS (REGNO (second_reloadreg)),
5384 GET_MODE (second_reloadreg)))
5385 {
5386 /* Get the memory to use and rewrite both registers
5387 to its mode. */
5388 rtx loc = get_secondary_mem (reloadreg,
5389 GET_MODE (second_reloadreg));
5390 rtx tmp_reloadreg;
5391
5392 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
5393 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
5394 REGNO (second_reloadreg));
5395
5396 if (GET_MODE (loc) != GET_MODE (reloadreg))
5397 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
5398 REGNO (reloadreg));
5399 else
5400 tmp_reloadreg = reloadreg;
5401
5402 emit_insn_before (gen_move_insn (loc, second_reloadreg),
5403 first_output_reload_insn);
5404 pat = gen_move_insn (tmp_reloadreg, loc);
5405 }
5406#endif
1554c2c6
RK
5407 else
5408 pat = gen_move_insn (reloadreg, second_reloadreg);
5409
5410 emit_insn_before (pat, first_output_reload_insn);
5411 }
32131a9c
RK
5412 }
5413 }
5414#endif
5415
5416 /* Output the last reload insn. */
5417 if (! special)
0dadecf6
RK
5418 {
5419#ifdef SECONDARY_MEMORY_NEEDED
5420 /* If we need a memory location to do the move, do it that way. */
5421 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
5422 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
5423 REGNO_REG_CLASS (REGNO (reloadreg)),
5424 GET_MODE (reloadreg)))
5425 {
5426 /* Get the memory to use and rewrite both registers to
5427 its mode. */
5428 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg));
5429
5430 if (GET_MODE (loc) != GET_MODE (reloadreg))
5431 reloadreg = gen_rtx (REG, GET_MODE (loc),
5432 REGNO (reloadreg));
5433
5434 if (GET_MODE (loc) != GET_MODE (old))
5435 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
5436
5437 emit_insn_before (gen_move_insn (loc, reloadreg),
5438 first_output_reload_insn);
5439 emit_insn_before (gen_move_insn (old, loc),
5440 first_output_reload_insn);
5441 }
5442 else
5443#endif
5444 emit_insn_before (gen_move_insn (old, reloadreg),
5445 first_output_reload_insn);
5446 }
32131a9c
RK
5447
5448#ifdef PRESERVE_DEATH_INFO_REGNO_P
5449 /* If final will look at death notes for this reg,
5450 put one on the last output-reload insn to use it. Similarly
5451 for any secondary register. */
5452 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
5453 for (p = PREV_INSN (first_output_reload_insn);
5454 p != prev_insn; p = PREV_INSN (p))
5455 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
5456 && reg_overlap_mentioned_for_reload_p (reloadreg,
5457 PATTERN (p)))
32131a9c
RK
5458 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
5459 reloadreg, REG_NOTES (p));
5460
5461#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5462 if (! special
5463 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
5464 for (p = PREV_INSN (first_output_reload_insn);
5465 p != prev_insn; p = PREV_INSN (p))
5466 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
5467 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
5468 PATTERN (p)))
32131a9c
RK
5469 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
5470 second_reloadreg, REG_NOTES (p));
5471#endif
5472#endif
5473 /* Look at all insns we emitted, just to be safe. */
5474 for (p = NEXT_INSN (prev_insn); p != first_output_reload_insn;
5475 p = NEXT_INSN (p))
5476 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
5477 {
5478 /* If this output reload doesn't come from a spill reg,
5479 clear any memory of reloaded copies of the pseudo reg.
5480 If this output reload comes from a spill reg,
5481 reg_has_output_reload will make this do nothing. */
5482 note_stores (PATTERN (p), forget_old_reloads_1);
5483
5484 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
5485 store_insn = p;
5486 }
5487
5488 first_output_reload_insn = NEXT_INSN (prev_insn);
5489 }
5490
5491 if (reload_spill_index[j] >= 0)
5492 new_spill_reg_store[reload_spill_index[j]] = store_insn;
5493 }
5494
32131a9c
RK
5495 /* Move death notes from INSN
5496 to output-operand-address and output reload insns. */
5497#ifdef PRESERVE_DEATH_INFO_REGNO_P
5498 {
5499 rtx insn1;
5500 /* Loop over those insns, last ones first. */
5501 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
5502 insn1 = PREV_INSN (insn1))
5503 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
5504 {
5505 rtx source = SET_SRC (PATTERN (insn1));
5506 rtx dest = SET_DEST (PATTERN (insn1));
5507
5508 /* The note we will examine next. */
5509 rtx reg_notes = REG_NOTES (insn);
5510 /* The place that pointed to this note. */
5511 rtx *prev_reg_note = &REG_NOTES (insn);
5512
5513 /* If the note is for something used in the source of this
5514 reload insn, or in the output address, move the note. */
5515 while (reg_notes)
5516 {
5517 rtx next_reg_notes = XEXP (reg_notes, 1);
5518 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
5519 && GET_CODE (XEXP (reg_notes, 0)) == REG
5520 && ((GET_CODE (dest) != REG
bfa30b22
RK
5521 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
5522 dest))
5523 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
5524 source)))
32131a9c
RK
5525 {
5526 *prev_reg_note = next_reg_notes;
5527 XEXP (reg_notes, 1) = REG_NOTES (insn1);
5528 REG_NOTES (insn1) = reg_notes;
5529 }
5530 else
5531 prev_reg_note = &XEXP (reg_notes, 1);
5532
5533 reg_notes = next_reg_notes;
5534 }
5535 }
5536 }
5537#endif
5538
5539 /* For all the spill regs newly reloaded in this instruction,
5540 record what they were reloaded from, so subsequent instructions
d445b551
RK
5541 can inherit the reloads.
5542
5543 Update spill_reg_store for the reloads of this insn.
e9e79d69 5544 Copy the elements that were updated in the loop above. */
32131a9c
RK
5545
5546 for (j = 0; j < n_reloads; j++)
5547 {
5548 register int r = reload_order[j];
5549 register int i = reload_spill_index[r];
5550
5551 /* I is nonneg if this reload used one of the spill regs.
5552 If reload_reg_rtx[r] is 0, this is an optional reload
5553 that we opted to ignore. */
d445b551 5554
32131a9c
RK
5555 if (i >= 0 && reload_reg_rtx[r] != 0)
5556 {
5557 /* First, clear out memory of what used to be in this spill reg.
5558 If consecutive registers are used, clear them all. */
5559 int nr
5560 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
5561 int k;
5562
5563 for (k = 0; k < nr; k++)
5564 {
5565 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
5566 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
5567 }
5568
5569 /* Maybe the spill reg contains a copy of reload_out. */
5570 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
5571 {
5572 register int nregno = REGNO (reload_out[r]);
d445b551
RK
5573
5574 spill_reg_store[i] = new_spill_reg_store[i];
32131a9c 5575 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 5576
32131a9c
RK
5577 for (k = 0; k < nr; k++)
5578 {
5579 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5580 = nregno;
5581 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
5582 }
5583 }
d445b551 5584
32131a9c
RK
5585 /* Maybe the spill reg contains a copy of reload_in. */
5586 else if (reload_out[r] == 0
5587 && reload_in[r] != 0
5588 && (GET_CODE (reload_in[r]) == REG
5589 || GET_CODE (reload_in_reg[r]) == REG))
5590 {
5591 register int nregno;
5592 if (GET_CODE (reload_in[r]) == REG)
5593 nregno = REGNO (reload_in[r]);
5594 else
5595 nregno = REGNO (reload_in_reg[r]);
5596
5597 /* If there are two separate reloads (one in and one out)
5598 for the same (hard or pseudo) reg,
a8fdc208 5599 leave reg_last_reload_reg set
32131a9c
RK
5600 based on the output reload.
5601 Otherwise, set it from this input reload. */
5602 if (!reg_has_output_reload[nregno]
5603 /* But don't do so if another input reload
5604 will clobber this one's value. */
5605 && reload_reg_reaches_end_p (spill_regs[i],
5606 reload_when_needed[r]))
5607 {
5608 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551
RK
5609
5610 /* Unless we inherited this reload, show we haven't
5611 recently done a store. */
5612 if (! reload_inherited[r])
5613 spill_reg_store[i] = 0;
5614
32131a9c
RK
5615 for (k = 0; k < nr; k++)
5616 {
5617 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5618 = nregno;
5619 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
5620 = insn;
5621 }
5622 }
5623 }
5624 }
5625
5626 /* The following if-statement was #if 0'd in 1.34 (or before...).
5627 It's reenabled in 1.35 because supposedly nothing else
5628 deals with this problem. */
5629
5630 /* If a register gets output-reloaded from a non-spill register,
5631 that invalidates any previous reloaded copy of it.
5632 But forget_old_reloads_1 won't get to see it, because
5633 it thinks only about the original insn. So invalidate it here. */
5634 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
5635 {
5636 register int nregno = REGNO (reload_out[r]);
5637 reg_last_reload_reg[nregno] = 0;
5638 }
5639 }
5640}
5641\f
5642/* Emit code before BEFORE_INSN to perform an input reload of IN to RELOADREG.
3c3eeea6 5643 Returns first insn emitted. */
32131a9c
RK
5644
5645rtx
3c3eeea6 5646gen_input_reload (reloadreg, in, before_insn)
32131a9c
RK
5647 rtx reloadreg;
5648 rtx in;
5649 rtx before_insn;
5650{
5651 register rtx prev_insn = PREV_INSN (before_insn);
5652
a8fdc208 5653 /* How to do this reload can get quite tricky. Normally, we are being
32131a9c
RK
5654 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
5655 register that didn't get a hard register. In that case we can just
5656 call emit_move_insn.
5657
5658 We can also be asked to reload a PLUS that adds either two registers or
5659 a register and a constant or MEM. This can occur during frame pointer
 5660     elimination.  That case is handled by trying to emit a single insn
5661 to perform the add. If it is not valid, we use a two insn sequence.
5662
5663 Finally, we could be called to handle an 'o' constraint by putting
5664 an address into a register. In that case, we first try to do this
5665 with a named pattern of "reload_load_address". If no such pattern
5666 exists, we just emit a SET insn and hope for the best (it will normally
5667 be valid on machines that use 'o').
5668
5669 This entire process is made complex because reload will never
5670 process the insns we generate here and so we must ensure that
5671 they will fit their constraints and also by the fact that parts of
5672 IN might be being reloaded separately and replaced with spill registers.
5673 Because of this, we are, in some sense, just guessing the right approach
5674 here. The one listed above seems to work.
5675
5676 ??? At some point, this whole thing needs to be rethought. */
5677
5678 if (GET_CODE (in) == PLUS
5679 && GET_CODE (XEXP (in, 0)) == REG
5680 && (GET_CODE (XEXP (in, 1)) == REG
5681 || CONSTANT_P (XEXP (in, 1))
5682 || GET_CODE (XEXP (in, 1)) == MEM))
5683 {
5684 /* We need to compute the sum of what is either a register and a
5685 constant, a register and memory, or a hard register and a pseudo
5686 register and put it into the reload register. The best possible way
5687 of doing this is if the machine has a three-operand ADD insn that
5688 accepts the required operands.
5689
5690 The simplest approach is to try to generate such an insn and see if it
5691 is recognized and matches its constraints. If so, it can be used.
5692
5693 It might be better not to actually emit the insn unless it is valid,
0009eff2 5694 but we need to pass the insn as an operand to `recog' and
b36d7dd7 5695 `insn_extract' and it is simpler to emit and then delete the insn if
0009eff2 5696 not valid than to dummy things up. */
a8fdc208 5697
af929c62 5698 rtx op0, op1, tem, insn;
32131a9c 5699 int code;
a8fdc208 5700
af929c62
RK
5701 op0 = find_replacement (&XEXP (in, 0));
5702 op1 = find_replacement (&XEXP (in, 1));
5703
32131a9c
RK
5704 /* Since constraint checking is strict, commutativity won't be
5705 checked, so we need to do that here to avoid spurious failure
5706 if the add instruction is two-address and the second operand
5707 of the add is the same as the reload reg, which is frequently
5708 the case. If the insn would be A = B + A, rearrange it so
5709 it will be A = A + B as constrain_operands expects. */
a8fdc208 5710
32131a9c
RK
5711 if (GET_CODE (XEXP (in, 1)) == REG
5712 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
af929c62
RK
5713 tem = op0, op0 = op1, op1 = tem;
5714
5715 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
5716 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
32131a9c
RK
5717
5718 insn = emit_insn_before (gen_rtx (SET, VOIDmode, reloadreg, in),
5719 before_insn);
5720 code = recog_memoized (insn);
5721
5722 if (code >= 0)
5723 {
5724 insn_extract (insn);
5725 /* We want constrain operands to treat this insn strictly in
5726 its validity determination, i.e., the way it would after reload
5727 has completed. */
5728 if (constrain_operands (code, 1))
5729 return insn;
5730 }
5731
5732 if (PREV_INSN (insn))
5733 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
5734 if (NEXT_INSN (insn))
5735 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
5736
5737 /* If that failed, we must use a conservative two-insn sequence.
 5738	 Use a move to copy the constant, MEM, or pseudo register to the reload
af929c62
RK
5739 register since "move" will be able to handle an arbitrary operand,
5740 unlike add which can't, in general. Then add the registers.
32131a9c
RK
5741
5742 If there is another way to do this for a specific machine, a
5743 DEFINE_PEEPHOLE should be specified that recognizes the sequence
5744 we emit below. */
5745
af929c62
RK
5746 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
5747 || (GET_CODE (op1) == REG
5748 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
5749 tem = op0, op0 = op1, op1 = tem;
32131a9c 5750
af929c62
RK
5751 emit_insn_before (gen_move_insn (reloadreg, op0), before_insn);
5752 emit_insn_before (gen_add2_insn (reloadreg, op1), before_insn);
32131a9c
RK
5753 }
5754
0dadecf6
RK
5755#ifdef SECONDARY_MEMORY_NEEDED
5756 /* If we need a memory location to do the move, do it that way. */
5757 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
5758 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
5759 REGNO_REG_CLASS (REGNO (reloadreg)),
5760 GET_MODE (reloadreg)))
5761 {
5762 /* Get the memory to use and rewrite both registers to its mode. */
5763 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg));
5764
5765 if (GET_MODE (loc) != GET_MODE (reloadreg))
5766 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
5767
5768 if (GET_MODE (loc) != GET_MODE (in))
5769 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
5770
0dadecf6 5771 emit_insn_before (gen_move_insn (loc, in), before_insn);
58c8c593 5772 emit_insn_before (gen_move_insn (reloadreg, loc), before_insn);
0dadecf6
RK
5773 }
5774#endif
5775
32131a9c
RK
5776 /* If IN is a simple operand, use gen_move_insn. */
5777 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
3c3eeea6 5778 emit_insn_before (gen_move_insn (reloadreg, in), before_insn);
32131a9c
RK
5779
5780#ifdef HAVE_reload_load_address
5781 else if (HAVE_reload_load_address)
3c3eeea6 5782 emit_insn_before (gen_reload_load_address (reloadreg, in), before_insn);
32131a9c
RK
5783#endif
5784
 5785  /* Otherwise, just write (set RELOADREG IN) and hope for the best.  */
5786 else
3c3eeea6 5787 emit_insn_before (gen_rtx (SET, VOIDmode, reloadreg, in), before_insn);
32131a9c
RK
5788
5789 /* Return the first insn emitted.
 5790     We cannot just return PREV_INSN (before_insn), because there may have
5791 been multiple instructions emitted. Also note that gen_move_insn may
 5792     emit more than one insn itself, so we cannot assume that there is one
5793 insn emitted per emit_insn_before call. */
5794
5795 return NEXT_INSN (prev_insn);
5796}
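
/* The emit-then-validate idiom used by gen_input_reload above can be
   summarized by the following minimal sketch.  It is illustrative only;
   the helper name `try_emit_set_before' is hypothetical and does not
   exist in this file.  We emit a tentative (set DEST SRC) before
   BEFORE_INSN, ask recog and constrain_operands whether it is strictly
   valid (as it would have to be after reload), and unlink it again if
   not, so the caller can fall back to a multi-insn sequence.  */

static rtx
try_emit_set_before (dest, src, before_insn)
     rtx dest, src, before_insn;
{
  rtx insn = emit_insn_before (gen_rtx (SET, VOIDmode, dest, src),
			       before_insn);
  int code = recog_memoized (insn);

  if (code >= 0)
    {
      insn_extract (insn);
      /* Strict checking, i.e. the way constrain_operands would judge
	 the insn after reload has completed.  */
      if (constrain_operands (code, 1))
	return insn;
    }

  /* Not recognized, or constraints not satisfied: unlink the tentative
     insn from the chain again.  */
  if (PREV_INSN (insn))
    NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
  if (NEXT_INSN (insn))
    PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);

  return 0;
}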
5797\f
5798/* Delete a previously made output-reload
5799 whose result we now believe is not needed.
5800 First we double-check.
5801
5802 INSN is the insn now being processed.
5803 OUTPUT_RELOAD_INSN is the insn of the output reload.
5804 J is the reload-number for this insn. */
5805
5806static void
5807delete_output_reload (insn, j, output_reload_insn)
5808 rtx insn;
5809 int j;
5810 rtx output_reload_insn;
5811{
5812 register rtx i1;
5813
5814 /* Get the raw pseudo-register referred to. */
5815
5816 rtx reg = reload_in[j];
5817 while (GET_CODE (reg) == SUBREG)
5818 reg = SUBREG_REG (reg);
5819
5820 /* If the pseudo-reg we are reloading is no longer referenced
5821 anywhere between the store into it and here,
5822 and no jumps or labels intervene, then the value can get
5823 here through the reload reg alone.
5824 Otherwise, give up--return. */
5825 for (i1 = NEXT_INSN (output_reload_insn);
5826 i1 != insn; i1 = NEXT_INSN (i1))
5827 {
5828 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
5829 return;
5830 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
5831 && reg_mentioned_p (reg, PATTERN (i1)))
5832 return;
5833 }
5834
5835 /* If this insn will store in the pseudo again,
5836 the previous store can be removed. */
5837 if (reload_out[j] == reload_in[j])
5838 delete_insn (output_reload_insn);
5839
5840 /* See if the pseudo reg has been completely replaced
5841 with reload regs. If so, delete the store insn
5842 and forget we had a stack slot for the pseudo. */
5843 else if (reg_n_deaths[REGNO (reg)] == 1
5844 && reg_basic_block[REGNO (reg)] >= 0
5845 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
5846 {
5847 rtx i2;
5848
5849 /* We know that it was used only between here
5850 and the beginning of the current basic block.
5851 (We also know that the last use before INSN was
5852 the output reload we are thinking of deleting, but never mind that.)
5853 Search that range; see if any ref remains. */
5854 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
5855 {
d445b551
RK
5856 rtx set = single_set (i2);
5857
32131a9c
RK
5858 /* Uses which just store in the pseudo don't count,
5859 since if they are the only uses, they are dead. */
d445b551 5860 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
5861 continue;
5862 if (GET_CODE (i2) == CODE_LABEL
5863 || GET_CODE (i2) == JUMP_INSN)
5864 break;
5865 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
5866 && reg_mentioned_p (reg, PATTERN (i2)))
5867 /* Some other ref remains;
5868 we can't do anything. */
5869 return;
5870 }
5871
5872 /* Delete the now-dead stores into this pseudo. */
5873 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
5874 {
d445b551
RK
5875 rtx set = single_set (i2);
5876
5877 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
5878 delete_insn (i2);
5879 if (GET_CODE (i2) == CODE_LABEL
5880 || GET_CODE (i2) == JUMP_INSN)
5881 break;
5882 }
5883
5884 /* For the debugging info,
5885 say the pseudo lives in this reload reg. */
5886 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
5887 alter_reg (REGNO (reg), -1);
5888 }
5889}
5890
5891\f
a8fdc208 5892/* Output reload-insns to reload VALUE into RELOADREG.
858a47b1 5893 VALUE is an autoincrement or autodecrement RTX whose operand
32131a9c
RK
5894 is a register or memory location;
5895 so reloading involves incrementing that location.
5896
5897 INC_AMOUNT is the number to increment or decrement by (always positive).
5898 This cannot be deduced from VALUE.
5899
5900 INSN is the insn before which the new insns should be emitted.
5901
5902 The return value is the first of the insns emitted. */
5903
5904static rtx
5905inc_for_reload (reloadreg, value, inc_amount, insn)
5906 rtx reloadreg;
5907 rtx value;
5908 int inc_amount;
5909 rtx insn;
5910{
5911 /* REG or MEM to be copied and incremented. */
5912 rtx incloc = XEXP (value, 0);
5913 /* Nonzero if increment after copying. */
5914 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
0009eff2
RK
5915 rtx prev = PREV_INSN (insn);
5916 rtx inc;
5917 rtx add_insn;
5918 int code;
32131a9c
RK
5919
5920 /* No hard register is equivalent to this register after
5921 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
5922 we could inc/dec that register as well (maybe even using it for
5923 the source), but I'm not sure it's worth worrying about. */
5924 if (GET_CODE (incloc) == REG)
5925 reg_last_reload_reg[REGNO (incloc)] = 0;
5926
5927 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
5928 inc_amount = - inc_amount;
5929
fb3821f7 5930 inc = GEN_INT (inc_amount);
0009eff2
RK
5931
5932 /* If this is post-increment, first copy the location to the reload reg. */
5933 if (post)
5934 emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
5935
5936 /* See if we can directly increment INCLOC. Use a method similar to that
5937 in gen_input_reload. */
5938
5939 add_insn = emit_insn_before (gen_rtx (SET, VOIDmode, incloc,
5940 gen_rtx (PLUS, GET_MODE (incloc),
5941 incloc, inc)), insn);
5942
5943 code = recog_memoized (add_insn);
5944 if (code >= 0)
32131a9c 5945 {
0009eff2
RK
5946 insn_extract (add_insn);
5947 if (constrain_operands (code, 1))
32131a9c 5948 {
0009eff2
RK
5949 /* If this is a pre-increment and we have incremented the value
5950 where it lives, copy the incremented value to RELOADREG to
5951 be used as an address. */
5952
5953 if (! post)
5954 emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
5955 return NEXT_INSN (prev);
32131a9c
RK
5956 }
5957 }
0009eff2
RK
5958
5959 if (PREV_INSN (add_insn))
5960 NEXT_INSN (PREV_INSN (add_insn)) = NEXT_INSN (add_insn);
5961 if (NEXT_INSN (add_insn))
5962 PREV_INSN (NEXT_INSN (add_insn)) = PREV_INSN (add_insn);
5963
 5964  /* If we couldn't do the increment directly, we must increment in RELOADREG.
5965 The way we do this depends on whether this is pre- or post-increment.
5966 For pre-increment, copy INCLOC to the reload register, increment it
5967 there, then save back. */
5968
5969 if (! post)
5970 {
5971 emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
5972 emit_insn_before (gen_add2_insn (reloadreg, inc), insn);
5973 emit_insn_before (gen_move_insn (incloc, reloadreg), insn);
5974 }
32131a9c
RK
5975 else
5976 {
0009eff2
RK
5977 /* Postincrement.
5978 Because this might be a jump insn or a compare, and because RELOADREG
5979 may not be available after the insn in an input reload, we must do
5980 the incrementation before the insn being reloaded for.
5981
5982 We have already copied INCLOC to RELOADREG. Increment the copy in
5983 RELOADREG, save that back, then decrement RELOADREG so it has
5984 the original value. */
5985
5986 emit_insn_before (gen_add2_insn (reloadreg, inc), insn);
5987 emit_insn_before (gen_move_insn (incloc, reloadreg), insn);
fb3821f7 5988 emit_insn_before (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)),
0009eff2 5989 insn);
32131a9c 5990 }
0009eff2
RK
5991
5992 return NEXT_INSN (prev);
32131a9c
RK
5993}
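
/* As a concrete (purely illustrative, hypothetical-operand) example of
   the fallback path above: reloading (POST_INC (reg X)) with INC_AMOUNT 4,
   when the add cannot be done in X directly, emits before INSN a sequence
   equivalent to

	R = X		copy the original value into RELOADREG
	R = R + 4	increment the copy
	X = R		store the incremented value back
	R = R - 4	restore RELOADREG to the pre-increment value

   so INSN still sees the old value in RELOADREG while X ends up
   incremented, as POST_INC requires.  */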
5994\f
5995/* Return 1 if we are certain that the constraint-string STRING allows
5996 the hard register REG. Return 0 if we can't be sure of this. */
5997
5998static int
5999constraint_accepts_reg_p (string, reg)
6000 char *string;
6001 rtx reg;
6002{
6003 int value = 0;
6004 int regno = true_regnum (reg);
6005 int c;
6006
6007 /* Initialize for first alternative. */
6008 value = 0;
6009 /* Check that each alternative contains `g' or `r'. */
6010 while (1)
6011 switch (c = *string++)
6012 {
6013 case 0:
6014 /* If an alternative lacks `g' or `r', we lose. */
6015 return value;
6016 case ',':
6017 /* If an alternative lacks `g' or `r', we lose. */
6018 if (value == 0)
6019 return 0;
6020 /* Initialize for next alternative. */
6021 value = 0;
6022 break;
6023 case 'g':
6024 case 'r':
6025 /* Any general reg wins for this alternative. */
6026 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
6027 value = 1;
6028 break;
6029 default:
6030 /* Any reg in specified class wins for this alternative. */
6031 {
0009eff2 6032 enum reg_class class = REG_CLASS_FROM_LETTER (c);
32131a9c 6033
0009eff2 6034 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
32131a9c
RK
6035 value = 1;
6036 }
6037 }
6038}
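
/* For example (assuming REG is a hard register in GENERAL_REGS): the
   constraint string "r,g" yields 1, since every alternative allows a
   general register, while "r,m" yields 0, because the second alternative
   accepts only memory and we therefore cannot be certain REG is allowed.  */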
6039\f
d445b551
RK
6040/* Return the number of places FIND appears within X, but don't count
6041 an occurrence if some SET_DEST is FIND. */
32131a9c
RK
6042
6043static int
6044count_occurrences (x, find)
6045 register rtx x, find;
6046{
6047 register int i, j;
6048 register enum rtx_code code;
6049 register char *format_ptr;
6050 int count;
6051
6052 if (x == find)
6053 return 1;
6054 if (x == 0)
6055 return 0;
6056
6057 code = GET_CODE (x);
6058
6059 switch (code)
6060 {
6061 case REG:
6062 case QUEUED:
6063 case CONST_INT:
6064 case CONST_DOUBLE:
6065 case SYMBOL_REF:
6066 case CODE_LABEL:
6067 case PC:
6068 case CC0:
6069 return 0;
d445b551
RK
6070
6071 case SET:
6072 if (SET_DEST (x) == find)
6073 return count_occurrences (SET_SRC (x), find);
6074 break;
32131a9c
RK
6075 }
6076
6077 format_ptr = GET_RTX_FORMAT (code);
6078 count = 0;
6079
6080 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6081 {
6082 switch (*format_ptr++)
6083 {
6084 case 'e':
6085 count += count_occurrences (XEXP (x, i), find);
6086 break;
6087
6088 case 'E':
6089 if (XVEC (x, i) != NULL)
6090 {
6091 for (j = 0; j < XVECLEN (x, i); j++)
6092 count += count_occurrences (XVECEXP (x, i, j), find);
6093 }
6094 break;
6095 }
6096 }
6097 return count;
6098}
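
/* Illustrative example (assuming shared rtx structure, as is the case for
   reload operands): counting occurrences of (reg X) within
   (set (reg X) (plus (reg X) (reg Y))) gives 1; the SET_DEST occurrence
   is deliberately not counted, only the one in the source of the SET.  */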