]> gcc.gnu.org Git - gcc.git/blame - gcc/reload1.c
*** empty log message ***
[gcc.git] / gcc / reload1.c
CommitLineData
/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
19
20
21#include "config.h"
22#include "rtl.h"
23#include "obstack.h"
24#include "insn-config.h"
25#include "insn-flags.h"
26#include "insn-codes.h"
27#include "flags.h"
28#include "expr.h"
29#include "regs.h"
30#include "hard-reg-set.h"
31#include "reload.h"
32#include "recog.h"
33#include "basic-block.h"
34#include "output.h"
35#include <stdio.h>
36
37/* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
69\f
/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which pseudo reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine.)  */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;

/* Widest width in which each pseudo reg is referred to (via subreg).  */
static int *reg_max_ref_width;

/* Element N is the insn that initialized reg N from its equivalent
   constant or memory slot.  */
static rtx *reg_equiv_init;

/* During reload_as_needed, element N contains the last pseudo regno
   reloaded into the Nth reload register.  This vector is in parallel
   with spill_regs.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   the Nth reload register was last used.  This vector is in parallel
   with spill_regs, and its contents are significant only when
   reg_reloaded_contents is significant.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

/* This reg set indicates registers that may not be used for retrying global
   allocation.  The registers that may not be used include all spill registers
   and the frame pointer (if we are using one).  */
HARD_REG_SET forbidden_regs;

/* This reg set indicates registers that are not good for spill registers.
   They will not be used to complete groups of spill registers.  This includes
   all fixed registers, registers that may be eliminated, and registers
   explicitly used in the rtl.

   (spill_reg_order prevents these registers from being used to start a
   group.)  */
static HARD_REG_SET bad_spill_regs;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `spills' is the number of
   elements that are actually valid; new ones are added at the end.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* Describes order of preference for putting regs into spill_regs.
   Contains the numbers of all the hard regs, in order most preferred first.
   This order is different for each function.
   It is set up by order_regs_for_reload.
   Empty elements at the end contain -1.  */
static short potential_reload_regs[FIRST_PSEUDO_REGISTER];

/* 1 for a hard register that appears explicitly in the rtl
   (for example, function value registers, special registers
   used by insns, structure value pointer registers).  */
static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];

/* Indicates if a register was counted against the need for
   groups.  0 means it can count against max_nongroup instead.  */
static HARD_REG_SET counted_for_groups;

/* Indicates if a register was counted against the need for
   non-groups.  0 means it can become part of a new group.
   During choose_reload_regs, 1 here means don't use this reg
   as part of a group, even if it seems to be otherwise ok.  */
static HARD_REG_SET counted_for_nongroups;

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */

static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */

char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */

char double_reg_address_ok;

/* Record the stack slot for each spilled hard register.  */

static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */

static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Indexed by register class and basic block number, nonzero if there is
   any need for a spill register of that class in that basic block.
   The pointer is 0 if we did stupid allocation and don't know
   the structure of basic blocks.  */

char *basic_block_needs[N_REG_CLASSES];

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */

int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */

int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  */

enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */

struct obstack reload_obstack;
char *reload_firstobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* NOTE(review): xmalloc actually returns a pointer; this pre-ANSI `int'
   declaration relies on int/pointer interchangeability -- confirm before
   modernizing (it would break on targets where int is narrower than a
   pointer).  */
extern int xmalloc ();
extern void free ();

/* List of labels that must never be deleted.  */
extern rtx forced_labels;
254\f
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.   If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

static struct elim_table
{
  int from;			/* Register number to be eliminated. */
  int to;			/* Register number used as replacement. */
  int initial_offset;		/* Initial difference between values. */
  int can_eliminate;		/* Non-zero if this elimination can be done. */
  int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
				   insns made by reload. */
  int offset;			/* Current offset between the two regs. */
  int max_offset;		/* Maximum offset between the two regs. */
  int previous_offset;		/* Offset at end of previous insn. */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM. */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated. */
  rtx to_rtx;			/* REG rtx for the replacement. */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

/* Number of entries in the elimination table above.  */
#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
291
/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If non-zero, we use a new copy of each
   replacement result in any insns encountered.  */
static int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the number of the label.
   The first table is an array of flags that records whether we have yet
   encountered a label and the second table is an array of arrays, one
   entry in the latter array for each elimination.  */

static char *offsets_known_at;
static int (*offsets_at)[NUM_ELIMINABLE_REGS];

/* Number of labels in the current function.  */

static int num_labels;
313\f
/* Forward declarations for this file's helpers.  These are pre-ANSI
   (K&R) declarations, so no parameter lists are given.  */
void mark_home_live ();
static void count_possible_groups ();
static int possible_group_p ();
static void scan_paradoxical_subregs ();
static void reload_as_needed ();
static int modes_equiv_for_class_p ();
static void alter_reg ();
static void delete_dead_insn ();
static int new_spill_reg();
static void set_label_offsets ();
static int eliminate_regs_in_insn ();
static void mark_not_eliminable ();
static int spill_hard_reg ();
static void choose_reload_regs ();
static void emit_reload_insns ();
static void delete_output_reload ();
static void forget_old_reloads_1 ();
static void order_regs_for_reload ();
static rtx inc_for_reload ();
static int constraint_accepts_reg_p ();
static int count_occurrences ();

/* Helpers defined in other source files.  */
extern void remove_death ();
extern rtx adj_offsettable_operand ();
extern rtx form_sum ();
339\f
/* One-time initialization for the reload pass.

   Probes the target's addressing capabilities by building trial address
   rtl and asking memory_address_p about it, setting spill_indirect_levels,
   indirect_symref_ok and double_reg_address_ok; initializes the obstack
   used for rtl allocated during register elimination; and, when the
   machine description provides secondary-reload patterns, fills in the
   reload_in_optab / reload_out_optab tables for each mode.  */

void
init_reload ()
{
  register int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  register rtx tem
    = gen_rtx (MEM, Pmode,
	       gen_rtx (PLUS, Pmode,
			gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
			gen_rtx (CONST_INT, VOIDmode, 4)));
  spill_indirect_levels = 0;

  /* Keep wrapping TEM in one more MEM for as long as the result is still
     a valid address; each successful iteration means one more level of
     indirection is supported.  */
  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx (MEM, Pmode, tem);
    }

  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address.  */

  tem = gen_rtx (PLUS, Pmode,
		 gen_rtx (REG, Pmode, FRAME_POINTER_REGNUM),
		 gen_rtx (REG, Pmode, FRAME_POINTER_REGNUM));
  /* This way, we make sure that reg+reg is an offsettable address.  */
  tem = plus_constant (tem, 4);

  double_reg_address_ok = memory_address_p (QImode, tem);

  /* Initialize obstack for our rtl allocation.  */
  gcc_obstack_init (&reload_obstack);
  reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);

#ifdef HAVE_SECONDARY_RELOADS

  /* Initialize the optabs for doing special input and output reloads.  */

  for (i = 0; i < NUM_MACHINE_MODES; i++)
    reload_in_optab[i] = reload_out_optab[i] = CODE_FOR_nothing;

  /* For each mode the machine description defines a reload_in<mode>
     pattern for, record its insn code.  */
#ifdef HAVE_reload_inqi
  if (HAVE_reload_inqi)
    reload_in_optab[(int) QImode] = CODE_FOR_reload_inqi;
#endif
#ifdef HAVE_reload_inhi
  if (HAVE_reload_inhi)
    reload_in_optab[(int) HImode] = CODE_FOR_reload_inhi;
#endif
#ifdef HAVE_reload_insi
  if (HAVE_reload_insi)
    reload_in_optab[(int) SImode] = CODE_FOR_reload_insi;
#endif
#ifdef HAVE_reload_indi
  if (HAVE_reload_indi)
    reload_in_optab[(int) DImode] = CODE_FOR_reload_indi;
#endif
#ifdef HAVE_reload_inti
  if (HAVE_reload_inti)
    reload_in_optab[(int) TImode] = CODE_FOR_reload_inti;
#endif
#ifdef HAVE_reload_insf
  if (HAVE_reload_insf)
    reload_in_optab[(int) SFmode] = CODE_FOR_reload_insf;
#endif
#ifdef HAVE_reload_indf
  if (HAVE_reload_indf)
    reload_in_optab[(int) DFmode] = CODE_FOR_reload_indf;
#endif
#ifdef HAVE_reload_inxf
  if (HAVE_reload_inxf)
    reload_in_optab[(int) XFmode] = CODE_FOR_reload_inxf;
#endif
#ifdef HAVE_reload_intf
  if (HAVE_reload_intf)
    reload_in_optab[(int) TFmode] = CODE_FOR_reload_intf;
#endif

  /* Likewise for the reload_out<mode> patterns.  */
#ifdef HAVE_reload_outqi
  if (HAVE_reload_outqi)
    reload_out_optab[(int) QImode] = CODE_FOR_reload_outqi;
#endif
#ifdef HAVE_reload_outhi
  if (HAVE_reload_outhi)
    reload_out_optab[(int) HImode] = CODE_FOR_reload_outhi;
#endif
#ifdef HAVE_reload_outsi
  if (HAVE_reload_outsi)
    reload_out_optab[(int) SImode] = CODE_FOR_reload_outsi;
#endif
#ifdef HAVE_reload_outdi
  if (HAVE_reload_outdi)
    reload_out_optab[(int) DImode] = CODE_FOR_reload_outdi;
#endif
#ifdef HAVE_reload_outti
  if (HAVE_reload_outti)
    reload_out_optab[(int) TImode] = CODE_FOR_reload_outti;
#endif
#ifdef HAVE_reload_outsf
  if (HAVE_reload_outsf)
    reload_out_optab[(int) SFmode] = CODE_FOR_reload_outsf;
#endif
#ifdef HAVE_reload_outdf
  if (HAVE_reload_outdf)
    reload_out_optab[(int) DFmode] = CODE_FOR_reload_outdf;
#endif
#ifdef HAVE_reload_outxf
  if (HAVE_reload_outxf)
    reload_out_optab[(int) XFmode] = CODE_FOR_reload_outxf;
#endif
#ifdef HAVE_reload_outtf
  if (HAVE_reload_outtf)
    reload_out_optab[(int) TFmode] = CODE_FOR_reload_outtf;
#endif

#endif /* HAVE_SECONDARY_RELOADS */

}
465
466/* Main entry point for the reload pass, and only entry point
467 in this file.
468
469 FIRST is the first insn of the function being compiled.
470
471 GLOBAL nonzero means we were called from global_alloc
472 and should attempt to reallocate any pseudoregs that we
473 displace from hard regs we will use for reloads.
474 If GLOBAL is zero, we do not have enough information to do that,
475 so any pseudo reg that is spilled must go to the stack.
476
477 DUMPFILE is the global-reg debugging dump file stream, or 0.
478 If it is nonzero, messages are written to it to describe
479 which registers are seized as reload regs, which pseudo regs
480 are spilled from them, and where the pseudo regs are reallocated to. */
481
482void
483reload (first, global, dumpfile)
484 rtx first;
485 int global;
486 FILE *dumpfile;
487{
488 register int class;
489 register int i;
490 register rtx insn;
491 register struct elim_table *ep;
492
493 int something_changed;
494 int something_needs_reloads;
495 int something_needs_elimination;
496 int new_basic_block_needs;
a8efe40d
RK
497 enum reg_class caller_save_spill_class = NO_REGS;
498 int caller_save_group_size = 1;
32131a9c
RK
499
500 /* The basic block number currently being processed for INSN. */
501 int this_block;
502
503 /* Make sure even insns with volatile mem refs are recognizable. */
504 init_recog ();
505
506 /* Enable find_equiv_reg to distinguish insns made by reload. */
507 reload_first_uid = get_max_uid ();
508
509 for (i = 0; i < N_REG_CLASSES; i++)
510 basic_block_needs[i] = 0;
511
512 /* Remember which hard regs appear explicitly
513 before we merge into `regs_ever_live' the ones in which
514 pseudo regs have been allocated. */
515 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
516
517 /* We don't have a stack slot for any spill reg yet. */
518 bzero (spill_stack_slot, sizeof spill_stack_slot);
519 bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);
520
a8efe40d
RK
521 /* Initialize the save area information for caller-save, in case some
522 are needed. */
523 init_save_areas ();
a8fdc208 524
32131a9c
RK
525 /* Compute which hard registers are now in use
526 as homes for pseudo registers.
527 This is done here rather than (eg) in global_alloc
528 because this point is reached even if not optimizing. */
529
530 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
531 mark_home_live (i);
532
533 /* Make sure that the last insn in the chain
534 is not something that needs reloading. */
535 emit_note (0, NOTE_INSN_DELETED);
536
537 /* Find all the pseudo registers that didn't get hard regs
538 but do have known equivalent constants or memory slots.
539 These include parameters (known equivalent to parameter slots)
540 and cse'd or loop-moved constant memory addresses.
541
542 Record constant equivalents in reg_equiv_constant
543 so they will be substituted by find_reloads.
544 Record memory equivalents in reg_mem_equiv so they can
545 be substituted eventually by altering the REG-rtx's. */
546
547 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
548 bzero (reg_equiv_constant, max_regno * sizeof (rtx));
549 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
550 bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
551 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
552 bzero (reg_equiv_mem, max_regno * sizeof (rtx));
553 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
554 bzero (reg_equiv_init, max_regno * sizeof (rtx));
555 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
556 bzero (reg_equiv_address, max_regno * sizeof (rtx));
557 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
558 bzero (reg_max_ref_width, max_regno * sizeof (int));
559
560 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
561 Also find all paradoxical subregs
562 and find largest such for each pseudo. */
563
564 for (insn = first; insn; insn = NEXT_INSN (insn))
565 {
566 rtx set = single_set (insn);
567
568 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
569 {
570 rtx note = find_reg_note (insn, REG_EQUIV, 0);
a8efe40d
RK
571 if (note
572#ifdef LEGITIMATE_PIC_OPERAND_P
a8fdc208 573 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
a8efe40d
RK
574 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
575#endif
576 )
32131a9c
RK
577 {
578 rtx x = XEXP (note, 0);
579 i = REGNO (SET_DEST (set));
580 if (i > LAST_VIRTUAL_REGISTER)
581 {
582 if (GET_CODE (x) == MEM)
583 reg_equiv_memory_loc[i] = x;
584 else if (CONSTANT_P (x))
585 {
586 if (LEGITIMATE_CONSTANT_P (x))
587 reg_equiv_constant[i] = x;
588 else
589 reg_equiv_memory_loc[i]
d445b551 590 = force_const_mem (GET_MODE (SET_DEST (set)), x);
32131a9c
RK
591 }
592 else
593 continue;
594
595 /* If this register is being made equivalent to a MEM
596 and the MEM is not SET_SRC, the equivalencing insn
597 is one with the MEM as a SET_DEST and it occurs later.
598 So don't mark this insn now. */
599 if (GET_CODE (x) != MEM
600 || rtx_equal_p (SET_SRC (set), x))
601 reg_equiv_init[i] = insn;
602 }
603 }
604 }
605
606 /* If this insn is setting a MEM from a register equivalent to it,
607 this is the equivalencing insn. */
608 else if (set && GET_CODE (SET_DEST (set)) == MEM
609 && GET_CODE (SET_SRC (set)) == REG
610 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
611 && rtx_equal_p (SET_DEST (set),
612 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
613 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
614
615 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
616 scan_paradoxical_subregs (PATTERN (insn));
617 }
618
619 /* Does this function require a frame pointer? */
620
621 frame_pointer_needed = (! flag_omit_frame_pointer
622#ifdef EXIT_IGNORE_STACK
623 /* ?? If EXIT_IGNORE_STACK is set, we will not save
624 and restore sp for alloca. So we can't eliminate
625 the frame pointer in that case. At some point,
626 we should improve this by emitting the
627 sp-adjusting insns for this case. */
628 || (current_function_calls_alloca
629 && EXIT_IGNORE_STACK)
630#endif
631 || FRAME_POINTER_REQUIRED);
632
633 num_eliminable = 0;
634
635 /* Initialize the table of registers to eliminate. The way we do this
636 depends on how the eliminable registers were defined. */
637#ifdef ELIMINABLE_REGS
638 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
639 {
640 ep->can_eliminate = ep->can_eliminate_previous
641 = (CAN_ELIMINATE (ep->from, ep->to)
642 && (ep->from != FRAME_POINTER_REGNUM || ! frame_pointer_needed));
643 }
644#else
645 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
646 = ! frame_pointer_needed;
647#endif
648
649 /* Count the number of eliminable registers and build the FROM and TO
a8fdc208 650 REG rtx's. Note that code in gen_rtx will cause, e.g.,
32131a9c
RK
651 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
652 We depend on this. */
653 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
654 {
655 num_eliminable += ep->can_eliminate;
656 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
657 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
658 }
659
660 num_labels = max_label_num () - get_first_label_num ();
661
662 /* Allocate the tables used to store offset information at labels. */
663 offsets_known_at = (char *) alloca (num_labels);
664 offsets_at
665 = (int (*)[NUM_ELIMINABLE_REGS])
666 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
667
668 offsets_known_at -= get_first_label_num ();
669 offsets_at -= get_first_label_num ();
670
671 /* Alter each pseudo-reg rtx to contain its hard reg number.
672 Assign stack slots to the pseudos that lack hard regs or equivalents.
673 Do not touch virtual registers. */
674
675 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
676 alter_reg (i, -1);
677
678 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
679 because the stack size may be a part of the offset computation for
680 register elimination. */
681 assign_stack_local (BLKmode, 0, 0);
682
683 /* If we have some registers we think can be eliminated, scan all insns to
684 see if there is an insn that sets one of these registers to something
685 other than itself plus a constant. If so, the register cannot be
686 eliminated. Doing this scan here eliminates an extra pass through the
687 main reload loop in the most common case where register elimination
688 cannot be done. */
689 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
690 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
691 || GET_CODE (insn) == CALL_INSN)
692 note_stores (PATTERN (insn), mark_not_eliminable);
693
694#ifndef REGISTER_CONSTRAINTS
695 /* If all the pseudo regs have hard regs,
696 except for those that are never referenced,
697 we know that no reloads are needed. */
698 /* But that is not true if there are register constraints, since
699 in that case some pseudos might be in the wrong kind of hard reg. */
700
701 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
702 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
703 break;
704
b8093d02 705 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
32131a9c
RK
706 return;
707#endif
708
709 /* Compute the order of preference for hard registers to spill.
710 Store them by decreasing preference in potential_reload_regs. */
711
712 order_regs_for_reload ();
713
714 /* So far, no hard regs have been spilled. */
715 n_spills = 0;
716 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
717 spill_reg_order[i] = -1;
718
719 /* On most machines, we can't use any register explicitly used in the
720 rtl as a spill register. But on some, we have to. Those will have
721 taken care to keep the life of hard regs as short as possible. */
722
723#ifdef SMALL_REGISTER_CLASSES
724 CLEAR_HARD_REG_SET (forbidden_regs);
725#else
726 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
727#endif
728
729 /* Spill any hard regs that we know we can't eliminate. */
730 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
731 if (! ep->can_eliminate)
732 {
733 spill_hard_reg (ep->from, global, dumpfile, 1);
734 regs_ever_live[ep->from] = 1;
735 }
736
737 if (global)
738 for (i = 0; i < N_REG_CLASSES; i++)
739 {
740 basic_block_needs[i] = (char *)alloca (n_basic_blocks);
741 bzero (basic_block_needs[i], n_basic_blocks);
742 }
743
744 /* This loop scans the entire function each go-round
745 and repeats until one repetition spills no additional hard regs. */
746
d45cf215 747 /* This flag is set when a pseudo reg is spilled,
32131a9c
RK
748 to require another pass. Note that getting an additional reload
749 reg does not necessarily imply any pseudo reg was spilled;
750 sometimes we find a reload reg that no pseudo reg was allocated in. */
751 something_changed = 1;
752 /* This flag is set if there are any insns that require reloading. */
753 something_needs_reloads = 0;
754 /* This flag is set if there are any insns that require register
755 eliminations. */
756 something_needs_elimination = 0;
757 while (something_changed)
758 {
759 rtx after_call = 0;
760
761 /* For each class, number of reload regs needed in that class.
762 This is the maximum over all insns of the needs in that class
763 of the individual insn. */
764 int max_needs[N_REG_CLASSES];
765 /* For each class, size of group of consecutive regs
766 that is needed for the reloads of this class. */
767 int group_size[N_REG_CLASSES];
768 /* For each class, max number of consecutive groups needed.
769 (Each group contains group_size[CLASS] consecutive registers.) */
770 int max_groups[N_REG_CLASSES];
771 /* For each class, max number needed of regs that don't belong
772 to any of the groups. */
773 int max_nongroups[N_REG_CLASSES];
774 /* For each class, the machine mode which requires consecutive
775 groups of regs of that class.
776 If two different modes ever require groups of one class,
777 they must be the same size and equally restrictive for that class,
778 otherwise we can't handle the complexity. */
779 enum machine_mode group_mode[N_REG_CLASSES];
780 rtx x;
781
782 something_changed = 0;
783 bzero (max_needs, sizeof max_needs);
784 bzero (max_groups, sizeof max_groups);
785 bzero (max_nongroups, sizeof max_nongroups);
786 bzero (group_size, sizeof group_size);
787 for (i = 0; i < N_REG_CLASSES; i++)
788 group_mode[i] = VOIDmode;
789
790 /* Keep track of which basic blocks are needing the reloads. */
791 this_block = 0;
792
793 /* Remember whether any element of basic_block_needs
794 changes from 0 to 1 in this pass. */
795 new_basic_block_needs = 0;
796
797 /* Reset all offsets on eliminable registers to their initial values. */
798#ifdef ELIMINABLE_REGS
799 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
800 {
801 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
a8efe40d
RK
802 ep->previous_offset = ep->offset
803 = ep->max_offset = ep->initial_offset;
32131a9c
RK
804 }
805#else
806#ifdef INITIAL_FRAME_POINTER_OFFSET
807 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
808#else
809 if (!FRAME_POINTER_REQUIRED)
810 abort ();
811 reg_eliminate[0].initial_offset = 0;
812#endif
a8efe40d 813 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
32131a9c
RK
814 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
815#endif
816
817 num_not_at_initial_offset = 0;
818
819 bzero (&offsets_known_at[get_first_label_num ()], num_labels);
820
821 /* Set a known offset for each forced label to be at the initial offset
822 of each elimination. We do this because we assume that all
823 computed jumps occur from a location where each elimination is
824 at its initial offset. */
825
826 for (x = forced_labels; x; x = XEXP (x, 1))
827 if (XEXP (x, 0))
828 set_label_offsets (XEXP (x, 0), 0, 1);
829
830 /* For each pseudo register that has an equivalent location defined,
831 try to eliminate any eliminable registers (such as the frame pointer)
832 assuming initial offsets for the replacement register, which
833 is the normal case.
834
835 If the resulting location is directly addressable, substitute
836 the MEM we just got directly for the old REG.
837
838 If it is not addressable but is a constant or the sum of a hard reg
839 and constant, it is probably not addressable because the constant is
840 out of range, in that case record the address; we will generate
841 hairy code to compute the address in a register each time it is
a8fdc208 842 needed.
32131a9c
RK
843
844 If the location is not addressable, but does not have one of the
845 above forms, assign a stack slot. We have to do this to avoid the
846 potential of producing lots of reloads if, e.g., a location involves
847 a pseudo that didn't get a hard register and has an equivalent memory
848 location that also involves a pseudo that didn't get a hard register.
849
850 Perhaps at some point we will improve reload_when_needed handling
851 so this problem goes away. But that's very hairy. */
852
853 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
854 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
855 {
856 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, 0);
857
858 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
859 XEXP (x, 0)))
860 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
861 else if (CONSTANT_P (XEXP (x, 0))
862 || (GET_CODE (XEXP (x, 0)) == PLUS
863 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
864 && (REGNO (XEXP (XEXP (x, 0), 0))
865 < FIRST_PSEUDO_REGISTER)
866 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
867 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
868 else
869 {
870 /* Make a new stack slot. Then indicate that something
a8fdc208 871 changed so we go back and recompute offsets for
32131a9c
RK
872 eliminable registers because the allocation of memory
873 below might change some offset. reg_equiv_{mem,address}
874 will be set up for this pseudo on the next pass around
875 the loop. */
876 reg_equiv_memory_loc[i] = 0;
877 reg_equiv_init[i] = 0;
878 alter_reg (i, -1);
879 something_changed = 1;
880 }
881 }
a8fdc208 882
d45cf215 883 /* If we allocated another pseudo to the stack, redo elimination
32131a9c
RK
884 bookkeeping. */
885 if (something_changed)
886 continue;
887
a8efe40d
RK
888 /* If caller-saves needs a group, initialize the group to include
889 the size and mode required for caller-saves. */
890
891 if (caller_save_group_size > 1)
892 {
893 group_mode[(int) caller_save_spill_class] = Pmode;
894 group_size[(int) caller_save_spill_class] = caller_save_group_size;
895 }
896
32131a9c
RK
897 /* Compute the most additional registers needed by any instruction.
898 Collect information separately for each class of regs. */
899
900 for (insn = first; insn; insn = NEXT_INSN (insn))
901 {
902 if (global && this_block + 1 < n_basic_blocks
903 && insn == basic_block_head[this_block+1])
904 ++this_block;
905
906 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
907 might include REG_LABEL), we need to see what effects this
908 has on the known offsets at labels. */
909
910 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
911 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
912 && REG_NOTES (insn) != 0))
913 set_label_offsets (insn, insn, 0);
914
915 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
916 {
917 /* Nonzero means don't use a reload reg that overlaps
918 the place where a function value can be returned. */
919 rtx avoid_return_reg = 0;
920
921 rtx old_body = PATTERN (insn);
922 int old_code = INSN_CODE (insn);
923 rtx old_notes = REG_NOTES (insn);
924 int did_elimination = 0;
925
926 /* Initially, count RELOAD_OTHER reloads.
927 Later, merge in the other kinds. */
928 int insn_needs[N_REG_CLASSES];
929 int insn_groups[N_REG_CLASSES];
930 int insn_total_groups = 0;
931
932 /* Count RELOAD_FOR_INPUT_RELOAD_ADDRESS reloads. */
933 int insn_needs_for_inputs[N_REG_CLASSES];
934 int insn_groups_for_inputs[N_REG_CLASSES];
935 int insn_total_groups_for_inputs = 0;
936
937 /* Count RELOAD_FOR_OUTPUT_RELOAD_ADDRESS reloads. */
938 int insn_needs_for_outputs[N_REG_CLASSES];
939 int insn_groups_for_outputs[N_REG_CLASSES];
940 int insn_total_groups_for_outputs = 0;
941
942 /* Count RELOAD_FOR_OPERAND_ADDRESS reloads. */
943 int insn_needs_for_operands[N_REG_CLASSES];
944 int insn_groups_for_operands[N_REG_CLASSES];
945 int insn_total_groups_for_operands = 0;
946
32131a9c
RK
947#if 0 /* This wouldn't work nowadays, since optimize_bit_field
948 looks for non-strict memory addresses. */
949 /* Optimization: a bit-field instruction whose field
950 happens to be a byte or halfword in memory
951 can be changed to a move instruction. */
952
953 if (GET_CODE (PATTERN (insn)) == SET)
954 {
955 rtx dest = SET_DEST (PATTERN (insn));
956 rtx src = SET_SRC (PATTERN (insn));
957
958 if (GET_CODE (dest) == ZERO_EXTRACT
959 || GET_CODE (dest) == SIGN_EXTRACT)
960 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
961 if (GET_CODE (src) == ZERO_EXTRACT
962 || GET_CODE (src) == SIGN_EXTRACT)
963 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
964 }
965#endif
966
967 /* If needed, eliminate any eliminable registers. */
968 if (num_eliminable)
969 did_elimination = eliminate_regs_in_insn (insn, 0);
970
971#ifdef SMALL_REGISTER_CLASSES
972 /* Set avoid_return_reg if this is an insn
973 that might use the value of a function call. */
974 if (GET_CODE (insn) == CALL_INSN)
975 {
976 if (GET_CODE (PATTERN (insn)) == SET)
977 after_call = SET_DEST (PATTERN (insn));
978 else if (GET_CODE (PATTERN (insn)) == PARALLEL
979 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
980 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
981 else
982 after_call = 0;
983 }
984 else if (after_call != 0
985 && !(GET_CODE (PATTERN (insn)) == SET
986 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
987 {
988 if (reg_mentioned_p (after_call, PATTERN (insn)))
989 avoid_return_reg = after_call;
990 after_call = 0;
991 }
992#endif /* SMALL_REGISTER_CLASSES */
993
994 /* Analyze the instruction. */
995 find_reloads (insn, 0, spill_indirect_levels, global,
996 spill_reg_order);
997
998 /* Remember for later shortcuts which insns had any reloads or
999 register eliminations.
1000
1001 One might think that it would be worthwhile to mark insns
1002 that need register replacements but not reloads, but this is
1003 not safe because find_reloads may do some manipulation of
1004 the insn (such as swapping commutative operands), which would
1005 be lost when we restore the old pattern after register
1006 replacement. So the actions of find_reloads must be redone in
1007 subsequent passes or in reload_as_needed.
1008
1009 However, it is safe to mark insns that need reloads
1010 but not register replacement. */
1011
1012 PUT_MODE (insn, (did_elimination ? QImode
1013 : n_reloads ? HImode
1014 : VOIDmode));
1015
1016 /* Discard any register replacements done. */
1017 if (did_elimination)
1018 {
1019 obstack_free (&reload_obstack, reload_firstobj);
1020 PATTERN (insn) = old_body;
1021 INSN_CODE (insn) = old_code;
1022 REG_NOTES (insn) = old_notes;
1023 something_needs_elimination = 1;
1024 }
1025
a8efe40d 1026 /* If this insn has no reloads, we need not do anything except
a8fdc208 1027 in the case of a CALL_INSN when we have caller-saves and
a8efe40d
RK
1028 caller-save needs reloads. */
1029
1030 if (n_reloads == 0
1031 && ! (GET_CODE (insn) == CALL_INSN
1032 && caller_save_spill_class != NO_REGS))
32131a9c
RK
1033 continue;
1034
1035 something_needs_reloads = 1;
1036
a8efe40d
RK
1037 for (i = 0; i < N_REG_CLASSES; i++)
1038 {
1039 insn_needs[i] = 0, insn_groups[i] = 0;
1040 insn_needs_for_inputs[i] = 0, insn_groups_for_inputs[i] = 0;
1041 insn_needs_for_outputs[i] = 0, insn_groups_for_outputs[i] = 0;
1042 insn_needs_for_operands[i] = 0, insn_groups_for_operands[i] = 0;
1043 }
1044
32131a9c
RK
1045 /* Count each reload once in every class
1046 containing the reload's own class. */
1047
1048 for (i = 0; i < n_reloads; i++)
1049 {
1050 register enum reg_class *p;
e85ddd99 1051 enum reg_class class = reload_reg_class[i];
32131a9c
RK
1052 int size;
1053 enum machine_mode mode;
1054 int *this_groups;
1055 int *this_needs;
1056 int *this_total_groups;
1057
1058 /* Don't count the dummy reloads, for which one of the
1059 regs mentioned in the insn can be used for reloading.
1060 Don't count optional reloads.
1061 Don't count reloads that got combined with others. */
1062 if (reload_reg_rtx[i] != 0
1063 || reload_optional[i] != 0
1064 || (reload_out[i] == 0 && reload_in[i] == 0
1065 && ! reload_secondary_p[i]))
1066 continue;
1067
e85ddd99
RK
1068 /* Show that a reload register of this class is needed
1069 in this basic block. We do not use insn_needs and
1070 insn_groups because they are overly conservative for
1071 this purpose. */
1072 if (global && ! basic_block_needs[(int) class][this_block])
1073 {
1074 basic_block_needs[(int) class][this_block] = 1;
1075 new_basic_block_needs = 1;
1076 }
1077
32131a9c
RK
1078 /* Decide which time-of-use to count this reload for. */
1079 switch (reload_when_needed[i])
1080 {
1081 case RELOAD_OTHER:
1082 case RELOAD_FOR_OUTPUT:
1083 case RELOAD_FOR_INPUT:
1084 this_needs = insn_needs;
1085 this_groups = insn_groups;
1086 this_total_groups = &insn_total_groups;
1087 break;
1088
1089 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
1090 this_needs = insn_needs_for_inputs;
1091 this_groups = insn_groups_for_inputs;
1092 this_total_groups = &insn_total_groups_for_inputs;
1093 break;
1094
1095 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
1096 this_needs = insn_needs_for_outputs;
1097 this_groups = insn_groups_for_outputs;
1098 this_total_groups = &insn_total_groups_for_outputs;
1099 break;
1100
1101 case RELOAD_FOR_OPERAND_ADDRESS:
1102 this_needs = insn_needs_for_operands;
1103 this_groups = insn_groups_for_operands;
1104 this_total_groups = &insn_total_groups_for_operands;
1105 break;
1106 }
1107
1108 mode = reload_inmode[i];
1109 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1110 mode = reload_outmode[i];
e85ddd99 1111 size = CLASS_MAX_NREGS (class, mode);
32131a9c
RK
1112 if (size > 1)
1113 {
1114 enum machine_mode other_mode, allocate_mode;
1115
1116 /* Count number of groups needed separately from
1117 number of individual regs needed. */
e85ddd99
RK
1118 this_groups[(int) class]++;
1119 p = reg_class_superclasses[(int) class];
32131a9c
RK
1120 while (*p != LIM_REG_CLASSES)
1121 this_groups[(int) *p++]++;
1122 (*this_total_groups)++;
1123
1124 /* Record size and mode of a group of this class. */
1125 /* If more than one size group is needed,
1126 make all groups the largest needed size. */
e85ddd99 1127 if (group_size[(int) class] < size)
32131a9c 1128 {
e85ddd99 1129 other_mode = group_mode[(int) class];
32131a9c
RK
1130 allocate_mode = mode;
1131
e85ddd99
RK
1132 group_size[(int) class] = size;
1133 group_mode[(int) class] = mode;
32131a9c
RK
1134 }
1135 else
1136 {
1137 other_mode = mode;
e85ddd99 1138 allocate_mode = group_mode[(int) class];
32131a9c
RK
1139 }
1140
1141 /* Crash if two dissimilar machine modes both need
1142 groups of consecutive regs of the same class. */
1143
1144 if (other_mode != VOIDmode
1145 && other_mode != allocate_mode
1146 && ! modes_equiv_for_class_p (allocate_mode,
1147 other_mode,
e85ddd99 1148 class))
32131a9c
RK
1149 abort ();
1150 }
1151 else if (size == 1)
1152 {
e85ddd99
RK
1153 this_needs[(int) class] += 1;
1154 p = reg_class_superclasses[(int) class];
32131a9c
RK
1155 while (*p != LIM_REG_CLASSES)
1156 this_needs[(int) *p++] += 1;
1157 }
1158 else
1159 abort ();
1160 }
1161
1162 /* All reloads have been counted for this insn;
1163 now merge the various times of use.
1164 This sets insn_needs, etc., to the maximum total number
1165 of registers needed at any point in this insn. */
1166
1167 for (i = 0; i < N_REG_CLASSES; i++)
1168 {
1169 int this_max;
1170 this_max = insn_needs_for_inputs[i];
1171 if (insn_needs_for_outputs[i] > this_max)
1172 this_max = insn_needs_for_outputs[i];
1173 if (insn_needs_for_operands[i] > this_max)
1174 this_max = insn_needs_for_operands[i];
1175 insn_needs[i] += this_max;
1176 this_max = insn_groups_for_inputs[i];
1177 if (insn_groups_for_outputs[i] > this_max)
1178 this_max = insn_groups_for_outputs[i];
1179 if (insn_groups_for_operands[i] > this_max)
1180 this_max = insn_groups_for_operands[i];
1181 insn_groups[i] += this_max;
32131a9c 1182 }
a8efe40d 1183
32131a9c
RK
1184 insn_total_groups += MAX (insn_total_groups_for_inputs,
1185 MAX (insn_total_groups_for_outputs,
1186 insn_total_groups_for_operands));
1187
a8efe40d
RK
1188 /* If this is a CALL_INSN and caller-saves will need
1189 a spill register, act as if the spill register is
1190 needed for this insn. However, the spill register
1191 can be used by any reload of this insn, so we only
1192 need do something if no need for that class has
a8fdc208 1193 been recorded.
a8efe40d
RK
1194
1195 The assumption that every CALL_INSN will trigger a
1196 caller-save is highly conservative, however, the number
1197 of cases where caller-saves will need a spill register but
1198 a block containing a CALL_INSN won't need a spill register
1199 of that class should be quite rare.
1200
1201 If a group is needed, the size and mode of the group will
d45cf215 1202 have been set up at the beginning of this loop. */
a8efe40d
RK
1203
1204 if (GET_CODE (insn) == CALL_INSN
1205 && caller_save_spill_class != NO_REGS)
1206 {
1207 int *caller_save_needs
1208 = (caller_save_group_size > 1 ? insn_groups : insn_needs);
1209
1210 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1211 {
1212 register enum reg_class *p
1213 = reg_class_superclasses[(int) caller_save_spill_class];
1214
1215 caller_save_needs[(int) caller_save_spill_class]++;
1216
1217 while (*p != LIM_REG_CLASSES)
0aaa6af8 1218 caller_save_needs[(int) *p++] += 1;
a8efe40d
RK
1219 }
1220
1221 if (caller_save_group_size > 1)
1222 insn_total_groups = MAX (insn_total_groups, 1);
1223 }
1224
32131a9c
RK
1225#ifdef SMALL_REGISTER_CLASSES
1226 /* If this insn stores the value of a function call,
1227 and that value is in a register that has been spilled,
1228 and if the insn needs a reload in a class
1229 that might use that register as the reload register,
1230 then add add an extra need in that class.
1231 This makes sure we have a register available that does
1232 not overlap the return value. */
1233 if (avoid_return_reg)
1234 {
1235 int regno = REGNO (avoid_return_reg);
1236 int nregs
1237 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1238 int r;
1239 int inc_groups = 0;
1240 for (r = regno; r < regno + nregs; r++)
1241 if (spill_reg_order[r] >= 0)
1242 for (i = 0; i < N_REG_CLASSES; i++)
1243 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1244 {
1245 if (insn_needs[i] > 0)
1246 insn_needs[i]++;
1247 if (insn_groups[i] > 0
1248 && nregs > 1)
1249 inc_groups = 1;
1250 }
1251 if (inc_groups)
1252 insn_groups[i]++;
1253 }
1254#endif /* SMALL_REGISTER_CLASSES */
1255
1256 /* For each class, collect maximum need of any insn. */
1257
1258 for (i = 0; i < N_REG_CLASSES; i++)
1259 {
1260 if (max_needs[i] < insn_needs[i])
1261 max_needs[i] = insn_needs[i];
1262 if (max_groups[i] < insn_groups[i])
1263 max_groups[i] = insn_groups[i];
1264 if (insn_total_groups > 0)
1265 if (max_nongroups[i] < insn_needs[i])
1266 max_nongroups[i] = insn_needs[i];
1267 }
1268 }
1269 /* Note that there is a continue statement above. */
1270 }
1271
d445b551 1272 /* If we have caller-saves, set up the save areas and see if caller-save
a8efe40d 1273 will need a spill register. */
32131a9c 1274
d445b551 1275 if (caller_save_needed
a8efe40d
RK
1276 && ! setup_save_areas (&something_changed)
1277 && caller_save_spill_class == NO_REGS)
32131a9c 1278 {
a8efe40d
RK
1279 /* The class we will need depends on whether the machine
1280 supports the sum of two registers for an address; see
1281 find_address_reloads for details. */
1282
a8fdc208 1283 caller_save_spill_class
a8efe40d
RK
1284 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1285 caller_save_group_size
1286 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1287 something_changed = 1;
32131a9c
RK
1288 }
1289
1290 /* Now deduct from the needs for the registers already
1291 available (already spilled). */
1292
1293 CLEAR_HARD_REG_SET (counted_for_groups);
1294 CLEAR_HARD_REG_SET (counted_for_nongroups);
1295
1296 /* First find all regs alone in their class
1297 and count them (if desired) for non-groups.
1298 We would be screwed if a group took the only reg in a class
d445b551 1299 for which a non-group reload is needed.
32131a9c
RK
1300 (Note there is still a bug; if a class has 2 regs,
1301 both could be stolen by groups and we would lose the same way.
1302 With luck, no machine will need a nongroup in a 2-reg class.) */
1303
1304 for (i = 0; i < n_spills; i++)
1305 {
1306 register enum reg_class *p;
1307 class = (int) REGNO_REG_CLASS (spill_regs[i]);
1308
1309 if (reg_class_size[class] == 1 && max_nongroups[class] > 0)
1310 {
1311 max_needs[class]--;
1312 p = reg_class_superclasses[class];
1313 while (*p != LIM_REG_CLASSES)
1314 max_needs[(int) *p++]--;
1315
1316 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1317 max_nongroups[class]--;
1318 p = reg_class_superclasses[class];
1319 while (*p != LIM_REG_CLASSES)
1320 {
1321 if (max_nongroups[(int) *p] > 0)
1322 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1323 max_nongroups[(int) *p++]--;
1324 }
1325 }
1326 }
1327
1328 /* Now find all consecutive groups of spilled registers
1329 and mark each group off against the need for such groups.
1330 But don't count them against ordinary need, yet. */
1331
1332 count_possible_groups (group_size, group_mode, max_groups);
1333
1334 /* Now count all spill regs against the individual need,
a8fdc208 1335 This includes those counted above for groups,
32131a9c
RK
1336 but not those previously counted for nongroups.
1337
1338 Those that weren't counted_for_groups can also count against
1339 the not-in-group need. */
1340
1341 for (i = 0; i < n_spills; i++)
1342 {
1343 register enum reg_class *p;
1344 class = (int) REGNO_REG_CLASS (spill_regs[i]);
1345
1346 /* Those counted at the beginning shouldn't be counted twice. */
1347 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
1348 {
1349 max_needs[class]--;
1350 p = reg_class_superclasses[class];
1351 while (*p != LIM_REG_CLASSES)
1352 max_needs[(int) *p++]--;
1353
1354 if (! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i]))
1355 {
1356 if (max_nongroups[class] > 0)
1357 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1358 max_nongroups[class]--;
1359 p = reg_class_superclasses[class];
1360 while (*p != LIM_REG_CLASSES)
1361 {
1362 if (max_nongroups[(int) *p] > 0)
1363 SET_HARD_REG_BIT (counted_for_nongroups,
1364 spill_regs[i]);
1365 max_nongroups[(int) *p++]--;
1366 }
1367 }
1368 }
1369 }
1370
5c23c401
RK
1371 /* See if anything that happened changes which eliminations are valid.
1372 For example, on the Sparc, whether or not the frame pointer can
1373 be eliminated can depend on what registers have been used. We need
1374 not check some conditions again (such as flag_omit_frame_pointer)
1375 since they can't have changed. */
1376
1377 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1378 if ((ep->from == FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1379#ifdef ELIMINABLE_REGS
1380 || ! CAN_ELIMINATE (ep->from, ep->to)
1381#endif
1382 )
1383 ep->can_eliminate = 0;
1384
32131a9c
RK
1385 /* Look for the case where we have discovered that we can't replace
1386 register A with register B and that means that we will now be
1387 trying to replace register A with register C. This means we can
1388 no longer replace register C with register B and we need to disable
1389 such an elimination, if it exists. This occurs often with A == ap,
1390 B == sp, and C == fp. */
a8fdc208 1391
32131a9c
RK
1392 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1393 {
1394 struct elim_table *op;
1395 register int new_to = -1;
1396
1397 if (! ep->can_eliminate && ep->can_eliminate_previous)
1398 {
1399 /* Find the current elimination for ep->from, if there is a
1400 new one. */
1401 for (op = reg_eliminate;
1402 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1403 if (op->from == ep->from && op->can_eliminate)
1404 {
1405 new_to = op->to;
1406 break;
1407 }
1408
1409 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1410 disable it. */
1411 for (op = reg_eliminate;
1412 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1413 if (op->from == new_to && op->to == ep->to)
1414 op->can_eliminate = 0;
1415 }
1416 }
1417
1418 /* See if any registers that we thought we could eliminate the previous
1419 time are no longer eliminable. If so, something has changed and we
1420 must spill the register. Also, recompute the number of eliminable
1421 registers and see if the frame pointer is needed; it is if there is
1422 no elimination of the frame pointer that we can perform. */
1423
1424 frame_pointer_needed = 1;
1425 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1426 {
1427 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM)
1428 frame_pointer_needed = 0;
1429
1430 if (! ep->can_eliminate && ep->can_eliminate_previous)
1431 {
1432 ep->can_eliminate_previous = 0;
1433 spill_hard_reg (ep->from, global, dumpfile, 1);
1434 regs_ever_live[ep->from] = 1;
1435 something_changed = 1;
1436 num_eliminable--;
1437 }
1438 }
1439
1440 /* If all needs are met, we win. */
1441
1442 for (i = 0; i < N_REG_CLASSES; i++)
1443 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1444 break;
1445 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1446 break;
1447
1448 /* Not all needs are met; must spill more hard regs. */
1449
1450 /* If any element of basic_block_needs changed from 0 to 1,
1451 re-spill all the regs already spilled. This may spill
1452 additional pseudos that didn't spill before. */
1453
1454 if (new_basic_block_needs)
1455 for (i = 0; i < n_spills; i++)
1456 something_changed
1457 |= spill_hard_reg (spill_regs[i], global, dumpfile, 0);
1458
1459 /* Now find more reload regs to satisfy the remaining need
1460 Do it by ascending class number, since otherwise a reg
1461 might be spilled for a big class and might fail to count
1462 for a smaller class even though it belongs to that class.
1463
1464 Count spilled regs in `spills', and add entries to
1465 `spill_regs' and `spill_reg_order'.
1466
1467 ??? Note there is a problem here.
1468 When there is a need for a group in a high-numbered class,
1469 and also need for non-group regs that come from a lower class,
1470 the non-group regs are chosen first. If there aren't many regs,
1471 they might leave no room for a group.
1472
1473 This was happening on the 386. To fix it, we added the code
1474 that calls possible_group_p, so that the lower class won't
1475 break up the last possible group.
1476
1477 Really fixing the problem would require changes above
1478 in counting the regs already spilled, and in choose_reload_regs.
1479 It might be hard to avoid introducing bugs there. */
1480
1481 for (class = 0; class < N_REG_CLASSES; class++)
1482 {
1483 /* First get the groups of registers.
1484 If we got single registers first, we might fragment
1485 possible groups. */
1486 while (max_groups[class] > 0)
1487 {
1488 /* If any single spilled regs happen to form groups,
1489 count them now. Maybe we don't really need
1490 to spill another group. */
1491 count_possible_groups (group_size, group_mode, max_groups);
1492
1493 /* Groups of size 2 (the only groups used on most machines)
1494 are treated specially. */
1495 if (group_size[class] == 2)
1496 {
1497 /* First, look for a register that will complete a group. */
1498 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1499 {
1500 int j = potential_reload_regs[i];
1501 int other;
1502 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1503 &&
1504 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1505 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1506 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1507 && HARD_REGNO_MODE_OK (other, group_mode[class])
1508 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1509 other)
1510 /* We don't want one part of another group.
1511 We could get "two groups" that overlap! */
1512 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1513 ||
1514 (j < FIRST_PSEUDO_REGISTER - 1
1515 && (other = j + 1, spill_reg_order[other] >= 0)
1516 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1517 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1518 && HARD_REGNO_MODE_OK (j, group_mode[class])
1519 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1520 other)
1521 && ! TEST_HARD_REG_BIT (counted_for_groups,
1522 other))))
1523 {
1524 register enum reg_class *p;
1525
1526 /* We have found one that will complete a group,
1527 so count off one group as provided. */
1528 max_groups[class]--;
1529 p = reg_class_superclasses[class];
1530 while (*p != LIM_REG_CLASSES)
1531 max_groups[(int) *p++]--;
1532
1533 /* Indicate both these regs are part of a group. */
1534 SET_HARD_REG_BIT (counted_for_groups, j);
1535 SET_HARD_REG_BIT (counted_for_groups, other);
1536 break;
1537 }
1538 }
1539 /* We can't complete a group, so start one. */
1540 if (i == FIRST_PSEUDO_REGISTER)
1541 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1542 {
1543 int j = potential_reload_regs[i];
1544 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1545 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1546 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1547 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1548 && HARD_REGNO_MODE_OK (j, group_mode[class])
1549 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1550 j + 1))
1551 break;
1552 }
1553
1554 /* I should be the index in potential_reload_regs
1555 of the new reload reg we have found. */
1556
1557 something_changed
1558 |= new_spill_reg (i, class, max_needs, 0,
1559 global, dumpfile);
1560 }
1561 else
1562 {
1563 /* For groups of more than 2 registers,
1564 look for a sufficient sequence of unspilled registers,
1565 and spill them all at once. */
1566 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1567 {
1568 int j = potential_reload_regs[i];
1569 int k;
1570 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1571 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1572 {
1573 /* Check each reg in the sequence. */
1574 for (k = 0; k < group_size[class]; k++)
1575 if (! (spill_reg_order[j + k] < 0
1576 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1577 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1578 break;
1579 /* We got a full sequence, so spill them all. */
1580 if (k == group_size[class])
1581 {
1582 register enum reg_class *p;
1583 for (k = 0; k < group_size[class]; k++)
1584 {
1585 int idx;
1586 SET_HARD_REG_BIT (counted_for_groups, j + k);
1587 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1588 if (potential_reload_regs[idx] == j + k)
1589 break;
1590 something_changed
1591 |= new_spill_reg (idx, class, max_needs, 0,
1592 global, dumpfile);
1593 }
1594
1595 /* We have found one that will complete a group,
1596 so count off one group as provided. */
1597 max_groups[class]--;
1598 p = reg_class_superclasses[class];
1599 while (*p != LIM_REG_CLASSES)
1600 max_groups[(int) *p++]--;
1601
1602 break;
1603 }
1604 }
1605 }
fa52261e
RS
1606 /* We couldn't find any registers for this reload.
1607 Abort to avoid going into an infinite loop. */
1608 if (i == FIRST_PSEUDO_REGISTER)
1609 abort ();
32131a9c
RK
1610 }
1611 }
1612
1613 /* Now similarly satisfy all need for single registers. */
1614
1615 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1616 {
1617 /* Consider the potential reload regs that aren't
1618 yet in use as reload regs, in order of preference.
1619 Find the most preferred one that's in this class. */
1620
1621 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1622 if (potential_reload_regs[i] >= 0
1623 && TEST_HARD_REG_BIT (reg_class_contents[class],
1624 potential_reload_regs[i])
1625 /* If this reg will not be available for groups,
1626 pick one that does not foreclose possible groups.
1627 This is a kludge, and not very general,
1628 but it should be sufficient to make the 386 work,
1629 and the problem should not occur on machines with
1630 more registers. */
1631 && (max_nongroups[class] == 0
1632 || possible_group_p (potential_reload_regs[i], max_groups)))
1633 break;
1634
1635 /* I should be the index in potential_reload_regs
1636 of the new reload reg we have found. */
1637
1638 something_changed
1639 |= new_spill_reg (i, class, max_needs, max_nongroups,
1640 global, dumpfile);
1641 }
1642 }
1643 }
1644
1645 /* If global-alloc was run, notify it of any register eliminations we have
1646 done. */
1647 if (global)
1648 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1649 if (ep->can_eliminate)
1650 mark_elimination (ep->from, ep->to);
1651
1652 /* From now on, we need to emit any moves without making new pseudos. */
1653 reload_in_progress = 1;
1654
1655 /* Insert code to save and restore call-clobbered hard regs
a8efe40d
RK
1656 around calls. Tell if what mode to use so that we will process
1657 those insns in reload_as_needed if we have to. */
32131a9c
RK
1658
1659 if (caller_save_needed)
a8efe40d
RK
1660 save_call_clobbered_regs (num_eliminable ? QImode
1661 : caller_save_spill_class != NO_REGS ? HImode
1662 : VOIDmode);
32131a9c
RK
1663
1664 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1665 If that insn didn't set the register (i.e., it copied the register to
1666 memory), just delete that insn instead of the equivalencing insn plus
1667 anything now dead. If we call delete_dead_insn on that insn, we may
1668 delete the insn that actually sets the register if the register die
1669 there and that is incorrect. */
1670
1671 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1672 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1673 && GET_CODE (reg_equiv_init[i]) != NOTE)
1674 {
1675 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1676 delete_dead_insn (reg_equiv_init[i]);
1677 else
1678 {
1679 PUT_CODE (reg_equiv_init[i], NOTE);
1680 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1681 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1682 }
1683 }
1684
1685 /* Use the reload registers where necessary
1686 by generating move instructions to move the must-be-register
1687 values into or out of the reload registers. */
1688
a8efe40d
RK
1689 if (something_needs_reloads || something_needs_elimination
1690 || (caller_save_needed && num_eliminable)
1691 || caller_save_spill_class != NO_REGS)
32131a9c
RK
1692 reload_as_needed (first, global);
1693
1694 reload_in_progress = 0;
1695
1696 /* Now eliminate all pseudo regs by modifying them into
1697 their equivalent memory references.
1698 The REG-rtx's for the pseudos are modified in place,
1699 so all insns that used to refer to them now refer to memory.
1700
1701 For a reg that has a reg_equiv_address, all those insns
1702 were changed by reloading so that no insns refer to it any longer;
1703 but the DECL_RTL of a variable decl may refer to it,
1704 and if so this causes the debugging info to mention the variable. */
1705
1706 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1707 {
1708 rtx addr = 0;
ab1fd483 1709 int in_struct = 0;
32131a9c 1710 if (reg_equiv_mem[i])
ab1fd483
RS
1711 {
1712 addr = XEXP (reg_equiv_mem[i], 0);
1713 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1714 }
32131a9c
RK
1715 if (reg_equiv_address[i])
1716 addr = reg_equiv_address[i];
1717 if (addr)
1718 {
1719 if (reg_renumber[i] < 0)
1720 {
1721 rtx reg = regno_reg_rtx[i];
1722 XEXP (reg, 0) = addr;
1723 REG_USERVAR_P (reg) = 0;
ab1fd483 1724 MEM_IN_STRUCT_P (reg) = in_struct;
32131a9c
RK
1725 PUT_CODE (reg, MEM);
1726 }
1727 else if (reg_equiv_mem[i])
1728 XEXP (reg_equiv_mem[i], 0) = addr;
1729 }
1730 }
1731
1732#ifdef PRESERVE_DEATH_INFO_REGNO_P
1733 /* Make a pass over all the insns and remove death notes for things that
1734 are no longer registers or no longer die in the insn (e.g., an input
1735 and output pseudo being tied). */
1736
1737 for (insn = first; insn; insn = NEXT_INSN (insn))
1738 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1739 {
1740 rtx note, next;
1741
1742 for (note = REG_NOTES (insn); note; note = next)
1743 {
1744 next = XEXP (note, 1);
1745 if (REG_NOTE_KIND (note) == REG_DEAD
1746 && (GET_CODE (XEXP (note, 0)) != REG
1747 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1748 remove_note (insn, note);
1749 }
1750 }
1751#endif
1752
1753 /* Indicate that we no longer have known memory locations or constants. */
1754 reg_equiv_constant = 0;
1755 reg_equiv_memory_loc = 0;
1756}
1757\f
1758/* Nonzero if, after spilling reg REGNO for non-groups,
1759 it will still be possible to find a group if we still need one. */
1760
static int
possible_group_p (regno, max_groups)
     int regno;
     int *max_groups;
{
  int i;
  int class = (int) NO_REGS;

  /* Find the lowest-numbered class that still needs a group.
     Only this one class is checked below.  */
  for (i = 0; i < (int) N_REG_CLASSES; i++)
    if (max_groups[i] > 0)
      {
	class = i;
	break;
      }

  /* No class needs a group, so spilling REGNO alone cannot hurt.  */
  if (class == (int) NO_REGS)
    return 1;

  /* Consider each pair of consecutive registers.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
    {
      /* Ignore pairs that include reg REGNO.  */
      if (i == regno || i + 1 == regno)
	continue;

      /* Ignore pairs that are outside the class that needs the group.
	 ??? Here we fail to handle the case where two different classes
	 independently need groups.  But this never happens with our
	 current machine descriptions.  */
      if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
	     && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
	continue;

      /* A pair of consecutive regs we can still spill does the trick.
	 Neither reg may already be a spill reg or be marked unspillable.  */
      if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
	return 1;

      /* A pair of one already spilled and one we can spill does it
	 provided the one already spilled is not otherwise reserved
	 (i.e., not already counted toward a group or a non-group need).  */
      if (spill_reg_order[i] < 0
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
	  && spill_reg_order[i + 1] >= 0
	  && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
	  && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
	return 1;
      /* Same test with the roles of the two registers swapped.  */
      if (spill_reg_order[i + 1] < 0
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
	  && spill_reg_order[i] >= 0
	  && ! TEST_HARD_REG_BIT (counted_for_groups, i)
	  && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
	return 1;
    }

  /* No usable consecutive pair remains, so the group would be lost.  */
  return 0;
}
1818\f
1819/* Count any groups that can be formed from the registers recently spilled.
1820 This is done class by class, in order of ascending class number. */
1821
static void
count_possible_groups (group_size, group_mode, max_groups)
     int *group_size, *max_groups;
     enum machine_mode *group_mode;
{
  int i;
  /* Now find all consecutive groups of spilled registers
     and mark each group off against the need for such groups.
     But don't count them against ordinary need, yet.  */

  for (i = 0; i < N_REG_CLASSES; i++)
    if (group_size[i] > 1)
      {
	/* regmask[R] is nonzero if hard reg R is an available spill reg
	   of class I, not yet counted for any group or non-group need.  */
	char regmask[FIRST_PSEUDO_REGISTER];
	int j;

	bzero (regmask, sizeof regmask);
	/* Make a mask of all the regs that are spill regs in class I.  */
	for (j = 0; j < n_spills; j++)
	  if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
	      && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
	      && ! TEST_HARD_REG_BIT (counted_for_nongroups,
				      spill_regs[j]))
	    regmask[spill_regs[j]] = 1;
	/* Find each consecutive group of them.  */
	for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
	  if (regmask[j] && j + group_size[i] <= FIRST_PSEUDO_REGISTER
	      /* Next line in case group-mode for this class
		 demands an even-odd pair.  */
	      && HARD_REGNO_MODE_OK (j, group_mode[i]))
	    {
	      int k;
	      /* Check that all group_size[i] regs starting at J are
		 available; K stops at the first gap.  */
	      for (k = 1; k < group_size[i]; k++)
		if (! regmask[j + k])
		  break;
	      if (k == group_size[i])
		{
		  /* We found a group.  Mark it off against this class's
		     need for groups, and against each superclass too.  */
		  register enum reg_class *p;
		  max_groups[i]--;
		  p = reg_class_superclasses[i];
		  while (*p != LIM_REG_CLASSES)
		    max_groups[(int) *p++]--;
		  /* Don't count these registers again.  */
		  for (k = 0; k < group_size[i]; k++)
		    SET_HARD_REG_BIT (counted_for_groups, j + k);
		}
	      /* Skip to the last reg in this group.  When j is incremented
		 above, it will then point to the first reg of the next
		 possible group.  */
	      j += k - 1;
	    }
      }

}
1878\f
1879/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
1880 another mode that needs to be reloaded for the same register class CLASS.
1881 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
1882 ALLOCATE_MODE will never be smaller than OTHER_MODE.
1883
1884 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
1885 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
1886 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
1887 causes unnecessary failures on machines requiring alignment of register
1888 groups when the two modes are different sizes, because the larger mode has
1889 more strict alignment rules than the smaller mode. */
1890
1891static int
1892modes_equiv_for_class_p (allocate_mode, other_mode, class)
1893 enum machine_mode allocate_mode, other_mode;
1894 enum reg_class class;
1895{
1896 register int regno;
1897 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1898 {
1899 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
1900 && HARD_REGNO_MODE_OK (regno, allocate_mode)
1901 && ! HARD_REGNO_MODE_OK (regno, other_mode))
1902 return 0;
1903 }
1904 return 1;
1905}
1906
1907/* Add a new register to the tables of available spill-registers
1908 (as well as spilling all pseudos allocated to the register).
1909 I is the index of this register in potential_reload_regs.
1910 CLASS is the regclass whose need is being satisfied.
1911 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
1912 so that this register can count off against them.
1913 MAX_NONGROUPS is 0 if this register is part of a group.
1914 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
1915
static int
new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
     int i;
     int class;
     int *max_needs;
     int *max_nongroups;
     int global;
     FILE *dumpfile;
{
  register enum reg_class *p;
  int val;
  int regno = potential_reload_regs[i];

  if (i >= FIRST_PSEUDO_REGISTER)
    abort ();	/* Caller failed to find any register.  */

  if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
    fatal ("fixed or forbidden register was spilled.\n\
This may be due to a compiler bug or to impossible asm statements.");

  /* Make reg REGNO an additional reload reg.  */

  potential_reload_regs[i] = -1;
  spill_regs[n_spills] = regno;
  spill_reg_order[regno] = n_spills;
  if (dumpfile)
    fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);

  /* Clear off the needs we just satisfied.  */

  max_needs[class]--;
  p = reg_class_superclasses[class];
  while (*p != LIM_REG_CLASSES)
    max_needs[(int) *p++]--;

  /* A register counted against non-group need must not later be used
     to complete a group, so record it in counted_for_nongroups.  */
  if (max_nongroups && max_nongroups[class] > 0)
    {
      SET_HARD_REG_BIT (counted_for_nongroups, regno);
      max_nongroups[class]--;
      p = reg_class_superclasses[class];
      while (*p != LIM_REG_CLASSES)
	max_nongroups[(int) *p++]--;
    }

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);

  /* If there are some registers still to eliminate and this register
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.
     (If new pseudos were spilled, thus requiring more space, VAL would have
     been set non-zero by the call to spill_hard_reg above since additional
     reloads may be needed in that case.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */
  if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
    val = 1;

  regs_ever_live[spill_regs[n_spills]] = 1;
  n_spills++;

  /* Nonzero VAL tells the caller that another pass is needed.  */
  return val;
}
1986\f
 1987/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
 1988   data that is dead in INSN.  */
1989
1990static void
1991delete_dead_insn (insn)
1992 rtx insn;
1993{
1994 rtx prev = prev_real_insn (insn);
1995 rtx prev_dest;
1996
1997 /* If the previous insn sets a register that dies in our insn, delete it
1998 too. */
1999 if (prev && GET_CODE (PATTERN (prev)) == SET
2000 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2001 && reg_mentioned_p (prev_dest, PATTERN (insn))
2002 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2003 delete_dead_insn (prev);
2004
2005 PUT_CODE (insn, NOTE);
2006 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2007 NOTE_SOURCE_FILE (insn) = 0;
2008}
2009
2010/* Modify the home of pseudo-reg I.
2011 The new home is present in reg_renumber[I].
2012
2013 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2014 or it may be -1, meaning there is none or it is not relevant.
2015 This is used so that all pseudos spilled from a given hard reg
2016 can share one stack slot. */
2017
static void
alter_reg (i, from_reg)
     register int i;
     int from_reg;
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (GET_CODE (regno_reg_rtx[i]) != REG)
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  REGNO (regno_reg_rtx[i])
    = reg_renumber[i] >= 0 ? reg_renumber[i] : i;

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && reg_n_refs[i] > 0
      && reg_equiv_constant[i] == 0
      && reg_equiv_memory_loc[i] == 0)
    {
      register rtx x;
      int inherent_size = PSEUDO_REGNO_BYTES (i);
      int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      int adjust = 0;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, than the previous slot.  */
      if (from_reg == -1)
	{
	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
#if BYTES_BIG_ENDIAN
	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.
	     This is so we can do a big-endian correction unconditionally
	     below.  */
	  adjust = inherent_size - total_size;
#endif
	}
      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size))
	x = spill_stack_slot[from_reg];
      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	    }
	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size, -1);
#if BYTES_BIG_ENDIAN
	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.
	     This is so we can do a big-endian correction unconditionally
	     below.  */
	  adjust = GET_MODE_SIZE (mode) - total_size;
#endif
	  /* Record the slot so later spills from FROM_REG can share it.  */
	  spill_stack_slot[from_reg] = x;
	  spill_stack_slot_width[from_reg] = total_size;
	}

#if BYTES_BIG_ENDIAN
      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (inherent_size < total_size)
	adjust += (total_size - inherent_size);
#endif /* BYTES_BIG_ENDIAN */

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
	{
	  x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
		       plus_constant (XEXP (x, 0), adjust));
	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc[i] = x;
    }
}
2124
2125/* Mark the slots in regs_ever_live for the hard regs
2126 used by pseudo-reg number REGNO. */
2127
2128void
2129mark_home_live (regno)
2130 int regno;
2131{
2132 register int i, lim;
2133 i = reg_renumber[regno];
2134 if (i < 0)
2135 return;
2136 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2137 while (i < lim)
2138 regs_ever_live[i++] = 1;
2139}
2140\f
2141/* This function handles the tracking of elimination offsets around branches.
2142
2143 X is a piece of RTL being scanned.
2144
2145 INSN is the insn that it came from, if any.
2146
2147 INITIAL_P is non-zero if we are to set the offset to be the initial
2148 offset and zero if we are setting the offset of the label to be the
2149 current offset. */
2150
static void
set_label_offsets (x, insn, initial_p)
     rtx x;
     rtx insn;
     int initial_p;
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      x = XEXP (x, 0);

      /* ... fall through ... */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x)][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && GET_CODE (tem) == BARRIER)
	{
	  /* Control only reaches here via jumps to this label, so adopt
	     the offsets recorded for it and recount how many eliminable
	     regs are away from their initial offsets.  */
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (x)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x)][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_INSN:
      /* Process the jump body first, then handle any REG_LABEL notes
	 via the INSN case below.  */
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ... */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
	 and hence must have all eliminations at their initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the address vector must be at their initial
	 offsets.  We want the first field for ADDR_VEC and the second
	 field for ADDR_DIFF_VEC.  */

      for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Handle each arm; `break' out of the switch for an arm that is
	     neither a label, PC, nor RETURN, so the code below disables
	     eliminations whose offsets have moved.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
    }
}
2292\f
2293/* Used for communication between the next two function to properly share
2294 the vector for an ASM_OPERANDS. */
2295
2296static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2297
a8fdc208 2298/* Scan X and replace any eliminable registers (such as fp) with a
32131a9c
RK
2299 replacement (such as sp), plus an offset.
2300
2301 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2302 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2303 MEM, we are allowed to replace a sum of a register and the constant zero
2304 with the register, which we cannot do outside a MEM. In addition, we need
2305 to record the fact that a register is referenced outside a MEM.
2306
2307 If INSN is nonzero, it is the insn containing X. If we replace a REG
2308 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2309 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2310 that the REG is being modified.
2311
2312 If we see a modification to a register we know about, take the
2313 appropriate action (see case SET, below).
2314
2315 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2316 replacements done assuming all offsets are at their initial values. If
2317 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2318 encounter, return the actual location so that find_reloads will do
2319 the proper thing. */
2320
2321rtx
2322eliminate_regs (x, mem_mode, insn)
2323 rtx x;
2324 enum machine_mode mem_mode;
2325 rtx insn;
2326{
2327 enum rtx_code code = GET_CODE (x);
2328 struct elim_table *ep;
2329 int regno;
2330 rtx new;
2331 int i, j;
2332 char *fmt;
2333 int copied = 0;
2334
2335 switch (code)
2336 {
2337 case CONST_INT:
2338 case CONST_DOUBLE:
2339 case CONST:
2340 case SYMBOL_REF:
2341 case CODE_LABEL:
2342 case PC:
2343 case CC0:
2344 case ASM_INPUT:
2345 case ADDR_VEC:
2346 case ADDR_DIFF_VEC:
2347 case RETURN:
2348 return x;
2349
2350 case REG:
2351 regno = REGNO (x);
2352
2353 /* First handle the case where we encounter a bare register that
2354 is eliminable. Replace it with a PLUS. */
2355 if (regno < FIRST_PSEUDO_REGISTER)
2356 {
2357 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2358 ep++)
2359 if (ep->from_rtx == x && ep->can_eliminate)
2360 {
2361 if (! mem_mode)
2362 ep->ref_outside_mem = 1;
2363 return plus_constant (ep->to_rtx, ep->previous_offset);
2364 }
2365
2366 }
2367 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2368 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2369 {
2370 /* In this case, find_reloads would attempt to either use an
2371 incorrect address (if something is not at its initial offset)
 2372 or substitute a replaced address into an insn (which loses
2373 if the offset is changed by some later action). So we simply
2374 return the replaced stack slot (assuming it is changed by
2375 elimination) and ignore the fact that this is actually a
2376 reference to the pseudo. Ensure we make a copy of the
2377 address in case it is shared. */
2378 new = eliminate_regs (reg_equiv_memory_loc[regno], mem_mode, 0);
2379 if (new != reg_equiv_memory_loc[regno])
2380 return copy_rtx (new);
2381 }
2382 return x;
2383
2384 case PLUS:
2385 /* If this is the sum of an eliminable register and a constant, rework
2386 the sum. */
2387 if (GET_CODE (XEXP (x, 0)) == REG
2388 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2389 && CONSTANT_P (XEXP (x, 1)))
2390 {
2391 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2392 ep++)
2393 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2394 {
2395 if (! mem_mode)
2396 ep->ref_outside_mem = 1;
2397
2398 /* The only time we want to replace a PLUS with a REG (this
2399 occurs when the constant operand of the PLUS is the negative
2400 of the offset) is when we are inside a MEM. We won't want
2401 to do so at other times because that would change the
2402 structure of the insn in a way that reload can't handle.
2403 We special-case the commonest situation in
2404 eliminate_regs_in_insn, so just replace a PLUS with a
2405 PLUS here, unless inside a MEM. */
2406 if (mem_mode && GET_CODE (XEXP (x, 1)) == CONST_INT
2407 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2408 return ep->to_rtx;
2409 else
2410 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2411 plus_constant (XEXP (x, 1),
2412 ep->previous_offset));
2413 }
2414
2415 /* If the register is not eliminable, we are done since the other
2416 operand is a constant. */
2417 return x;
2418 }
2419
2420 /* If this is part of an address, we want to bring any constant to the
2421 outermost PLUS. We will do this by doing register replacement in
2422 our operands and seeing if a constant shows up in one of them.
2423
2424 We assume here this is part of an address (or a "load address" insn)
2425 since an eliminable register is not likely to appear in any other
2426 context.
2427
2428 If we have (plus (eliminable) (reg)), we want to produce
2429 (plus (plus (replacement) (reg) (const))). If this was part of a
2430 normal add insn, (plus (replacement) (reg)) will be pushed as a
2431 reload. This is the desired action. */
2432
2433 {
2434 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, 0);
2435 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, 0);
2436
2437 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2438 {
2439 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2440 didn't get a hard register but has a reg_equiv_constant,
32131a9c
RK
2441 we must replace the constant here since it may no longer
2442 be in the position of any operand. */
2443 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2444 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2445 && reg_renumber[REGNO (new1)] < 0
2446 && reg_equiv_constant != 0
2447 && reg_equiv_constant[REGNO (new1)] != 0)
2448 new1 = reg_equiv_constant[REGNO (new1)];
2449 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2450 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2451 && reg_renumber[REGNO (new0)] < 0
2452 && reg_equiv_constant[REGNO (new0)] != 0)
2453 new0 = reg_equiv_constant[REGNO (new0)];
2454
2455 new = form_sum (new0, new1);
2456
2457 /* As above, if we are not inside a MEM we do not want to
2458 turn a PLUS into something else. We might try to do so here
2459 for an addition of 0 if we aren't optimizing. */
2460 if (! mem_mode && GET_CODE (new) != PLUS)
2461 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2462 else
2463 return new;
2464 }
2465 }
2466 return x;
2467
2468 case EXPR_LIST:
2469 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2470 if (XEXP (x, 0))
2471 {
2472 new = eliminate_regs (XEXP (x, 0), mem_mode, 0);
2473 if (new != XEXP (x, 0))
2474 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2475 }
2476
2477 /* ... fall through ... */
2478
2479 case INSN_LIST:
2480 /* Now do eliminations in the rest of the chain. If this was
2481 an EXPR_LIST, this might result in allocating more memory than is
2482 strictly needed, but it simplifies the code. */
2483 if (XEXP (x, 1))
2484 {
2485 new = eliminate_regs (XEXP (x, 1), mem_mode, 0);
2486 if (new != XEXP (x, 1))
2487 return gen_rtx (INSN_LIST, GET_MODE (x), XEXP (x, 0), new);
2488 }
2489 return x;
2490
2491 case CALL:
2492 case COMPARE:
2493 case MINUS:
2494 case MULT:
2495 case DIV: case UDIV:
2496 case MOD: case UMOD:
2497 case AND: case IOR: case XOR:
2498 case LSHIFT: case ASHIFT: case ROTATE:
2499 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2500 case NE: case EQ:
2501 case GE: case GT: case GEU: case GTU:
2502 case LE: case LT: case LEU: case LTU:
2503 {
2504 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, 0);
2505 rtx new1 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, 0) : 0;
2506
2507 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2508 return gen_rtx (code, GET_MODE (x), new0, new1);
2509 }
2510 return x;
2511
2512 case PRE_INC:
2513 case POST_INC:
2514 case PRE_DEC:
2515 case POST_DEC:
2516 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2517 if (ep->to_rtx == XEXP (x, 0))
2518 {
2519 if (code == PRE_DEC || code == POST_DEC)
2520 ep->offset += GET_MODE_SIZE (mem_mode);
2521 else
2522 ep->offset -= GET_MODE_SIZE (mem_mode);
2523 }
2524
2525 /* Fall through to generic unary operation case. */
2526 case USE:
2527 case STRICT_LOW_PART:
2528 case NEG: case NOT:
2529 case SIGN_EXTEND: case ZERO_EXTEND:
2530 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2531 case FLOAT: case FIX:
2532 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2533 case ABS:
2534 case SQRT:
2535 case FFS:
2536 new = eliminate_regs (XEXP (x, 0), mem_mode, 0);
2537 if (new != XEXP (x, 0))
2538 return gen_rtx (code, GET_MODE (x), new);
2539 return x;
2540
2541 case SUBREG:
2542 /* Similar to above processing, but preserve SUBREG_WORD.
2543 Convert (subreg (mem)) to (mem) if not paradoxical.
2544 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2545 pseudo didn't get a hard reg, we must replace this with the
2546 eliminated version of the memory location because push_reloads
2547 may do the replacement in certain circumstances. */
2548 if (GET_CODE (SUBREG_REG (x)) == REG
2549 && (GET_MODE_SIZE (GET_MODE (x))
2550 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2551 && reg_equiv_memory_loc != 0
2552 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2553 {
2554 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2555 mem_mode, 0);
2556
2557 /* If we didn't change anything, we must retain the pseudo. */
2558 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2559 new = XEXP (x, 0);
2560 else
2561 /* Otherwise, ensure NEW isn't shared in case we have to reload
2562 it. */
2563 new = copy_rtx (new);
2564 }
2565 else
2566 new = eliminate_regs (SUBREG_REG (x), mem_mode, 0);
2567
2568 if (new != XEXP (x, 0))
2569 {
2570 if (GET_CODE (new) == MEM
2571 && (GET_MODE_SIZE (GET_MODE (x))
2572 <= GET_MODE_SIZE (GET_MODE (new))))
2573 {
2574 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2575 enum machine_mode mode = GET_MODE (x);
2576
2577#if BYTES_BIG_ENDIAN
2578 offset += (MIN (UNITS_PER_WORD,
2579 GET_MODE_SIZE (GET_MODE (new)))
2580 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2581#endif
2582
2583 PUT_MODE (new, mode);
2584 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2585 return new;
2586 }
2587 else
2588 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2589 }
2590
2591 return x;
2592
2593 case CLOBBER:
2594 /* If clobbering a register that is the replacement register for an
d45cf215 2595 elimination we still think can be performed, note that it cannot
32131a9c
RK
2596 be performed. Otherwise, we need not be concerned about it. */
2597 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2598 if (ep->to_rtx == XEXP (x, 0))
2599 ep->can_eliminate = 0;
2600
2601 return x;
2602
2603 case ASM_OPERANDS:
2604 {
2605 rtx *temp_vec;
2606 /* Properly handle sharing input and constraint vectors. */
2607 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2608 {
2609 /* When we come to a new vector not seen before,
2610 scan all its elements; keep the old vector if none
2611 of them changes; otherwise, make a copy. */
2612 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2613 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2614 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2615 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
2616 mem_mode, 0);
2617
2618 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2619 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2620 break;
2621
2622 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2623 new_asm_operands_vec = old_asm_operands_vec;
2624 else
2625 new_asm_operands_vec
2626 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2627 }
2628
2629 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2630 if (new_asm_operands_vec == old_asm_operands_vec)
2631 return x;
2632
2633 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2634 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2635 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2636 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2637 ASM_OPERANDS_SOURCE_FILE (x),
2638 ASM_OPERANDS_SOURCE_LINE (x));
2639 new->volatil = x->volatil;
2640 return new;
2641 }
2642
2643 case SET:
2644 /* Check for setting a register that we know about. */
2645 if (GET_CODE (SET_DEST (x)) == REG)
2646 {
2647 /* See if this is setting the replacement register for an
a8fdc208 2648 elimination.
32131a9c
RK
2649
2650 If DEST is the frame pointer, we do nothing because we assume that
2651 all assignments to the frame pointer are for non-local gotos and
2652 are being done at a time when they are valid and do not disturb
2653 anything else. Some machines want to eliminate a fake argument
2654 pointer with either the frame or stack pointer. Assignments to
2655 the frame pointer must not prevent this elimination. */
2656
2657 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2658 ep++)
2659 if (ep->to_rtx == SET_DEST (x)
2660 && SET_DEST (x) != frame_pointer_rtx)
2661 {
6dc42e49 2662 /* If it is being incremented, adjust the offset. Otherwise,
32131a9c
RK
2663 this elimination can't be done. */
2664 rtx src = SET_SRC (x);
2665
2666 if (GET_CODE (src) == PLUS
2667 && XEXP (src, 0) == SET_DEST (x)
2668 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2669 ep->offset -= INTVAL (XEXP (src, 1));
2670 else
2671 ep->can_eliminate = 0;
2672 }
2673
2674 /* Now check to see we are assigning to a register that can be
2675 eliminated. If so, it must be as part of a PARALLEL, since we
2676 will not have been called if this is a single SET. So indicate
2677 that we can no longer eliminate this reg. */
2678 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2679 ep++)
2680 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
2681 ep->can_eliminate = 0;
2682 }
2683
2684 /* Now avoid the loop below in this common case. */
2685 {
2686 rtx new0 = eliminate_regs (SET_DEST (x), 0, 0);
2687 rtx new1 = eliminate_regs (SET_SRC (x), 0, 0);
2688
2689 /* If SET_DEST changed from a REG to a MEM and INSN is non-zero,
2690 write a CLOBBER insn. */
2691 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
2692 && insn != 0)
2693 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
2694
2695 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
2696 return gen_rtx (SET, VOIDmode, new0, new1);
2697 }
2698
2699 return x;
2700
2701 case MEM:
2702 /* Our only special processing is to pass the mode of the MEM to our
2703 recursive call and copy the flags. While we are here, handle this
2704 case more efficiently. */
2705 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), 0);
2706 if (new != XEXP (x, 0))
2707 {
2708 new = gen_rtx (MEM, GET_MODE (x), new);
2709 new->volatil = x->volatil;
2710 new->unchanging = x->unchanging;
2711 new->in_struct = x->in_struct;
2712 return new;
2713 }
2714 else
2715 return x;
2716 }
2717
2718 /* Process each of our operands recursively. If any have changed, make a
2719 copy of the rtx. */
2720 fmt = GET_RTX_FORMAT (code);
2721 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2722 {
2723 if (*fmt == 'e')
2724 {
2725 new = eliminate_regs (XEXP (x, i), mem_mode, 0);
2726 if (new != XEXP (x, i) && ! copied)
2727 {
2728 rtx new_x = rtx_alloc (code);
2729 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
2730 + (sizeof (new_x->fld[0])
2731 * GET_RTX_LENGTH (code))));
2732 x = new_x;
2733 copied = 1;
2734 }
2735 XEXP (x, i) = new;
2736 }
2737 else if (*fmt == 'E')
2738 {
2739 int copied_vec = 0;
2740 for (j = 0; j < XVECLEN (x, i); j++)
2741 {
2742 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
2743 if (new != XVECEXP (x, i, j) && ! copied_vec)
2744 {
2745 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2746 &XVECEXP (x, i, 0));
2747 if (! copied)
2748 {
2749 rtx new_x = rtx_alloc (code);
2750 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
2751 + (sizeof (new_x->fld[0])
2752 * GET_RTX_LENGTH (code))));
2753 x = new_x;
2754 copied = 1;
2755 }
2756 XVEC (x, i) = new_v;
2757 copied_vec = 1;
2758 }
2759 XVECEXP (x, i, j) = new;
2760 }
2761 }
2762 }
2763
2764 return x;
2765}
2766\f
/* Scan INSN and eliminate all eliminable registers in it.

   If REPLACE is nonzero, do the replacement destructively.  Also
   delete the insn as dead if it is setting an eliminable register.

   If REPLACE is zero, do all our allocations in reload_obstack.

   If no eliminations were done and this insn doesn't require any elimination
   processing (these are not identical conditions: it might be updating sp,
   but not referencing fp; this needs to be seen during reload_as_needed so
   that the offset between fp and sp can be taken into consideration), zero
   is returned.  Otherwise, 1 is returned.  */

static int
eliminate_regs_in_insn (insn, replace)
     rtx insn;
     int replace;
{
  rtx old_body = PATTERN (insn);
  rtx new_body;
  int val = 0;
  struct elim_table *ep;

  /* On a tentative pass, any rtl we build must be discardable; allocate
     it on reload_obstack until the matching pop_obstacks at `done'.  */
  if (! replace)
    push_obstacks (&reload_obstack, &reload_obstack);

  if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
      && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
	  {
	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      delete_dead_insn (insn);
	    val = 1;
	    goto done;
	  }

      /* Check for (set (reg) (plus (reg from) (offset))) where the offset
	 in the insn is the negative of the offset in FROM.  Substitute
	 (set (reg) (reg to)) for the insn and change its code.

	 We have to do this here, rather than in eliminate_regs, so that we
	 can change the insn code.  */

      if (GET_CODE (SET_SRC (old_body)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
	  && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
	      && ep->can_eliminate
	      /* The constant in the insn must exactly cancel the current
		 elimination offset for the copy to be valid.  */
	      && ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
	    {
	      PATTERN (insn) = gen_rtx (SET, VOIDmode,
					SET_DEST (old_body), ep->to_rtx);
	      /* Force re-recognition since the pattern changed.  */
	      INSN_CODE (insn) = -1;
	      val = 1;
	      goto done;
	    }
    }

  old_asm_operands_vec = 0;

  /* Replace the body of this insn with a substituted form.  If we changed
     something, return non-zero.  If this is the final call for this
     insn (REPLACE is non-zero), do the elimination in REG_NOTES as well.

     If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.  */

  new_body = eliminate_regs (old_body, 0, replace ? insn : 0);
  if (new_body != old_body)
    {
      /* Only attempt re-recognition for the reg+reg/reg+const case;
	 otherwise just install the new body unconditionally.  */
      if (GET_CODE (old_body) != SET || GET_CODE (SET_SRC (old_body)) != PLUS
	  || ! validate_change (insn, &PATTERN (insn), new_body, 0))
	PATTERN (insn) = new_body;

      if (replace && REG_NOTES (insn))
	REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, 0);
      val = 1;
    }

  /* Loop through all elimination pairs.  See if any have changed and
     recalculate the number not at initial offset.

     Compute the maximum offset (minimum offset if the stack does not
     grow downward) for each elimination pair.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  num_not_at_initial_offset = 0;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* A register both modified and referenced outside a MEM kills
	 this elimination (see comment above).  */
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
	val = 1;

      ep->previous_offset = ep->offset;
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
	num_not_at_initial_offset++;

#ifdef STACK_GROWS_DOWNWARD
      ep->max_offset = MAX (ep->max_offset, ep->offset);
#else
      ep->max_offset = MIN (ep->max_offset, ep->offset);
#endif
    }

 done:
  if (! replace)
    pop_obstacks ();

  return val;
}
2902
2903/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
2904 replacement we currently believe is valid, mark it as not eliminable if X
2905 modifies DEST in any way other than by adding a constant integer to it.
2906
2907 If DEST is the frame pointer, we do nothing because we assume that
2908 all assignments to the frame pointer are nonlocal gotos and are being done
2909 at a time when they are valid and do not disturb anything else.
2910 Some machines want to eliminate a fake argument pointer with either the
2911 frame or stack pointer. Assignments to the frame pointer must not prevent
2912 this elimination.
2913
2914 Called via note_stores from reload before starting its passes to scan
2915 the insns of the function. */
2916
2917static void
2918mark_not_eliminable (dest, x)
2919 rtx dest;
2920 rtx x;
2921{
2922 register int i;
2923
2924 /* A SUBREG of a hard register here is just changing its mode. We should
2925 not see a SUBREG of an eliminable hard register, but check just in
2926 case. */
2927 if (GET_CODE (dest) == SUBREG)
2928 dest = SUBREG_REG (dest);
2929
2930 if (dest == frame_pointer_rtx)
2931 return;
2932
2933 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2934 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
2935 && (GET_CODE (x) != SET
2936 || GET_CODE (SET_SRC (x)) != PLUS
2937 || XEXP (SET_SRC (x), 0) != dest
2938 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
2939 {
2940 reg_eliminate[i].can_eliminate_previous
2941 = reg_eliminate[i].can_eliminate = 0;
2942 num_eliminable--;
2943 }
2944}
2945\f
/* Kick all pseudos out of hard register REGNO.
   If GLOBAL is nonzero, try to find someplace else to put them.
   If DUMPFILE is nonzero, log actions taken on that file.

   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In that case, no
   pseudos are allowed to be in the register, even if they are only in a
   block that doesn't require spill registers, unlike the case when we are
   spilling this hard reg to produce another spill register.

   Return nonzero if any pseudos needed to be kicked out.  */

static int
spill_hard_reg (regno, global, dumpfile, cant_eliminate)
     register int regno;
     int global;
     FILE *dumpfile;
     int cant_eliminate;
{
  int something_changed = 0;
  register int i;

  /* No pseudo may be allocated to this register from here on.  */
  SET_HARD_REG_BIT (forbidden_regs, regno);

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] >= 0
	&& reg_renumber[i] <= regno
	/* Pseudo I occupies hard regs [reg_renumber[i],
	   reg_renumber[i] + HARD_REGNO_NREGS); it overlaps REGNO when
	   that half-open range straddles REGNO.  */
	&& (reg_renumber[i]
	    + HARD_REGNO_NREGS (reg_renumber[i],
				PSEUDO_REGNO_MODE (i))
	    > regno))
      {
	enum reg_class class = REGNO_REG_CLASS (regno);

	/* If this register belongs solely to a basic block which needed no
	   spilling of any class that this register is contained in,
	   leave it be, unless we are spilling this register because
	   it was a hard register that can't be eliminated.  */

	if (! cant_eliminate
	    && basic_block_needs[0]
	    && reg_basic_block[i] >= 0
	    && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
	  {
	    enum reg_class *p;

	    /* Also make sure no superclass of CLASS needed spilling in
	       this block before deciding to leave the pseudo alone.  */
	    for (p = reg_class_superclasses[(int) class];
		 *p != LIM_REG_CLASSES; p++)
	      if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
		break;

	    if (*p == LIM_REG_CLASSES)
	      continue;
	  }

	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	/* We will need to scan everything again.  */
	something_changed = 1;
	if (global)
	  retry_global_alloc (i, forbidden_regs);

	/* Assign a stack slot (or a new hard reg found by
	   retry_global_alloc) to the displaced pseudo.  */
	alter_reg (i, regno);
	if (dumpfile)
	  {
	    if (reg_renumber[i] == -1)
	      fprintf (dumpfile, " Register %d now on stack.\n\n", i);
	    else
	      fprintf (dumpfile, " Register %d now in %d.\n\n",
		       i, reg_renumber[i]);
	  }
      }

  return something_changed;
}
3024\f
3025/* Find all paradoxical subregs within X and update reg_max_ref_width. */
3026
3027static void
3028scan_paradoxical_subregs (x)
3029 register rtx x;
3030{
3031 register int i;
3032 register char *fmt;
3033 register enum rtx_code code = GET_CODE (x);
3034
3035 switch (code)
3036 {
3037 case CONST_INT:
3038 case CONST:
3039 case SYMBOL_REF:
3040 case LABEL_REF:
3041 case CONST_DOUBLE:
3042 case CC0:
3043 case PC:
3044 case REG:
3045 case USE:
3046 case CLOBBER:
3047 return;
3048
3049 case SUBREG:
3050 if (GET_CODE (SUBREG_REG (x)) == REG
3051 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3052 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3053 = GET_MODE_SIZE (GET_MODE (x));
3054 return;
3055 }
3056
3057 fmt = GET_RTX_FORMAT (code);
3058 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3059 {
3060 if (fmt[i] == 'e')
3061 scan_paradoxical_subregs (XEXP (x, i));
3062 else if (fmt[i] == 'E')
3063 {
3064 register int j;
3065 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3066 scan_paradoxical_subregs (XVECEXP (x, i, j));
3067 }
3068 }
3069}
3070\f
struct hard_reg_n_uses { int regno; int uses; };

/* qsort comparison function: order hard registers by increasing use
   count, breaking ties by register number so the result of the sort
   is fully deterministic.  */

static int
hard_reg_use_compare (p1, p2)
     struct hard_reg_n_uses *p1, *p2;
{
  int delta = p1->uses - p2->uses;

  /* Equal use counts: fall back on regno so qsort leaves nothing
     to chance.  */
  if (delta == 0)
    delta = p1->regno - p2->regno;

  return delta;
}
3083
/* Choose the order to consider regs for use as reload registers
   based on how much trouble would be caused by spilling one.
   Store them in order of decreasing preference in potential_reload_regs.  */

static void
order_regs_for_reload ()
{
  register int i;
  register int o = 0;
  /* Total of all pseudo reference counts; used below as a penalty large
     enough to outweigh any genuine use count.  */
  int large = 0;

  struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];

  CLEAR_HARD_REG_SET (bad_spill_regs);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    potential_reload_regs[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      hard_reg_n_uses[i].uses = 0;
      hard_reg_n_uses[i].regno = i;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (regno >= 0)
	{
	  /* Charge pseudo I's reference count to every hard reg
	     it occupies.  */
	  int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
	  while (regno < lim)
	    hard_reg_n_uses[regno++].uses += reg_n_refs[i];
	}
      large += reg_n_refs[i];
    }

  /* Now fixed registers (which cannot safely be used for reloading)
     get a very high use count so they will be considered least desirable.
     Registers used explicitly in the rtl code are almost as bad.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (fixed_regs[i])
	{
	  hard_reg_n_uses[i].uses += 2 * large + 2;
	  SET_HARD_REG_BIT (bad_spill_regs, i);
	}
      else if (regs_explicitly_used[i])
	{
	  hard_reg_n_uses[i].uses += large + 1;
	  /* ??? We are doing this here because of the potential that
	     bad code may be generated if a register explicitly used in
	     an insn was used as a spill register for that insn.  But
	     not using these as spill registers may lose on some machine.
	     We'll have to see how this works out.  */
	  SET_HARD_REG_BIT (bad_spill_regs, i);
	}
    }
  /* The frame pointer is never an acceptable spill reg.  */
  hard_reg_n_uses[FRAME_POINTER_REGNUM].uses += 2 * large + 2;
  SET_HARD_REG_BIT (bad_spill_regs, FRAME_POINTER_REGNUM);

#ifdef ELIMINABLE_REGS
  /* If registers other than the frame pointer are eliminable, mark them as
     poor choices.  */
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
      SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
    }
#endif

  /* Prefer registers not so far used, for use in temporary loading.
     Among them, if REG_ALLOC_ORDER is defined, use that order.
     Otherwise, prefer registers not preserved by calls.  */

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int regno = reg_alloc_order[i];

      if (hard_reg_n_uses[regno].uses == 0)
	potential_reload_regs[o++] = regno;
    }
#else
  /* Unused call-clobbered regs are the cheapest to take over; they need
     no save/restore around calls.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
#endif

  qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
	 sizeof hard_reg_n_uses[0], hard_reg_use_compare);

  /* Now add the regs that are already used,
     preferring those used less often.  The fixed and otherwise forbidden
     registers will be at the end of this list.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses != 0)
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
}
3194\f
/* Reload pseudo-registers into hard regs around each insn as needed.
   Additional register load insns are output before the insn that needs it
   and perhaps store insns after insns that modify the reloaded pseudo reg.

   reg_last_reload_reg and reg_reloaded_contents keep track of
   which pseudo-registers are already available in reload registers.
   We update these for the reloads that we perform,
   as the insns are scanned.  */

static void
reload_as_needed (first, live_known)
     rtx first;
     int live_known;
{
  register rtx insn;
  register int i;
  int this_block = 0;
  rtx x;
  rtx after_call = 0;

  /* Start with no reload register holding anything.  */
  bzero (spill_reg_rtx, sizeof spill_reg_rtx);
  reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
  reg_has_output_reload = (char *) alloca (max_regno);
  for (i = 0; i < n_spills; i++)
    {
      reg_reloaded_contents[i] = -1;
      reg_reloaded_insn[i] = 0;
    }

  /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      /* NOTE: INITIAL_ELIMINATION_OFFSET is a statement macro (no
	 trailing semicolon here, matching its definition).  */
      INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
				  reg_eliminate[i].initial_offset)
      reg_eliminate[i].previous_offset
	= reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
    }
#else
  INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
  reg_eliminate[0].previous_offset
    = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

  num_not_at_initial_offset = 0;

  for (insn = first; insn;)
    {
      register rtx next = NEXT_INSN (insn);

      /* Notice when we move to a new basic block.  */
      if (live_known && this_block + 1 < n_basic_blocks
	  && insn == basic_block_head[this_block+1])
	++this_block;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (insn)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx avoid_return_reg = 0;

#ifdef SMALL_REGISTER_CLASSES
	  /* Set avoid_return_reg if this is an insn
	     that might use the value of a function call.  */
	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      if (GET_CODE (PATTERN (insn)) == SET)
		after_call = SET_DEST (PATTERN (insn));
	      else if (GET_CODE (PATTERN (insn)) == PARALLEL
		       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	      else
		after_call = 0;
	    }
	  else if (after_call != 0
		   && !(GET_CODE (PATTERN (insn)) == SET
			&& SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
	    {
	      /* First non-sp-adjusting insn after the call: if it mentions
		 the call's value register, keep that reg out of reloads.  */
	      if (reg_mentioned_p (after_call, PATTERN (insn)))
		avoid_return_reg = after_call;
	      after_call = 0;
	    }
#endif /* SMALL_REGISTER_CLASSES */

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)), 0);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.
	     (The insn's mode field is used here as a flag; presumably an
	     earlier pass set QImode to mark insns needing elimination
	     processing -- TODO confirm against the rest of the file.)  */
	  if (num_eliminable && GET_MODE (insn) == QImode)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      if (GET_CODE (insn) == NOTE)
		{
		  insn = next;
		  continue;
		}
	    }

	  if (GET_MODE (insn) == VOIDmode)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      bzero (reg_has_output_reload, max_regno);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      /* NOTE: this NEXT deliberately shadows the outer NEXT; it is
		 re-read here so the asm-validation loop below sees insns
		 emitted by emit_reload_insns.  */
	      rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
	      rtx p;
	      int class;

	      /* If this block has not had spilling done for a
		 particular class, deactivate any optional reloads
		 of that class lest they try to use a spill-reg which isn't
		 available here.  If we have any non-optionals that need a
		 spill reg, abort.  */

	      for (class = 0; class < N_REG_CLASSES; class++)
		if (basic_block_needs[class] != 0
		    && basic_block_needs[class][this_block] == 0)
		  for (i = 0; i < n_reloads; i++)
		    if (class == (int) reload_reg_class[i])
		      {
			if (reload_optional[i])
			  {
			    reload_in[i] = reload_out[i] = 0;
			    reload_secondary_p[i] = 0;
			  }
			else if (reload_reg_rtx[i] == 0
				 && (reload_in[i] != 0 || reload_out[i] != 0
				     || reload_secondary_p[i] != 0))
			  abort ();
		      }

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (insn, avoid_return_reg);

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (insn);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads ();

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */

	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
		  if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
		      && (recog_memoized (p) < 0
			  || (insn_extract (p),
			      ! constrain_operands (INSN_CODE (p), 1))))
		    {
		      error_for_asm (insn,
				     "`asm' operand requires impossible reload");
		      /* Turn the offending insn into a deleted note.  */
		      PUT_CODE (p, NOTE);
		      NOTE_SOURCE_FILE (p) = 0;
		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
		    }
	    }
	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  note_stores (PATTERN (insn), forget_old_reloads_1);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
	    if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1);

#ifdef AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     But note that the reg-notes are not changed by reloading:
	     they still contain the pseudo-regs, not the spill regs.  */
	  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (reload_out[i] == XEXP (x, 0))
		    break;

		if (i != n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0));
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (GET_CODE (insn) == CODE_LABEL)
	for (i = 0; i < n_spills; i++)
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg.  */
      if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == CALL_INSN)
	for (i = 0; i < n_spills; i++)
	  if (call_used_regs[spill_regs[i]])
	    {
	      reg_reloaded_contents[i] = -1;
	      reg_reloaded_insn[i] = 0;
	    }

      /* In case registers overlap, allow certain insns to invalidate
	 particular hard registers.  */

#ifdef INSN_CLOBBERS_REGNO_P
      for (i = 0 ; i < n_spills ; i++)
	if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }
#endif

      insn = next;

#ifdef USE_C_ALLOCA
      /* With the C alloca emulation, reclaim garbage periodically.  */
      alloca (0);
#endif
    }
}
3464
3465/* Discard all record of any value reloaded from X,
3466 or reloaded in X from someplace else;
3467 unless X is an output reload reg of the current insn.
3468
3469 X may be a hard reg (the reload reg)
3470 or it may be a pseudo reg that was reloaded from. */
3471
3472static void
3473forget_old_reloads_1 (x)
3474 rtx x;
3475{
3476 register int regno;
3477 int nr;
3478
3479 if (GET_CODE (x) != REG)
3480 return;
3481
3482 regno = REGNO (x);
3483
3484 if (regno >= FIRST_PSEUDO_REGISTER)
3485 nr = 1;
3486 else
3487 {
3488 int i;
3489 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3490 /* Storing into a spilled-reg invalidates its contents.
3491 This can happen if a block-local pseudo is allocated to that reg
3492 and it wasn't spilled because this block's total need is 0.
3493 Then some insn might have an optional reload and use this reg. */
3494 for (i = 0; i < nr; i++)
3495 if (spill_reg_order[regno + i] >= 0
3496 /* But don't do this if the reg actually serves as an output
3497 reload reg in the current instruction. */
3498 && (n_reloads == 0
3499 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3500 {
3501 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3502 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3503 }
3504 }
3505
3506 /* Since value of X has changed,
3507 forget any value previously copied from it. */
3508
3509 while (nr-- > 0)
3510 /* But don't forget a copy if this is the output reload
3511 that establishes the copy's validity. */
3512 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3513 reg_last_reload_reg[regno + nr] = 0;
3514}
3515\f
3516/* For each reload, the mode of the reload register. */
3517static enum machine_mode reload_mode[MAX_RELOADS];
3518
3519/* For each reload, the largest number of registers it will require. */
3520static int reload_nregs[MAX_RELOADS];
3521
3522/* Comparison function for qsort to decide which of two reloads
3523 should be handled first. *P1 and *P2 are the reload numbers. */
3524
3525static int
3526reload_reg_class_lower (p1, p2)
3527 short *p1, *p2;
3528{
3529 register int r1 = *p1, r2 = *p2;
3530 register int t;
a8fdc208 3531
32131a9c
RK
3532 /* Consider required reloads before optional ones. */
3533 t = reload_optional[r1] - reload_optional[r2];
3534 if (t != 0)
3535 return t;
3536
3537 /* Count all solitary classes before non-solitary ones. */
3538 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3539 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3540 if (t != 0)
3541 return t;
3542
3543 /* Aside from solitaires, consider all multi-reg groups first. */
3544 t = reload_nregs[r2] - reload_nregs[r1];
3545 if (t != 0)
3546 return t;
3547
3548 /* Consider reloads in order of increasing reg-class number. */
3549 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3550 if (t != 0)
3551 return t;
3552
3553 /* If reloads are equally urgent, sort by reload number,
3554 so that the results of qsort leave nothing to chance. */
3555 return r1 - r2;
3556}
3557\f
3558/* The following HARD_REG_SETs indicate when each hard register is
3559 used for a reload of various parts of the current insn. */
3560
3561/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
3562static HARD_REG_SET reload_reg_used;
3563/* If reg is in use for a RELOAD_FOR_INPUT_RELOAD_ADDRESS reload. */
3564static HARD_REG_SET reload_reg_used_in_input_addr;
3565/* If reg is in use for a RELOAD_FOR_OUTPUT_RELOAD_ADDRESS reload. */
3566static HARD_REG_SET reload_reg_used_in_output_addr;
3567/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
3568static HARD_REG_SET reload_reg_used_in_op_addr;
3569/* If reg is in use for a RELOAD_FOR_INPUT reload. */
3570static HARD_REG_SET reload_reg_used_in_input;
3571/* If reg is in use for a RELOAD_FOR_OUTPUT reload. */
3572static HARD_REG_SET reload_reg_used_in_output;
3573
3574/* If reg is in use as a reload reg for any sort of reload. */
3575static HARD_REG_SET reload_reg_used_at_all;
3576
3577/* Mark reg REGNO as in use for a reload of the sort spec'd by WHEN_NEEDED.
3578 MODE is used to indicate how many consecutive regs are actually used. */
3579
3580static void
3581mark_reload_reg_in_use (regno, when_needed, mode)
3582 int regno;
3583 enum reload_when_needed when_needed;
3584 enum machine_mode mode;
3585{
3586 int nregs = HARD_REGNO_NREGS (regno, mode);
3587 int i;
3588
3589 for (i = regno; i < nregs + regno; i++)
3590 {
3591 switch (when_needed)
3592 {
3593 case RELOAD_OTHER:
3594 SET_HARD_REG_BIT (reload_reg_used, i);
3595 break;
3596
3597 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3598 SET_HARD_REG_BIT (reload_reg_used_in_input_addr, i);
3599 break;
3600
3601 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3602 SET_HARD_REG_BIT (reload_reg_used_in_output_addr, i);
3603 break;
3604
3605 case RELOAD_FOR_OPERAND_ADDRESS:
3606 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
3607 break;
3608
3609 case RELOAD_FOR_INPUT:
3610 SET_HARD_REG_BIT (reload_reg_used_in_input, i);
3611 break;
3612
3613 case RELOAD_FOR_OUTPUT:
3614 SET_HARD_REG_BIT (reload_reg_used_in_output, i);
3615 break;
3616 }
3617
3618 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
3619 }
3620}
3621
3622/* 1 if reg REGNO is free as a reload reg for a reload of the sort
3623 specified by WHEN_NEEDED. */
3624
3625static int
3626reload_reg_free_p (regno, when_needed)
3627 int regno;
3628 enum reload_when_needed when_needed;
3629{
3630 /* In use for a RELOAD_OTHER means it's not available for anything. */
3631 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
3632 return 0;
3633 switch (when_needed)
3634 {
3635 case RELOAD_OTHER:
3636 /* In use for anything means not available for a RELOAD_OTHER. */
3637 return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);
3638
3639 /* The other kinds of use can sometimes share a register. */
3640 case RELOAD_FOR_INPUT:
3641 return (! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno)
3642 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3643 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno));
3644 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3645 return (! TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno)
3646 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno));
3647 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3648 return (! TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno)
3649 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3650 case RELOAD_FOR_OPERAND_ADDRESS:
3651 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3652 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno)
3653 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3654 case RELOAD_FOR_OUTPUT:
3655 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3656 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno)
3657 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3658 }
3659 abort ();
3660}
3661
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by WHEN_NEEDED,
   is not in use for a reload in any prior part of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.

   The switch cases below fall through DELIBERATELY: each kind of use
   tests the use-sets of every part that the code treats as coming
   earlier in the insn, by cascading into the following cases.  */

static int
reload_reg_free_before_p (regno, when_needed)
     int regno;
     enum reload_when_needed when_needed;
{
  switch (when_needed)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its use starts from the beginning, so nothing can use it earlier.  */
      return 1;

      /* If this use is for part of the insn,
	 check the reg is not in use for any prior part.  */
    case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;
      /* Fall through: also check the parts before the operand address.  */
    case RELOAD_FOR_OUTPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input, regno))
	return 0;
      /* Fall through.  */
    case RELOAD_FOR_OPERAND_ADDRESS:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno))
	return 0;
      /* Fall through.  */
    case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
    case RELOAD_FOR_INPUT:
      /* Input-address and input reloads are the earliest parts checked;
	 nothing precedes them here.  */
      return 1;
    }
  abort ();
}
3699
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by WHEN_NEEDED,
   is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.

   This is the mirror image of reload_reg_free_before_p: the cases fall
   through DELIBERATELY so that each kind of use tests the use-sets of
   every part treated as coming LATER in the insn.  */

static int
reload_reg_reaches_end_p (regno, when_needed)
     int regno;
     enum reload_when_needed when_needed;
{
  switch (when_needed)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.  */
    case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_output, regno))
	return 0;
      /* Fall through: also check the parts after the operand address.  */
    case RELOAD_FOR_OPERAND_ADDRESS:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno))
	return 0;
      /* Fall through.  */
    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
      /* Output and output-address reloads are the last parts checked;
	 nothing comes after them here.  */
      return 1;
    }
  abort ();
}
3736\f
3737/* Vector of reload-numbers showing the order in which the reloads should
3738 be processed. */
3739short reload_order[MAX_RELOADS];
3740
3741/* Indexed by reload number, 1 if incoming value
3742 inherited from previous insns. */
3743char reload_inherited[MAX_RELOADS];
3744
3745/* For an inherited reload, this is the insn the reload was inherited from,
3746 if we know it. Otherwise, this is 0. */
3747rtx reload_inheritance_insn[MAX_RELOADS];
3748
3749/* If non-zero, this is a place to get the value of the reload,
3750 rather than using reload_in. */
3751rtx reload_override_in[MAX_RELOADS];
3752
3753/* For each reload, the index in spill_regs of the spill register used,
3754 or -1 if we did not need one of the spill registers for this reload. */
3755int reload_spill_index[MAX_RELOADS];
3756
3757/* Index of last register assigned as a spill register. We allocate in
3758 a round-robin fashio. */
3759
3760static last_spill_reg = 0;
3761
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is non-zero if this is the last reload for the insn being
   processed.

   Set reload_reg_rtx[R] to the register allocated.

   We return 1 if successful.  If NOERROR is nonzero and we failed to
   find a spill reg, we return 0 without changing anything; if NOERROR
   is zero, failure either aborts (compiler bug) or, for a bad `asm',
   reports an error and disables the reload (still returning 1).  */

static int
allocate_reload_reg (r, insn, last_reload, noerror)
     int r;
     rtx insn;
     int last_reload;
     int noerror;
{
  int i;
  int pass;
  int count;
  rtx new;
  int regno;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = reload_nregs[r] > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  */

      for (count = 0, i = last_spill_reg; count < n_spills; count++)
	{
	  int class = (int) reload_reg_class[r];

	  i = (i + 1) % n_spills;

	  if (reload_reg_free_p (spill_regs[i], reload_when_needed[r])
	      && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
	      /* Look first for regs to share, then for unshared.
		 (On pass 0 only already-used regs qualify.)  */
	      && (pass || TEST_HARD_REG_BIT (reload_reg_used_at_all,
					     spill_regs[i])))
	    {
	      int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.
		 Also, don't use for a group registers that are
		 needed for nongroups.  */
	      if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
		/* Walk the group from the top down; NR reaching 1 means
		   every follower reg checked out.  */
		while (nr > 1)
		  {
		    regno = spill_regs[i] + nr - 1;
		    if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
			  && spill_reg_order[regno] >= 0
			  && reload_reg_free_p (regno, reload_when_needed[r])
			  && ! TEST_HARD_REG_BIT (counted_for_nongroups,
						  regno)))
		      break;
		    nr--;
		  }
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count == n_spills)
    {
      if (noerror)
	return 0;
      abort ();
    }

  /* Remember where to resume the round-robin next time.  */
  last_spill_reg = i;

  /* Mark as in use for this insn the reload regs we use for this.  */
  mark_reload_reg_in_use (spill_regs[i], reload_when_needed[r],
			  reload_mode[r]);

  new = spill_reg_rtx[i];

  /* Reuse the cached REG rtx for this spill reg if its mode matches;
     otherwise make (and cache) one in the mode this reload wants.  */
  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new = gen_rtx (REG, reload_mode[r], spill_regs[i]);

  reload_reg_rtx[r] = new;
  reload_spill_index[r] = i;
  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
    {
      enum machine_mode test_mode = VOIDmode;
      if (reload_in[r])
	test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, reload_mode[r].
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
	if (! (reload_out[r] != 0
	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
	  /* The reg is OK.  */
	  return 1;
    }

  /* The reg is not OK.  */
  if (noerror)
    return 0;

  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    abort ();

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.  */
  error_for_asm (insn,
		 "`asm' operand constraint incompatible with operand size");
  reload_in[r] = 0;
  reload_out[r] = 0;
  reload_reg_rtx[r] = 0;
  reload_optional[r] = 1;
  reload_secondary_p[r] = 1;

  return 1;
}
3935\f
/* Assign hard reg targets for the pseudo-registers we must reload
   into hard regs for this insn.
   Also output the instructions to copy them in and out of the hard regs.

   For machines with register classes, we are responsible for
   finding a reload reg in the proper class.  */

static void
choose_reload_regs (insn, avoid_return_reg)
     rtx insn;
     /* NOTE(review): the old comment said this argument is ignored, but
	it is in fact used below when SMALL_REGISTER_CLASSES is defined.  */
     rtx avoid_return_reg;
{
  register int i, j;
  int max_group_size = 1;
  enum reg_class group_class = NO_REGS;
  int inheritance;

  /* Saved copies of the per-insn allocation state, so that a failed
     attempt with inheritance can be fully undone and retried without.  */
  rtx save_reload_reg_rtx[MAX_RELOADS];
  char save_reload_inherited[MAX_RELOADS];
  rtx save_reload_inheritance_insn[MAX_RELOADS];
  rtx save_reload_override_in[MAX_RELOADS];
  int save_reload_spill_index[MAX_RELOADS];
  HARD_REG_SET save_reload_reg_used;
  HARD_REG_SET save_reload_reg_used_in_input_addr;
  HARD_REG_SET save_reload_reg_used_in_output_addr;
  HARD_REG_SET save_reload_reg_used_in_op_addr;
  HARD_REG_SET save_reload_reg_used_in_input;
  HARD_REG_SET save_reload_reg_used_in_output;
  HARD_REG_SET save_reload_reg_used_at_all;

  bzero (reload_inherited, MAX_RELOADS);
  bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
  bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));

  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_output);
  CLEAR_HARD_REG_SET (reload_reg_used_in_input);

  /* Distinguish output-only and input-only reloads
     because they can overlap with other things.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_when_needed[j] == RELOAD_OTHER
	&& ! reload_needed_for_multiple[j])
      {
	if (reload_in[j] == 0)
	  {
	    /* But earlyclobber operands must stay as RELOAD_OTHER.  */
	    for (i = 0; i < n_earlyclobbers; i++)
	      if (rtx_equal_p (reload_out[j], reload_earlyclobbers[i]))
		break;
	    if (i == n_earlyclobbers)
	      reload_when_needed[j] = RELOAD_FOR_OUTPUT;
	  }
	if (reload_out[j] == 0)
	  reload_when_needed[j] = RELOAD_FOR_INPUT;

	/* A secondary reload serves the same part of the insn as the
	   reload it is secondary for.  */
	if (reload_secondary_reload[j] >= 0
	    && ! reload_needed_for_multiple[reload_secondary_reload[j]])
	  reload_when_needed[reload_secondary_reload[j]]
	    = reload_when_needed[j];
      }

#ifdef SMALL_REGISTER_CLASSES
  /* Don't bother with avoiding the return reg
     if we have no mandatory reload that could use it.  */
  if (avoid_return_reg)
    {
      int do_avoid = 0;
      int regno = REGNO (avoid_return_reg);
      int nregs
	= HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
      int r;

      for (r = regno; r < regno + nregs; r++)
	if (spill_reg_order[r] >= 0)
	  for (j = 0; j < n_reloads; j++)
	    if (!reload_optional[j] && reload_reg_rtx[j] == 0
		&& (reload_in[j] != 0 || reload_out[j] != 0
		    || reload_secondary_p[j])
		&&
		TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
	      do_avoid = 1;
      if (!do_avoid)
	avoid_return_reg = 0;
    }
#endif /* SMALL_REGISTER_CLASSES */

#if 0  /* Not needed, now that we can always retry without inheritance.  */
  /* See if we have more mandatory reloads than spill regs.
     If so, then we cannot risk optimizations that could prevent
     reloads from sharing one spill register.

     Since we will try finding a better register than reload_reg_rtx
     unless it is equal to reload_in or reload_out, count such reloads.  */

  {
    int tem = 0;
#ifdef SMALL_REGISTER_CLASSES
    int tem = (avoid_return_reg != 0);
#endif
    for (j = 0; j < n_reloads; j++)
      if (! reload_optional[j]
	  && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
	  && (reload_reg_rtx[j] == 0
	      || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
		  && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
	tem++;
    if (tem > n_spills)
      must_reuse = 1;
  }
#endif

#ifdef SMALL_REGISTER_CLASSES
  /* Don't use the subroutine call return reg for a reload
     if we are supposed to avoid it.  */
  if (avoid_return_reg)
    {
      int regno = REGNO (avoid_return_reg);
      int nregs
	= HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
      int r;

      for (r = regno; r < regno + nregs; r++)
	if (spill_reg_order[r] >= 0)
	  SET_HARD_REG_BIT (reload_reg_used, r);
    }
#endif /* SMALL_REGISTER_CLASSES */

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.

     Also note whether any of the reloads wants a consecutive group of regs.
     If so, record the maximum size of the group desired and what
     register class contains all the groups needed by this insn.  */

  for (j = 0; j < n_reloads; j++)
    {
      reload_order[j] = j;
      reload_spill_index[j] = -1;

      /* The mode the reload reg must hold: for a strict-low-part output
	 reload, the mode of the SUBREG's underlying reg; otherwise the
	 wider of the input and output modes.  */
      reload_mode[j]
	= (reload_strict_low[j] && reload_out[j]
	   ? GET_MODE (SUBREG_REG (reload_out[j]))
	   : (reload_inmode[j] == VOIDmode
	      || (GET_MODE_SIZE (reload_outmode[j])
		  > GET_MODE_SIZE (reload_inmode[j])))
	   ? reload_outmode[j] : reload_inmode[j]);

      reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);

      if (reload_nregs[j] > 1)
	{
	  max_group_size = MAX (reload_nregs[j], max_group_size);
	  group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
	}

      /* If we have already decided to use a certain register,
	 don't use it in another way.  */
      if (reload_reg_rtx[j])
	mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]),
				reload_when_needed[j], reload_mode[j]);
    }

  if (n_reloads > 1)
    qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* Snapshot the allocation state before trying with inheritance.  */
  bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
  bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
  bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
	 sizeof reload_inheritance_insn);
  bcopy (reload_override_in, save_reload_override_in,
	 sizeof reload_override_in);
  bcopy (reload_spill_index, save_reload_spill_index,
	 sizeof reload_spill_index);
  COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
  COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
  COPY_HARD_REG_SET (save_reload_reg_used_in_output,
		     reload_reg_used_in_output);
  COPY_HARD_REG_SET (save_reload_reg_used_in_input,
		     reload_reg_used_in_input);
  COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr,
		     reload_reg_used_in_input_addr);
  COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr,
		     reload_reg_used_in_output_addr);
  COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
		     reload_reg_used_in_op_addr);

  /* Try first with inheritance, then turning it off.  */

  for (inheritance = 1; inheritance >= 0; inheritance--)
    {
      /* Process the reloads in order of preference just found.
	 Beyond this point, subregs can be found in reload_reg_rtx.

	 This used to look for an existing reloaded home for all
	 of the reloads, and only then perform any new reloads.
	 But that could lose if the reloads were done out of reg-class order
	 because a later reload with a looser constraint might have an old
	 home in a register needed by an earlier reload with a tighter constraint.

	 To solve this, we make two passes over the reloads, in the order
	 described above.  In the first pass we try to inherit a reload
	 from a previous insn.  If there is a later reload that needs a
	 class that is a proper subset of the class being processed, we must
	 also allocate a spill register during the first pass.

	 Then make a second pass over the reloads to allocate any reloads
	 that haven't been given registers yet.  */

      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
	    continue;

	  /* If find_reloads chose to use reload_in or reload_out as a reload
	     register, we don't need to choose one.  Otherwise, try even if it
	     found one since we might save an insn if we find the value lying
	     around.  */
	  if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
	      && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
		  || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
	    continue;

#if 0 /* No longer needed for correct operation.
	 It might give better code, or might not; worth an experiment?  */
	  /* If this is an optional reload, we can't inherit from earlier insns
	     until we are sure that any non-optional reloads have been allocated.
	     The following code takes advantage of the fact that optional reloads
	     are at the end of reload_order.  */
	  if (reload_optional[r] != 0)
	    for (i = 0; i < j; i++)
	      if ((reload_out[reload_order[i]] != 0
		   || reload_in[reload_order[i]] != 0
		   || reload_secondary_p[reload_order[i]])
		  && ! reload_optional[reload_order[i]]
		  && reload_reg_rtx[reload_order[i]] == 0)
		allocate_reload_reg (reload_order[i], insn, 0, inheritance);
#endif

	  /* First see if this pseudo is already available as reloaded
	     for a previous insn.  We cannot try to inherit for reloads
	     that are smaller than the maximum number of registers needed
	     for groups unless the register we would allocate cannot be used
	     for the groups.

	     We could check here to see if this is a secondary reload for
	     an object that is already in a register of the desired class.
	     This would avoid the need for the secondary reload register.
	     But this is complex because we can't easily determine what
	     objects might want to be loaded via this reload.  So let a register
	     be allocated here.  In `emit_reload_insns' we suppress one of the
	     loads in the case described above.  */

	  if (inheritance)
	    {
	      register int regno = -1;

	      if (reload_in[r] == 0)
		;
	      else if (GET_CODE (reload_in[r]) == REG)
		regno = REGNO (reload_in[r]);
	      else if (GET_CODE (reload_in_reg[r]) == REG)
		regno = REGNO (reload_in_reg[r]);
#if 0
	      /* This won't work, since REGNO can be a pseudo reg number.
		 Also, it takes much more hair to keep track of all the things
		 that can invalidate an inherited reload of part of a pseudoreg.  */
	      else if (GET_CODE (reload_in[r]) == SUBREG
		       && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
		regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
#endif

	      if (regno >= 0 && reg_last_reload_reg[regno] != 0)
		{
		  i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];

		  /* The spill reg must still hold REGNO's value, be valid
		     and free in the needed mode/class, and (for a group
		     reload) not steal a reg needed by the group class.  */
		  if (reg_reloaded_contents[i] == regno
		      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
		      && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
					    spill_regs[i])
		      && (reload_nregs[r] == max_group_size
			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
						  spill_regs[i]))
		      && reload_reg_free_p (spill_regs[i], reload_when_needed[r])
		      && reload_reg_free_before_p (spill_regs[i],
						   reload_when_needed[r]))
		    {
		      /* If a group is needed, verify that all the subsequent
			 registers still have their values intact.  */
		      int nr
			= HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
		      int k;

		      for (k = 1; k < nr; k++)
			if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
			    != regno)
			  break;

		      if (k == nr)
			{
			  /* Mark the register as in use for this part of
			     the insn.  */
			  mark_reload_reg_in_use (spill_regs[i],
						  reload_when_needed[r],
						  reload_mode[r]);
			  reload_reg_rtx[r] = reg_last_reload_reg[regno];
			  reload_inherited[r] = 1;
			  reload_inheritance_insn[r] = reg_reloaded_insn[i];
			  reload_spill_index[r] = i;
			}
		    }
		}
	    }

	  /* Here's another way to see if the value is already lying around.  */
	  if (inheritance
	      && reload_in[r] != 0
	      && ! reload_inherited[r]
	      && reload_out[r] == 0
	      && (CONSTANT_P (reload_in[r])
		  || GET_CODE (reload_in[r]) == PLUS
		  || GET_CODE (reload_in[r]) == REG
		  || GET_CODE (reload_in[r]) == MEM)
	      && (reload_nregs[r] == max_group_size
		  || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
	    {
	      register rtx equiv
		= find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
				  -1, 0, 0, reload_mode[r]);
	      /* REGNO is only meaningful (and always set) when EQUIV != 0.  */
	      int regno;

	      if (equiv != 0)
		{
		  if (GET_CODE (equiv) == REG)
		    regno = REGNO (equiv);
		  else if (GET_CODE (equiv) == SUBREG)
		    {
		      regno = REGNO (SUBREG_REG (equiv));
		      if (regno < FIRST_PSEUDO_REGISTER)
			regno += SUBREG_WORD (equiv);
		    }
		  else
		    abort ();
		}

	      /* If we found a spill reg, reject it unless it is free
		 and of the desired class.  */
	      if (equiv != 0
		  && ((spill_reg_order[regno] >= 0
		       && ! reload_reg_free_before_p (regno,
						      reload_when_needed[r]))
		      || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
					      regno)))
		equiv = 0;

	      if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
		equiv = 0;

	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
		equiv = 0;

	      /* We found a register that contains the value we need.
		 If this register is the same as an `earlyclobber' operand
		 of the current insn, just mark it as a place to reload from
		 since we can't use it as the reload register itself.  */

	      if (equiv != 0)
		for (i = 0; i < n_earlyclobbers; i++)
		  if (reg_overlap_mentioned_for_reload_p (equiv,
							  reload_earlyclobbers[i]))
		    {
		      reload_override_in[r] = equiv;
		      equiv = 0;
		      break;
		    }

	      /* JRV: If the equiv register we have found is explicitly
		 clobbered in the current insn, mark but don't use, as above.  */

	      if (equiv != 0 && regno_clobbered_p (regno, insn))
		{
		  reload_override_in[r] = equiv;
		  equiv = 0;
		}

	      /* If we found an equivalent reg, say no code need be generated
		 to load it, and use it as our reload reg.  */
	      if (equiv != 0 && regno != FRAME_POINTER_REGNUM)
		{
		  reload_reg_rtx[r] = equiv;
		  reload_inherited[r] = 1;
		  /* If it is a spill reg,
		     mark the spill reg as in use for this insn.  */
		  i = spill_reg_order[regno];
		  if (i >= 0)
		    mark_reload_reg_in_use (regno, reload_when_needed[r],
					    reload_mode[r]);
		}
	    }

	  /* If we found a register to use already, or if this is an optional
	     reload, we are done.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
	    continue;

#if 0 /* No longer needed for correct operation.  Might or might not
	 give better code on the average.  Want to experiment?  */

	  /* See if there is a later reload that has a class different from our
	     class that intersects our class or that requires less register
	     than our reload.  If so, we must allocate a register to this
	     reload now, since that reload might inherit a previous reload
	     and take the only available register in our class.  Don't do this
	     for optional reloads since they will force all previous reloads
	     to be allocated.  Also don't do this for reloads that have been
	     turned off.  */

	  for (i = j + 1; i < n_reloads; i++)
	    {
	      int s = reload_order[i];

	      if ((reload_in[s] == 0 && reload_out[s] == 0
		   && ! reload_secondary_p[s])
		  || reload_optional[s])
		continue;

	      if ((reload_reg_class[s] != reload_reg_class[r]
		   && reg_classes_intersect_p (reload_reg_class[r],
					       reload_reg_class[s]))
		  || reload_nregs[s] < reload_nregs[r])
		break;
	    }

	  if (i == n_reloads)
	    continue;

	  allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
#endif
	}

      /* Now allocate reload registers for anything non-optional that
	 didn't get one yet.  */
      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
	    continue;

	  /* Skip reloads that already have a register allocated or are
	     optional.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r])
	    continue;

	  if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
	    break;
	}

      /* If that loop got all the way, we have won.  */
      if (j == n_reloads)
	break;

      /* NOTE(review): no `goto fail' appears in this function; the label
	 is reached only by falling through from an allocation failure.  */
    fail:
      /* Loop around and try without any inheritance.  */
      /* First undo everything done by the failed attempt
	 to allocate with inheritance.  */
      bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
      bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
      bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
	     sizeof reload_inheritance_insn);
      bcopy (save_reload_override_in, reload_override_in,
	     sizeof reload_override_in);
      bcopy (save_reload_spill_index, reload_spill_index,
	     sizeof reload_spill_index);
      COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
      COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
      COPY_HARD_REG_SET (reload_reg_used_in_input,
			 save_reload_reg_used_in_input);
      COPY_HARD_REG_SET (reload_reg_used_in_output,
			 save_reload_reg_used_in_output);
      COPY_HARD_REG_SET (reload_reg_used_in_input_addr,
			 save_reload_reg_used_in_input_addr);
      COPY_HARD_REG_SET (reload_reg_used_in_output_addr,
			 save_reload_reg_used_in_output_addr);
      COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
			 save_reload_reg_used_in_op_addr);
    }

  /* If we thought we could inherit a reload, because it seemed that
     nothing else wanted the same reload register earlier in the insn,
     verify that assumption, now that all reloads have been assigned.  */

  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      if (reload_inherited[r] && reload_reg_rtx[r] != 0
	  && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
					 reload_when_needed[r]))
	reload_inherited[r] = 0;

      /* If we found a better place to reload from,
	 validate it in the same fashion, if it is a reload reg.  */
      if (reload_override_in[r]
	  && (GET_CODE (reload_override_in[r]) == REG
	      || GET_CODE (reload_override_in[r]) == SUBREG))
	{
	  int regno = true_regnum (reload_override_in[r]);
	  if (spill_reg_order[regno] >= 0
	      && ! reload_reg_free_before_p (regno, reload_when_needed[r]))
	    reload_override_in[r] = 0;
	}
    }

  /* Now that reload_override_in is known valid,
     actually override reload_in.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_override_in[j])
      reload_in[j] = reload_override_in[j];

  /* If this reload won't be done because it has been cancelled or is
     optional and not inherited, clear reload_reg_rtx so other
     routines (such as subst_reloads) don't get confused.  */
  for (j = 0; j < n_reloads; j++)
    if ((reload_optional[j] && ! reload_inherited[j])
	|| (reload_in[j] == 0 && reload_out[j] == 0
	    && ! reload_secondary_p[j]))
      reload_reg_rtx[j] = 0;

  /* Record which pseudos and which spill regs have output reloads.  */
  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      i = reload_spill_index[r];

      /* I is nonneg if this reload used one of the spill regs.
	 If reload_reg_rtx[r] is 0, this is an optional reload
	 that we opted to ignore.  */
      if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
	  && reload_reg_rtx[r] != 0)
	{
	  register int nregno = REGNO (reload_out[r]);
	  int nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);

	  while (--nr >= 0)
	    {
	      reg_has_output_reload[nregno + nr] = 1;
	      if (i >= 0)
		SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
	    }

	  /* An output reload must be RELOAD_OTHER or RELOAD_FOR_OUTPUT
	     by this point; anything else is an internal error.  */
	  if (reload_when_needed[r] != RELOAD_OTHER
	      && reload_when_needed[r] != RELOAD_FOR_OUTPUT)
	    abort ();
	}
    }
}
4504\f
4505/* Output insns to reload values in and out of the chosen reload regs. */
4506
4507static void
4508emit_reload_insns (insn)
4509 rtx insn;
4510{
4511 register int j;
4512 rtx following_insn = NEXT_INSN (insn);
a8efe40d 4513 rtx before_insn = insn;
32131a9c
RK
4514 rtx first_output_reload_insn = NEXT_INSN (insn);
4515 rtx first_other_reload_insn = insn;
4516 rtx first_operand_address_reload_insn = insn;
4517 int special;
4518 /* Values to be put in spill_reg_store are put here first. */
4519 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
4520
d45cf215 4521 /* If this is a CALL_INSN preceded by USE insns, any reload insns
a8efe40d
RK
4522 must go in front of the first USE insn, not in front of INSN. */
4523
4524 if (GET_CODE (insn) == CALL_INSN && GET_CODE (PREV_INSN (insn)) == INSN
4525 && GET_CODE (PATTERN (PREV_INSN (insn))) == USE)
4526 while (GET_CODE (PREV_INSN (before_insn)) == INSN
4527 && GET_CODE (PATTERN (PREV_INSN (before_insn))) == USE)
4528 first_other_reload_insn = first_operand_address_reload_insn
4529 = before_insn = PREV_INSN (before_insn);
4530
32131a9c
RK
4531 /* Now output the instructions to copy the data into and out of the
4532 reload registers. Do these in the order that the reloads were reported,
4533 since reloads of base and index registers precede reloads of operands
4534 and the operands may need the base and index registers reloaded. */
4535
4536 for (j = 0; j < n_reloads; j++)
4537 {
4538 register rtx old;
4539 rtx oldequiv_reg = 0;
4540 rtx this_reload_insn = 0;
4541 rtx store_insn = 0;
4542
4543 old = reload_in[j];
4544 if (old != 0 && ! reload_inherited[j]
4545 && ! rtx_equal_p (reload_reg_rtx[j], old)
4546 && reload_reg_rtx[j] != 0)
4547 {
4548 register rtx reloadreg = reload_reg_rtx[j];
4549 rtx oldequiv = 0;
4550 enum machine_mode mode;
4551 rtx where;
d445b551 4552 rtx reload_insn;
32131a9c
RK
4553
4554 /* Determine the mode to reload in.
4555 This is very tricky because we have three to choose from.
4556 There is the mode the insn operand wants (reload_inmode[J]).
4557 There is the mode of the reload register RELOADREG.
4558 There is the intrinsic mode of the operand, which we could find
4559 by stripping some SUBREGs.
4560 It turns out that RELOADREG's mode is irrelevant:
4561 we can change that arbitrarily.
4562
4563 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
4564 then the reload reg may not support QImode moves, so use SImode.
4565 If foo is in memory due to spilling a pseudo reg, this is safe,
4566 because the QImode value is in the least significant part of a
4567 slot big enough for a SImode. If foo is some other sort of
4568 memory reference, then it is impossible to reload this case,
4569 so previous passes had better make sure this never happens.
4570
4571 Then consider a one-word union which has SImode and one of its
4572 members is a float, being fetched as (SUBREG:SF union:SI).
4573 We must fetch that as SFmode because we could be loading into
4574 a float-only register. In this case OLD's mode is correct.
4575
4576 Consider an immediate integer: it has VOIDmode. Here we need
4577 to get a mode from something else.
4578
4579 In some cases, there is a fourth mode, the operand's
4580 containing mode. If the insn specifies a containing mode for
4581 this operand, it overrides all others.
4582
4583 I am not sure whether the algorithm here is always right,
4584 but it does the right things in those cases. */
4585
4586 mode = GET_MODE (old);
4587 if (mode == VOIDmode)
4588 mode = reload_inmode[j];
4589 if (reload_strict_low[j])
4590 mode = GET_MODE (SUBREG_REG (reload_in[j]));
4591
4592#ifdef SECONDARY_INPUT_RELOAD_CLASS
4593 /* If we need a secondary register for this operation, see if
4594 the value is already in a register in that class. Don't
4595 do this if the secondary register will be used as a scratch
4596 register. */
4597
4598 if (reload_secondary_reload[j] >= 0
4599 && reload_secondary_icode[j] == CODE_FOR_nothing)
4600 oldequiv
4601 = find_equiv_reg (old, insn,
4602 reload_reg_class[reload_secondary_reload[j]],
4603 -1, 0, 0, mode);
4604#endif
4605
4606 /* If reloading from memory, see if there is a register
4607 that already holds the same value. If so, reload from there.
4608 We can pass 0 as the reload_reg_p argument because
4609 any other reload has either already been emitted,
4610 in which case find_equiv_reg will see the reload-insn,
4611 or has yet to be emitted, in which case it doesn't matter
4612 because we will use this equiv reg right away. */
4613
4614 if (oldequiv == 0
4615 && (GET_CODE (old) == MEM
4616 || (GET_CODE (old) == REG
4617 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4618 && reg_renumber[REGNO (old)] < 0)))
4619 oldequiv = find_equiv_reg (old, insn, GENERAL_REGS,
4620 -1, 0, 0, mode);
4621
4622 if (oldequiv)
4623 {
4624 int regno = true_regnum (oldequiv);
4625
4626 /* If OLDEQUIV is a spill register, don't use it for this
4627 if any other reload needs it at an earlier stage of this insn
a8fdc208 4628 or at this stage. */
32131a9c
RK
4629 if (spill_reg_order[regno] >= 0
4630 && (! reload_reg_free_p (regno, reload_when_needed[j])
4631 || ! reload_reg_free_before_p (regno,
4632 reload_when_needed[j])))
4633 oldequiv = 0;
4634
4635 /* If OLDEQUIV is not a spill register,
4636 don't use it if any other reload wants it. */
4637 if (spill_reg_order[regno] < 0)
4638 {
4639 int k;
4640 for (k = 0; k < n_reloads; k++)
4641 if (reload_reg_rtx[k] != 0 && k != j
bfa30b22
RK
4642 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
4643 oldequiv))
32131a9c
RK
4644 {
4645 oldequiv = 0;
4646 break;
4647 }
4648 }
4649 }
4650
4651 if (oldequiv == 0)
4652 oldequiv = old;
4653 else if (GET_CODE (oldequiv) == REG)
4654 oldequiv_reg = oldequiv;
4655 else if (GET_CODE (oldequiv) == SUBREG)
4656 oldequiv_reg = SUBREG_REG (oldequiv);
4657
4658 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
4659 then load RELOADREG from OLDEQUIV. */
4660
4661 if (GET_MODE (reloadreg) != mode)
4662 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
4663 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
4664 oldequiv = SUBREG_REG (oldequiv);
4665 if (GET_MODE (oldequiv) != VOIDmode
4666 && mode != GET_MODE (oldequiv))
4667 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
4668
4669 /* Decide where to put reload insn for this reload. */
4670 switch (reload_when_needed[j])
4671 {
4672 case RELOAD_FOR_INPUT:
4673 case RELOAD_OTHER:
4674 where = first_operand_address_reload_insn;
4675 break;
4676 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
4677 where = first_other_reload_insn;
4678 break;
4679 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
4680 where = first_output_reload_insn;
4681 break;
4682 case RELOAD_FOR_OPERAND_ADDRESS:
a8efe40d 4683 where = before_insn;
32131a9c
RK
4684 }
4685
4686 special = 0;
4687
4688 /* Auto-increment addresses must be reloaded in a special way. */
4689 if (GET_CODE (oldequiv) == POST_INC
4690 || GET_CODE (oldequiv) == POST_DEC
4691 || GET_CODE (oldequiv) == PRE_INC
4692 || GET_CODE (oldequiv) == PRE_DEC)
4693 {
4694 /* We are not going to bother supporting the case where a
4695 incremented register can't be copied directly from
4696 OLDEQUIV since this seems highly unlikely. */
4697 if (reload_secondary_reload[j] >= 0)
4698 abort ();
4699 /* Prevent normal processing of this reload. */
4700 special = 1;
4701 /* Output a special code sequence for this case. */
4702 this_reload_insn
4703 = inc_for_reload (reloadreg, oldequiv, reload_inc[j], where);
4704 }
4705
4706 /* If we are reloading a pseudo-register that was set by the previous
4707 insn, see if we can get rid of that pseudo-register entirely
4708 by redirecting the previous insn into our reload register. */
4709
4710 else if (optimize && GET_CODE (old) == REG
4711 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4712 && dead_or_set_p (insn, old)
4713 /* This is unsafe if some other reload
4714 uses the same reg first. */
4715 && (reload_when_needed[j] == RELOAD_OTHER
4716 || reload_when_needed[j] == RELOAD_FOR_INPUT
4717 || reload_when_needed[j] == RELOAD_FOR_INPUT_RELOAD_ADDRESS))
4718 {
4719 rtx temp = PREV_INSN (insn);
4720 while (temp && GET_CODE (temp) == NOTE)
4721 temp = PREV_INSN (temp);
4722 if (temp
4723 && GET_CODE (temp) == INSN
4724 && GET_CODE (PATTERN (temp)) == SET
4725 && SET_DEST (PATTERN (temp)) == old
4726 /* Make sure we can access insn_operand_constraint. */
4727 && asm_noperands (PATTERN (temp)) < 0
4728 /* This is unsafe if prev insn rejects our reload reg. */
4729 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
4730 reloadreg)
4731 /* This is unsafe if operand occurs more than once in current
4732 insn. Perhaps some occurrences aren't reloaded. */
4733 && count_occurrences (PATTERN (insn), old) == 1
4734 /* Don't risk splitting a matching pair of operands. */
4735 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
4736 {
4737 /* Store into the reload register instead of the pseudo. */
4738 SET_DEST (PATTERN (temp)) = reloadreg;
4739 /* If these are the only uses of the pseudo reg,
4740 pretend for GDB it lives in the reload reg we used. */
4741 if (reg_n_deaths[REGNO (old)] == 1
4742 && reg_n_sets[REGNO (old)] == 1)
4743 {
4744 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
4745 alter_reg (REGNO (old), -1);
4746 }
4747 special = 1;
4748 }
4749 }
4750
4751 /* We can't do that, so output an insn to load RELOADREG.
4752 Keep them in the following order:
4753 all reloads for input reload addresses,
4754 all reloads for ordinary input operands,
4755 all reloads for addresses of non-reloaded operands,
4756 the insn being reloaded,
4757 all reloads for addresses of output reloads,
4758 the output reloads. */
4759 if (! special)
4760 {
4761#ifdef SECONDARY_INPUT_RELOAD_CLASS
4762 rtx second_reload_reg = 0;
4763 enum insn_code icode;
4764
4765 /* If we have a secondary reload, pick up the secondary register
d445b551
RK
4766 and icode, if any. If OLDEQUIV and OLD are different or
4767 if this is an in-out reload, recompute whether or not we
4768 still need a secondary register and what the icode should
4769 be. If we still need a secondary register and the class or
4770 icode is different, go back to reloading from OLD if using
4771 OLDEQUIV means that we got the wrong type of register. We
4772 cannot have different class or icode due to an in-out reload
4773 because we don't make such reloads when both the input and
4774 output need secondary reload registers. */
32131a9c
RK
4775
4776 if (reload_secondary_reload[j] >= 0)
4777 {
4778 int secondary_reload = reload_secondary_reload[j];
1554c2c6
RK
4779 rtx real_oldequiv = oldequiv;
4780 rtx real_old = old;
4781
4782 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
4783 and similarly for OLD.
4784 See comments in find_secondary_reload in reload.c. */
4785 if (GET_CODE (oldequiv) == REG
4786 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
4787 && reg_equiv_mem[REGNO (oldequiv)] != 0)
4788 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
4789
4790 if (GET_CODE (old) == REG
4791 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4792 && reg_equiv_mem[REGNO (old)] != 0)
4793 real_old = reg_equiv_mem[REGNO (old)];
4794
32131a9c
RK
4795 second_reload_reg = reload_reg_rtx[secondary_reload];
4796 icode = reload_secondary_icode[j];
4797
d445b551
RK
4798 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
4799 || (reload_in[j] != 0 && reload_out[j] != 0))
32131a9c
RK
4800 {
4801 enum reg_class new_class
4802 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
1554c2c6 4803 mode, real_oldequiv);
32131a9c
RK
4804
4805 if (new_class == NO_REGS)
4806 second_reload_reg = 0;
4807 else
4808 {
4809 enum insn_code new_icode;
4810 enum machine_mode new_mode;
4811
4812 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
4813 REGNO (second_reload_reg)))
1554c2c6 4814 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
4815 else
4816 {
4817 new_icode = reload_in_optab[(int) mode];
4818 if (new_icode != CODE_FOR_nothing
4819 && ((insn_operand_predicate[(int) new_icode][0]
a8fdc208 4820 && ! ((*insn_operand_predicate[(int) new_icode][0])
32131a9c 4821 (reloadreg, mode)))
a8fdc208
RS
4822 || (insn_operand_predicate[(int) new_icode][1]
4823 && ! ((*insn_operand_predicate[(int) new_icode][1])
1554c2c6 4824 (real_oldequiv, mode)))))
32131a9c
RK
4825 new_icode = CODE_FOR_nothing;
4826
4827 if (new_icode == CODE_FOR_nothing)
4828 new_mode = mode;
4829 else
4830 new_mode = insn_operand_mode[new_icode][2];
4831
4832 if (GET_MODE (second_reload_reg) != new_mode)
4833 {
4834 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
4835 new_mode))
1554c2c6 4836 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
4837 else
4838 second_reload_reg
4839 = gen_reg_rtx (REG, new_mode,
4840 REGNO (second_reload_reg));
4841 }
4842 }
4843 }
4844 }
4845
4846 /* If we still need a secondary reload register, check
4847 to see if it is being used as a scratch or intermediate
1554c2c6
RK
4848 register and generate code appropriately. If we need
4849 a scratch register, use REAL_OLDEQUIV since the form of
4850 the insn may depend on the actual address if it is
4851 a MEM. */
32131a9c
RK
4852
4853 if (second_reload_reg)
4854 {
4855 if (icode != CODE_FOR_nothing)
4856 {
d445b551 4857 reload_insn = emit_insn_before (GEN_FCN (icode)
1554c2c6
RK
4858 (reloadreg,
4859 real_oldequiv,
d445b551
RK
4860 second_reload_reg),
4861 where);
4862 if (this_reload_insn == 0)
4863 this_reload_insn = reload_insn;
32131a9c
RK
4864 special = 1;
4865 }
4866 else
4867 {
4868 /* See if we need a scratch register to load the
4869 intermediate register (a tertiary reload). */
4870 enum insn_code tertiary_icode
4871 = reload_secondary_icode[secondary_reload];
4872
4873 if (tertiary_icode != CODE_FOR_nothing)
4874 {
4875 rtx third_reload_reg
4876 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
4877
d445b551
RK
4878 reload_insn
4879 = emit_insn_before ((GEN_FCN (tertiary_icode)
4880 (second_reload_reg,
1554c2c6 4881 real_oldequiv,
d445b551
RK
4882 third_reload_reg)),
4883 where);
4884 if (this_reload_insn == 0)
4885 this_reload_insn = reload_insn;
32131a9c
RK
4886 }
4887 else
4888 {
d445b551
RK
4889 reload_insn
4890 = gen_input_reload (second_reload_reg,
fe751ebf 4891 oldequiv, where);
d445b551
RK
4892 if (this_reload_insn == 0)
4893 this_reload_insn = reload_insn;
32131a9c
RK
4894 oldequiv = second_reload_reg;
4895 }
4896 }
4897 }
4898 }
4899#endif
4900
4901 if (! special)
d445b551 4902 {
3c3eeea6 4903 reload_insn = gen_input_reload (reloadreg, oldequiv, where);
d445b551
RK
4904 if (this_reload_insn == 0)
4905 this_reload_insn = reload_insn;
4906 }
32131a9c
RK
4907
4908#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
4909 /* We may have to make a REG_DEAD note for the secondary reload
4910 register in the insns we just made. Find the last insn that
4911 mentioned the register. */
4912 if (! special && second_reload_reg
4913 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
4914 {
4915 rtx prev;
4916
4917 for (prev = where;
4918 prev != PREV_INSN (this_reload_insn);
4919 prev = PREV_INSN (prev))
4920 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
bfa30b22
RK
4921 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
4922 PATTERN (prev)))
32131a9c
RK
4923 {
4924 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
4925 second_reload_reg,
4926 REG_NOTES (prev));
4927 break;
4928 }
4929 }
4930#endif
4931 }
4932
4933 /* Update where to put other reload insns. */
4934 if (this_reload_insn)
4935 switch (reload_when_needed[j])
4936 {
4937 case RELOAD_FOR_INPUT:
4938 case RELOAD_OTHER:
4939 if (first_other_reload_insn == first_operand_address_reload_insn)
4940 first_other_reload_insn = this_reload_insn;
4941 break;
4942 case RELOAD_FOR_OPERAND_ADDRESS:
a8efe40d 4943 if (first_operand_address_reload_insn == before_insn)
32131a9c 4944 first_operand_address_reload_insn = this_reload_insn;
a8efe40d 4945 if (first_other_reload_insn == before_insn)
32131a9c
RK
4946 first_other_reload_insn = this_reload_insn;
4947 }
4948
4949 /* reload_inc[j] was formerly processed here. */
4950 }
4951
4952 /* Add a note saying the input reload reg
4953 dies in this insn, if anyone cares. */
4954#ifdef PRESERVE_DEATH_INFO_REGNO_P
4955 if (old != 0
4956 && reload_reg_rtx[j] != old
4957 && reload_reg_rtx[j] != 0
4958 && reload_out[j] == 0
4959 && ! reload_inherited[j]
4960 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
4961 {
4962 register rtx reloadreg = reload_reg_rtx[j];
4963
a8fdc208 4964#if 0
32131a9c
RK
4965 /* We can't abort here because we need to support this for sched.c.
4966 It's not terrible to miss a REG_DEAD note, but we should try
4967 to figure out how to do this correctly. */
4968 /* The code below is incorrect for address-only reloads. */
4969 if (reload_when_needed[j] != RELOAD_OTHER
4970 && reload_when_needed[j] != RELOAD_FOR_INPUT)
4971 abort ();
4972#endif
4973
4974 /* Add a death note to this insn, for an input reload. */
4975
4976 if ((reload_when_needed[j] == RELOAD_OTHER
4977 || reload_when_needed[j] == RELOAD_FOR_INPUT)
4978 && ! dead_or_set_p (insn, reloadreg))
4979 REG_NOTES (insn)
4980 = gen_rtx (EXPR_LIST, REG_DEAD,
4981 reloadreg, REG_NOTES (insn));
4982 }
4983
4984 /* When we inherit a reload, the last marked death of the reload reg
4985 may no longer really be a death. */
4986 if (reload_reg_rtx[j] != 0
4987 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
4988 && reload_inherited[j])
4989 {
4990 /* Handle inheriting an output reload.
4991 Remove the death note from the output reload insn. */
4992 if (reload_spill_index[j] >= 0
4993 && GET_CODE (reload_in[j]) == REG
4994 && spill_reg_store[reload_spill_index[j]] != 0
4995 && find_regno_note (spill_reg_store[reload_spill_index[j]],
4996 REG_DEAD, REGNO (reload_reg_rtx[j])))
4997 remove_death (REGNO (reload_reg_rtx[j]),
4998 spill_reg_store[reload_spill_index[j]]);
4999 /* Likewise for input reloads that were inherited. */
5000 else if (reload_spill_index[j] >= 0
5001 && GET_CODE (reload_in[j]) == REG
5002 && spill_reg_store[reload_spill_index[j]] == 0
5003 && reload_inheritance_insn[j] != 0
a8fdc208 5004 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
32131a9c
RK
5005 REGNO (reload_reg_rtx[j])))
5006 remove_death (REGNO (reload_reg_rtx[j]),
5007 reload_inheritance_insn[j]);
5008 else
5009 {
5010 rtx prev;
5011
5012 /* We got this register from find_equiv_reg.
5013 Search back for its last death note and get rid of it.
5014 But don't search back too far.
5015 Don't go past a place where this reg is set,
5016 since a death note before that remains valid. */
5017 for (prev = PREV_INSN (insn);
5018 prev && GET_CODE (prev) != CODE_LABEL;
5019 prev = PREV_INSN (prev))
5020 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5021 && dead_or_set_p (prev, reload_reg_rtx[j]))
5022 {
5023 if (find_regno_note (prev, REG_DEAD,
5024 REGNO (reload_reg_rtx[j])))
5025 remove_death (REGNO (reload_reg_rtx[j]), prev);
5026 break;
5027 }
5028 }
5029 }
5030
5031 /* We might have used find_equiv_reg above to choose an alternate
5032 place from which to reload. If so, and it died, we need to remove
5033 that death and move it to one of the insns we just made. */
5034
5035 if (oldequiv_reg != 0
5036 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
5037 {
5038 rtx prev, prev1;
5039
5040 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
5041 prev = PREV_INSN (prev))
5042 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5043 && dead_or_set_p (prev, oldequiv_reg))
5044 {
5045 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
5046 {
5047 for (prev1 = this_reload_insn;
5048 prev1; prev1 = PREV_INSN (prev1))
5049 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
bfa30b22
RK
5050 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
5051 PATTERN (prev1)))
32131a9c
RK
5052 {
5053 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
5054 oldequiv_reg,
5055 REG_NOTES (prev1));
5056 break;
5057 }
5058 remove_death (REGNO (oldequiv_reg), prev);
5059 }
5060 break;
5061 }
5062 }
5063#endif
5064
5065 /* If we are reloading a register that was recently stored in with an
5066 output-reload, see if we can prove there was
5067 actually no need to store the old value in it. */
5068
5069 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
5070 /* This is unsafe if some other reload uses the same reg first. */
5071 && (reload_when_needed[j] == RELOAD_OTHER
5072 || reload_when_needed[j] == RELOAD_FOR_INPUT
5073 || reload_when_needed[j] == RELOAD_FOR_INPUT_RELOAD_ADDRESS)
5074 && GET_CODE (reload_in[j]) == REG
5075#if 0
5076 /* There doesn't seem to be any reason to restrict this to pseudos
5077 and doing so loses in the case where we are copying from a
5078 register of the wrong class. */
5079 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
5080#endif
5081 && spill_reg_store[reload_spill_index[j]] != 0
5082 && dead_or_set_p (insn, reload_in[j])
5083 /* This is unsafe if operand occurs more than once in current
5084 insn. Perhaps some occurrences weren't reloaded. */
5085 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5086 delete_output_reload (insn, j,
5087 spill_reg_store[reload_spill_index[j]]);
5088
5089 /* Input-reloading is done. Now do output-reloading,
5090 storing the value from the reload-register after the main insn
5091 if reload_out[j] is nonzero.
5092
5093 ??? At some point we need to support handling output reloads of
5094 JUMP_INSNs or insns that set cc0. */
5095 old = reload_out[j];
5096 if (old != 0
5097 && reload_reg_rtx[j] != old
5098 && reload_reg_rtx[j] != 0)
5099 {
5100 register rtx reloadreg = reload_reg_rtx[j];
5101 register rtx second_reloadreg = 0;
5102 rtx prev_insn = PREV_INSN (first_output_reload_insn);
5103 rtx note, p;
5104 enum machine_mode mode;
5105 int special = 0;
5106
5107 /* An output operand that dies right away does need a reload,
5108 but need not be copied from it. Show the new location in the
5109 REG_UNUSED note. */
5110 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
5111 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
5112 {
5113 XEXP (note, 0) = reload_reg_rtx[j];
5114 continue;
5115 }
5116 else if (GET_CODE (old) == SCRATCH)
5117 /* If we aren't optimizing, there won't be a REG_UNUSED note,
5118 but we don't want to make an output reload. */
5119 continue;
5120
5121#if 0
5122 /* Strip off of OLD any size-increasing SUBREGs such as
5123 (SUBREG:SI foo:QI 0). */
5124
5125 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
5126 && (GET_MODE_SIZE (GET_MODE (old))
5127 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
5128 old = SUBREG_REG (old);
5129#endif
5130
5131 /* If is a JUMP_INSN, we can't support output reloads yet. */
5132 if (GET_CODE (insn) == JUMP_INSN)
5133 abort ();
5134
5135 /* Determine the mode to reload in.
5136 See comments above (for input reloading). */
5137
5138 mode = GET_MODE (old);
5139 if (mode == VOIDmode)
5140 abort (); /* Should never happen for an output. */
5141
5142 /* A strict-low-part output operand needs to be reloaded
5143 in the mode of the entire value. */
5144 if (reload_strict_low[j])
5145 {
5146 mode = GET_MODE (SUBREG_REG (reload_out[j]));
5147 /* Encapsulate OLD into that mode. */
5148 /* If OLD is a subreg, then strip it, since the subreg will
5149 be altered by this very reload. */
5150 while (GET_CODE (old) == SUBREG && GET_MODE (old) != mode)
5151 old = SUBREG_REG (old);
5152 if (GET_MODE (old) != VOIDmode
5153 && mode != GET_MODE (old))
5154 old = gen_rtx (SUBREG, mode, old, 0);
5155 }
5156
5157 if (GET_MODE (reloadreg) != mode)
5158 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5159
5160#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5161
5162 /* If we need two reload regs, set RELOADREG to the intermediate
5163 one, since it will be stored into OUT. We might need a secondary
5164 register only for an input reload, so check again here. */
5165
1554c2c6 5166 if (reload_secondary_reload[j] >= 0)
32131a9c 5167 {
1554c2c6 5168 rtx real_old = old;
32131a9c 5169
1554c2c6
RK
5170 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
5171 && reg_equiv_mem[REGNO (old)] != 0)
5172 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 5173
1554c2c6
RK
5174 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
5175 mode, real_old)
5176 != NO_REGS))
5177 {
5178 second_reloadreg = reloadreg;
5179 reloadreg = reload_reg_rtx[reload_secondary_reload[j]];
32131a9c 5180
1554c2c6
RK
5181 /* See if RELOADREG is to be used as a scratch register
5182 or as an intermediate register. */
5183 if (reload_secondary_icode[j] != CODE_FOR_nothing)
32131a9c 5184 {
1554c2c6
RK
5185 emit_insn_before ((GEN_FCN (reload_secondary_icode[j])
5186 (real_old, second_reloadreg,
5187 reloadreg)),
5188 first_output_reload_insn);
5189 special = 1;
32131a9c
RK
5190 }
5191 else
1554c2c6
RK
5192 {
5193 /* See if we need both a scratch and intermediate reload
5194 register. */
5195 int secondary_reload = reload_secondary_reload[j];
5196 enum insn_code tertiary_icode
5197 = reload_secondary_icode[secondary_reload];
5198 rtx pat;
32131a9c 5199
1554c2c6
RK
5200 if (GET_MODE (reloadreg) != mode)
5201 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5202
5203 if (tertiary_icode != CODE_FOR_nothing)
5204 {
5205 rtx third_reloadreg
5206 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5207 pat = (GEN_FCN (tertiary_icode)
5208 (reloadreg, second_reloadreg, third_reloadreg));
5209 }
5210 else
5211 pat = gen_move_insn (reloadreg, second_reloadreg);
5212
5213 emit_insn_before (pat, first_output_reload_insn);
5214 }
32131a9c
RK
5215 }
5216 }
5217#endif
5218
5219 /* Output the last reload insn. */
5220 if (! special)
5221 emit_insn_before (gen_move_insn (old, reloadreg),
5222 first_output_reload_insn);
5223
5224#ifdef PRESERVE_DEATH_INFO_REGNO_P
5225 /* If final will look at death notes for this reg,
5226 put one on the last output-reload insn to use it. Similarly
5227 for any secondary register. */
5228 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
5229 for (p = PREV_INSN (first_output_reload_insn);
5230 p != prev_insn; p = PREV_INSN (p))
5231 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
5232 && reg_overlap_mentioned_for_reload_p (reloadreg,
5233 PATTERN (p)))
32131a9c
RK
5234 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
5235 reloadreg, REG_NOTES (p));
5236
5237#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5238 if (! special
5239 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
5240 for (p = PREV_INSN (first_output_reload_insn);
5241 p != prev_insn; p = PREV_INSN (p))
5242 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
5243 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
5244 PATTERN (p)))
32131a9c
RK
5245 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
5246 second_reloadreg, REG_NOTES (p));
5247#endif
5248#endif
5249 /* Look at all insns we emitted, just to be safe. */
5250 for (p = NEXT_INSN (prev_insn); p != first_output_reload_insn;
5251 p = NEXT_INSN (p))
5252 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
5253 {
5254 /* If this output reload doesn't come from a spill reg,
5255 clear any memory of reloaded copies of the pseudo reg.
5256 If this output reload comes from a spill reg,
5257 reg_has_output_reload will make this do nothing. */
5258 note_stores (PATTERN (p), forget_old_reloads_1);
5259
5260 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
5261 store_insn = p;
5262 }
5263
5264 first_output_reload_insn = NEXT_INSN (prev_insn);
5265 }
5266
5267 if (reload_spill_index[j] >= 0)
5268 new_spill_reg_store[reload_spill_index[j]] = store_insn;
5269 }
5270
32131a9c
RK
5271 /* Move death notes from INSN
5272 to output-operand-address and output reload insns. */
5273#ifdef PRESERVE_DEATH_INFO_REGNO_P
5274 {
5275 rtx insn1;
5276 /* Loop over those insns, last ones first. */
5277 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
5278 insn1 = PREV_INSN (insn1))
5279 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
5280 {
5281 rtx source = SET_SRC (PATTERN (insn1));
5282 rtx dest = SET_DEST (PATTERN (insn1));
5283
5284 /* The note we will examine next. */
5285 rtx reg_notes = REG_NOTES (insn);
5286 /* The place that pointed to this note. */
5287 rtx *prev_reg_note = &REG_NOTES (insn);
5288
5289 /* If the note is for something used in the source of this
5290 reload insn, or in the output address, move the note. */
5291 while (reg_notes)
5292 {
5293 rtx next_reg_notes = XEXP (reg_notes, 1);
5294 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
5295 && GET_CODE (XEXP (reg_notes, 0)) == REG
5296 && ((GET_CODE (dest) != REG
bfa30b22
RK
5297 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
5298 dest))
5299 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
5300 source)))
32131a9c
RK
5301 {
5302 *prev_reg_note = next_reg_notes;
5303 XEXP (reg_notes, 1) = REG_NOTES (insn1);
5304 REG_NOTES (insn1) = reg_notes;
5305 }
5306 else
5307 prev_reg_note = &XEXP (reg_notes, 1);
5308
5309 reg_notes = next_reg_notes;
5310 }
5311 }
5312 }
5313#endif
5314
5315 /* For all the spill regs newly reloaded in this instruction,
5316 record what they were reloaded from, so subsequent instructions
d445b551
RK
5317 can inherit the reloads.
5318
5319 Update spill_reg_store for the reloads of this insn.
e9e79d69 5320 Copy the elements that were updated in the loop above. */
32131a9c
RK
5321
5322 for (j = 0; j < n_reloads; j++)
5323 {
5324 register int r = reload_order[j];
5325 register int i = reload_spill_index[r];
5326
5327 /* I is nonneg if this reload used one of the spill regs.
5328 If reload_reg_rtx[r] is 0, this is an optional reload
5329 that we opted to ignore. */
d445b551 5330
32131a9c
RK
5331 if (i >= 0 && reload_reg_rtx[r] != 0)
5332 {
5333 /* First, clear out memory of what used to be in this spill reg.
5334 If consecutive registers are used, clear them all. */
5335 int nr
5336 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
5337 int k;
5338
5339 for (k = 0; k < nr; k++)
5340 {
5341 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
5342 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
5343 }
5344
5345 /* Maybe the spill reg contains a copy of reload_out. */
5346 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
5347 {
5348 register int nregno = REGNO (reload_out[r]);
d445b551
RK
5349
5350 spill_reg_store[i] = new_spill_reg_store[i];
32131a9c 5351 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 5352
32131a9c
RK
5353 for (k = 0; k < nr; k++)
5354 {
5355 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5356 = nregno;
5357 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
5358 }
5359 }
d445b551 5360
32131a9c
RK
5361 /* Maybe the spill reg contains a copy of reload_in. */
5362 else if (reload_out[r] == 0
5363 && reload_in[r] != 0
5364 && (GET_CODE (reload_in[r]) == REG
5365 || GET_CODE (reload_in_reg[r]) == REG))
5366 {
5367 register int nregno;
5368 if (GET_CODE (reload_in[r]) == REG)
5369 nregno = REGNO (reload_in[r]);
5370 else
5371 nregno = REGNO (reload_in_reg[r]);
5372
5373 /* If there are two separate reloads (one in and one out)
5374 for the same (hard or pseudo) reg,
a8fdc208 5375 leave reg_last_reload_reg set
32131a9c
RK
5376 based on the output reload.
5377 Otherwise, set it from this input reload. */
5378 if (!reg_has_output_reload[nregno]
5379 /* But don't do so if another input reload
5380 will clobber this one's value. */
5381 && reload_reg_reaches_end_p (spill_regs[i],
5382 reload_when_needed[r]))
5383 {
5384 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551
RK
5385
5386 /* Unless we inherited this reload, show we haven't
5387 recently done a store. */
5388 if (! reload_inherited[r])
5389 spill_reg_store[i] = 0;
5390
32131a9c
RK
5391 for (k = 0; k < nr; k++)
5392 {
5393 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5394 = nregno;
5395 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
5396 = insn;
5397 }
5398 }
5399 }
5400 }
5401
5402 /* The following if-statement was #if 0'd in 1.34 (or before...).
5403 It's reenabled in 1.35 because supposedly nothing else
5404 deals with this problem. */
5405
5406 /* If a register gets output-reloaded from a non-spill register,
5407 that invalidates any previous reloaded copy of it.
5408 But forget_old_reloads_1 won't get to see it, because
5409 it thinks only about the original insn. So invalidate it here. */
5410 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
5411 {
5412 register int nregno = REGNO (reload_out[r]);
5413 reg_last_reload_reg[nregno] = 0;
5414 }
5415 }
5416}
5417\f
5418/* Emit code before BEFORE_INSN to perform an input reload of IN to RELOADREG.
3c3eeea6 5419 Returns first insn emitted. */
32131a9c
RK
5420
5421rtx
3c3eeea6 5422gen_input_reload (reloadreg, in, before_insn)
32131a9c
RK
5423 rtx reloadreg;
5424 rtx in;
5425 rtx before_insn;
5426{
5427 register rtx prev_insn = PREV_INSN (before_insn);
5428
a8fdc208 5429 /* How to do this reload can get quite tricky. Normally, we are being
32131a9c
RK
5430 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
5431 register that didn't get a hard register. In that case we can just
5432 call emit_move_insn.
5433
5434 We can also be asked to reload a PLUS that adds either two registers or
5435 a register and a constant or MEM. This can occur during frame pointer
5436 elimination. That case if handled by trying to emit a single insn
5437 to perform the add. If it is not valid, we use a two insn sequence.
5438
5439 Finally, we could be called to handle an 'o' constraint by putting
5440 an address into a register. In that case, we first try to do this
5441 with a named pattern of "reload_load_address". If no such pattern
5442 exists, we just emit a SET insn and hope for the best (it will normally
5443 be valid on machines that use 'o').
5444
5445 This entire process is made complex because reload will never
5446 process the insns we generate here and so we must ensure that
5447 they will fit their constraints and also by the fact that parts of
5448 IN might be being reloaded separately and replaced with spill registers.
5449 Because of this, we are, in some sense, just guessing the right approach
5450 here. The one listed above seems to work.
5451
5452 ??? At some point, this whole thing needs to be rethought. */
5453
5454 if (GET_CODE (in) == PLUS
5455 && GET_CODE (XEXP (in, 0)) == REG
5456 && (GET_CODE (XEXP (in, 1)) == REG
5457 || CONSTANT_P (XEXP (in, 1))
5458 || GET_CODE (XEXP (in, 1)) == MEM))
5459 {
5460 /* We need to compute the sum of what is either a register and a
5461 constant, a register and memory, or a hard register and a pseudo
5462 register and put it into the reload register. The best possible way
5463 of doing this is if the machine has a three-operand ADD insn that
5464 accepts the required operands.
5465
5466 The simplest approach is to try to generate such an insn and see if it
5467 is recognized and matches its constraints. If so, it can be used.
5468
5469 It might be better not to actually emit the insn unless it is valid,
0009eff2
RK
5470 but we need to pass the insn as an operand to `recog' and
5471 `insn_extract'and it is simpler to emit and then delete the insn if
5472 not valid than to dummy things up. */
a8fdc208 5473
af929c62 5474 rtx op0, op1, tem, insn;
32131a9c 5475 int code;
a8fdc208 5476
af929c62
RK
5477 op0 = find_replacement (&XEXP (in, 0));
5478 op1 = find_replacement (&XEXP (in, 1));
5479
32131a9c
RK
5480 /* Since constraint checking is strict, commutativity won't be
5481 checked, so we need to do that here to avoid spurious failure
5482 if the add instruction is two-address and the second operand
5483 of the add is the same as the reload reg, which is frequently
5484 the case. If the insn would be A = B + A, rearrange it so
5485 it will be A = A + B as constrain_operands expects. */
a8fdc208 5486
32131a9c
RK
5487 if (GET_CODE (XEXP (in, 1)) == REG
5488 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
af929c62
RK
5489 tem = op0, op0 = op1, op1 = tem;
5490
5491 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
5492 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
32131a9c
RK
5493
5494 insn = emit_insn_before (gen_rtx (SET, VOIDmode, reloadreg, in),
5495 before_insn);
5496 code = recog_memoized (insn);
5497
5498 if (code >= 0)
5499 {
5500 insn_extract (insn);
5501 /* We want constrain operands to treat this insn strictly in
5502 its validity determination, i.e., the way it would after reload
5503 has completed. */
5504 if (constrain_operands (code, 1))
5505 return insn;
5506 }
5507
5508 if (PREV_INSN (insn))
5509 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
5510 if (NEXT_INSN (insn))
5511 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
5512
5513 /* If that failed, we must use a conservative two-insn sequence.
5514 use move to copy constant, MEM, or pseudo register to the reload
af929c62
RK
5515 register since "move" will be able to handle an arbitrary operand,
5516 unlike add which can't, in general. Then add the registers.
32131a9c
RK
5517
5518 If there is another way to do this for a specific machine, a
5519 DEFINE_PEEPHOLE should be specified that recognizes the sequence
5520 we emit below. */
5521
af929c62
RK
5522 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
5523 || (GET_CODE (op1) == REG
5524 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
5525 tem = op0, op0 = op1, op1 = tem;
32131a9c 5526
af929c62
RK
5527 emit_insn_before (gen_move_insn (reloadreg, op0), before_insn);
5528 emit_insn_before (gen_add2_insn (reloadreg, op1), before_insn);
32131a9c
RK
5529 }
5530
5531 /* If IN is a simple operand, use gen_move_insn. */
5532 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
3c3eeea6 5533 emit_insn_before (gen_move_insn (reloadreg, in), before_insn);
32131a9c
RK
5534
5535#ifdef HAVE_reload_load_address
5536 else if (HAVE_reload_load_address)
3c3eeea6 5537 emit_insn_before (gen_reload_load_address (reloadreg, in), before_insn);
32131a9c
RK
5538#endif
5539
5540 /* Otherwise, just write (set REGLOADREG IN) and hope for the best. */
5541 else
3c3eeea6 5542 emit_insn_before (gen_rtx (SET, VOIDmode, reloadreg, in), before_insn);
32131a9c
RK
5543
5544 /* Return the first insn emitted.
5545 We can not just return PREV_INSN (before_insn), because there may have
5546 been multiple instructions emitted. Also note that gen_move_insn may
5547 emit more than one insn itself, so we can not assume that there is one
5548 insn emitted per emit_insn_before call. */
5549
5550 return NEXT_INSN (prev_insn);
5551}
5552\f
/* Delete a previously made output-reload
   whose result we now believe is not needed.
   First we double-check.

   INSN is the insn now being processed.
   OUTPUT_RELOAD_INSN is the insn of the output reload.
   J is the reload-number for this insn.  */

static void
delete_output_reload (insn, j, output_reload_insn)
     rtx insn;
     int j;
     rtx output_reload_insn;
{
  register rtx i1;

  /* Get the raw pseudo-register referred to.
     Strip any SUBREGs so we look at the underlying REG.  */

  rtx reg = reload_in[j];
  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and no jumps or labels intervene, then the value can get
     here through the reload reg alone.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      /* Any label or jump means control flow could enter or leave
	 this straight-line span, so the double-check fails.  */
      if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
	return;
      if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
	  && reg_mentioned_p (reg, PATTERN (i1)))
	return;
    }

  /* If this insn will store in the pseudo again,
     the previous store can be removed.  */
  if (reload_out[j] == reload_in[j])
    delete_insn (output_reload_insn);

  /* See if the pseudo reg has been completely replaced
     with reload regs.  If so, delete the store insn
     and forget we had a stack slot for the pseudo.
     (reg_n_deaths == 1 and reg_basic_block >= 0 mean the pseudo dies
     exactly once and is confined to a single basic block.)  */
  else if (reg_n_deaths[REGNO (reg)] == 1
	   && reg_basic_block[REGNO (reg)] >= 0
	   && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here
	 and the beginning of the current basic block.
	 (We also know that the last use before INSN was
	 the output reload we are thinking of deleting, but never mind that.)
	 Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  /* Stop at the head of the basic block.  */
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	  if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    /* Some other ref remains;
	       we can't do anything.  */
	    return;
	}

      /* Delete the now-dead stores into this pseudo.
	 NOTE(review): this walks PREV_INSN while calling delete_insn on
	 the current insn — presumably delete_insn leaves the insn's links
	 intact so the walk can continue; confirm against delete_insn.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    delete_insn (i2);
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	}

      /* For the debugging info,
	 say the pseudo lives in this reload reg.
	 -1 tells alter_reg it no longer needs a stack slot.  */
      reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
      alter_reg (REGNO (reg), -1);
    }
}
5645
5646\f
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is a autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.

   INSN is the insn before which the new insns should be emitted.

   The return value is the first of the insns emitted.  */

static rtx
inc_for_reload (reloadreg, value, inc_amount, insn)
     rtx reloadreg;
     rtx value;
     int inc_amount;
     rtx insn;
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
  /* Remember the insn before our output so we can return the first
     insn we emit (everything goes in between PREV and INSN).  */
  rtx prev = PREV_INSN (insn);
  rtx inc;
  rtx add_insn;
  int code;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were non-zero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (GET_CODE (incloc) == REG)
    reg_last_reload_reg[REGNO (incloc)] = 0;

  /* For decrements, negate the amount so one add path handles both.  */
  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
    inc_amount = - inc_amount;

  inc = gen_rtx (CONST_INT, VOIDmode, inc_amount);

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post)
    emit_insn_before (gen_move_insn (reloadreg, incloc), insn);

  /* See if we can directly increment INCLOC.  Use a method similar to that
     in gen_input_reload: emit the add tentatively, then check whether it
     is recognized and satisfies its constraints.  */

  add_insn = emit_insn_before (gen_rtx (SET, VOIDmode, incloc,
					gen_rtx (PLUS, GET_MODE (incloc),
						 incloc, inc)), insn);

  code = recog_memoized (add_insn);
  if (code >= 0)
    {
      insn_extract (add_insn);
      /* Constrain strictly (second arg 1), i.e. the way operands would be
	 checked after reload has completed.  */
      if (constrain_operands (code, 1))
	{
	  /* If this is a pre-increment and we have incremented the value
	     where it lives, copy the incremented value to RELOADREG to
	     be used as an address.  */

	  if (! post)
	    emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
	  return NEXT_INSN (prev);
	}
    }

  /* The tentative add insn failed; unlink it from the chain by hand.  */
  if (PREV_INSN (add_insn))
    NEXT_INSN (PREV_INSN (add_insn)) = NEXT_INSN (add_insn);
  if (NEXT_INSN (add_insn))
    PREV_INSN (NEXT_INSN (add_insn)) = PREV_INSN (add_insn);

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
      emit_insn_before (gen_add2_insn (reloadreg, inc), insn);
      emit_insn_before (gen_move_insn (incloc, reloadreg), insn);
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied INCLOC to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn_before (gen_add2_insn (reloadreg, inc), insn);
      emit_insn_before (gen_move_insn (incloc, reloadreg), insn);
      emit_insn_before (gen_add2_insn (reloadreg,
				       gen_rtx (CONST_INT, VOIDmode,
						-inc_amount)),
			insn);
    }

  /* Return the first insn emitted after PREV.  */
  return NEXT_INSN (prev);
}
5751\f
5752/* Return 1 if we are certain that the constraint-string STRING allows
5753 the hard register REG. Return 0 if we can't be sure of this. */
5754
5755static int
5756constraint_accepts_reg_p (string, reg)
5757 char *string;
5758 rtx reg;
5759{
5760 int value = 0;
5761 int regno = true_regnum (reg);
5762 int c;
5763
5764 /* Initialize for first alternative. */
5765 value = 0;
5766 /* Check that each alternative contains `g' or `r'. */
5767 while (1)
5768 switch (c = *string++)
5769 {
5770 case 0:
5771 /* If an alternative lacks `g' or `r', we lose. */
5772 return value;
5773 case ',':
5774 /* If an alternative lacks `g' or `r', we lose. */
5775 if (value == 0)
5776 return 0;
5777 /* Initialize for next alternative. */
5778 value = 0;
5779 break;
5780 case 'g':
5781 case 'r':
5782 /* Any general reg wins for this alternative. */
5783 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
5784 value = 1;
5785 break;
5786 default:
5787 /* Any reg in specified class wins for this alternative. */
5788 {
0009eff2 5789 enum reg_class class = REG_CLASS_FROM_LETTER (c);
32131a9c 5790
0009eff2 5791 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
32131a9c
RK
5792 value = 1;
5793 }
5794 }
5795}
5796\f
d445b551
RK
5797/* Return the number of places FIND appears within X, but don't count
5798 an occurrence if some SET_DEST is FIND. */
32131a9c
RK
5799
5800static int
5801count_occurrences (x, find)
5802 register rtx x, find;
5803{
5804 register int i, j;
5805 register enum rtx_code code;
5806 register char *format_ptr;
5807 int count;
5808
5809 if (x == find)
5810 return 1;
5811 if (x == 0)
5812 return 0;
5813
5814 code = GET_CODE (x);
5815
5816 switch (code)
5817 {
5818 case REG:
5819 case QUEUED:
5820 case CONST_INT:
5821 case CONST_DOUBLE:
5822 case SYMBOL_REF:
5823 case CODE_LABEL:
5824 case PC:
5825 case CC0:
5826 return 0;
d445b551
RK
5827
5828 case SET:
5829 if (SET_DEST (x) == find)
5830 return count_occurrences (SET_SRC (x), find);
5831 break;
32131a9c
RK
5832 }
5833
5834 format_ptr = GET_RTX_FORMAT (code);
5835 count = 0;
5836
5837 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5838 {
5839 switch (*format_ptr++)
5840 {
5841 case 'e':
5842 count += count_occurrences (XEXP (x, i), find);
5843 break;
5844
5845 case 'E':
5846 if (XVEC (x, i) != NULL)
5847 {
5848 for (j = 0; j < XVECLEN (x, i); j++)
5849 count += count_occurrences (XVECEXP (x, i, j), find);
5850 }
5851 break;
5852 }
5853 }
5854 return count;
5855}
This page took 0.628812 seconds and 5 git commands to generate.