/* Source: gcc/reload1.c (historical GCC sources, recovered from a
   gcc.gnu.org git-blame view; blame annotations removed).  */
/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
20
21#include "config.h"
22#include "rtl.h"
23#include "obstack.h"
24#include "insn-config.h"
25#include "insn-flags.h"
26#include "insn-codes.h"
27#include "flags.h"
28#include "expr.h"
29#include "regs.h"
30#include "hard-reg-set.h"
31#include "reload.h"
32#include "recog.h"
33#include "basic-block.h"
34#include "output.h"
35#include <stdio.h>
36
37/* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
69\f
70/* During reload_as_needed, element N contains a REG rtx for the hard reg
71 into which pseudo reg N has been reloaded (perhaps for a previous insn). */
72static rtx *reg_last_reload_reg;
73
74/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
75 for an output reload that stores into reg N. */
76static char *reg_has_output_reload;
77
78/* Indicates which hard regs are reload-registers for an output reload
79 in the current insn. */
80static HARD_REG_SET reg_is_output_reload;
81
82/* Element N is the constant value to which pseudo reg N is equivalent,
83 or zero if pseudo reg N is not equivalent to a constant.
84 find_reloads looks at this in order to replace pseudo reg N
85 with the constant it stands for. */
86rtx *reg_equiv_constant;
87
88/* Element N is a memory location to which pseudo reg N is equivalent,
89 prior to any register elimination (such as frame pointer to stack
90 pointer). Depending on whether or not it is a valid address, this value
91 is transferred to either reg_equiv_address or reg_equiv_mem. */
4803a34a 92rtx *reg_equiv_memory_loc;
32131a9c
RK
93
94/* Element N is the address of stack slot to which pseudo reg N is equivalent.
95 This is used when the address is not valid as a memory address
96 (because its displacement is too big for the machine.) */
97rtx *reg_equiv_address;
98
99/* Element N is the memory slot to which pseudo reg N is equivalent,
100 or zero if pseudo reg N is not equivalent to a memory slot. */
101rtx *reg_equiv_mem;
102
103/* Widest width in which each pseudo reg is referred to (via subreg). */
104static int *reg_max_ref_width;
105
106/* Element N is the insn that initialized reg N from its equivalent
107 constant or memory slot. */
108static rtx *reg_equiv_init;
109
110/* During reload_as_needed, element N contains the last pseudo regno
111 reloaded into the Nth reload register. This vector is in parallel
112 with spill_regs. If that pseudo reg occupied more than one register,
113 reg_reloaded_contents points to that pseudo for each spill register in
114 use; all of these must remain set for an inheritance to occur. */
115static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
116
117/* During reload_as_needed, element N contains the insn for which
118 the Nth reload register was last used. This vector is in parallel
119 with spill_regs, and its contents are significant only when
120 reg_reloaded_contents is significant. */
121static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
122
123/* Number of spill-regs so far; number of valid elements of spill_regs. */
124static int n_spills;
125
126/* In parallel with spill_regs, contains REG rtx's for those regs.
127 Holds the last rtx used for any given reg, or 0 if it has never
128 been used for spilling yet. This rtx is reused, provided it has
129 the proper mode. */
130static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
131
132/* In parallel with spill_regs, contains nonzero for a spill reg
133 that was stored after the last time it was used.
134 The precise value is the insn generated to do the store. */
135static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
136
137/* This table is the inverse mapping of spill_regs:
138 indexed by hard reg number,
139 it contains the position of that reg in spill_regs,
140 or -1 for something that is not in spill_regs. */
141static short spill_reg_order[FIRST_PSEUDO_REGISTER];
142
143/* This reg set indicates registers that may not be used for retrying global
144 allocation. The registers that may not be used include all spill registers
145 and the frame pointer (if we are using one). */
146HARD_REG_SET forbidden_regs;
147
148/* This reg set indicates registers that are not good for spill registers.
149 They will not be used to complete groups of spill registers. This includes
150 all fixed registers, registers that may be eliminated, and registers
151 explicitly used in the rtl.
152
153 (spill_reg_order prevents these registers from being used to start a
154 group.) */
155static HARD_REG_SET bad_spill_regs;
156
157/* Describes order of use of registers for reloading
158 of spilled pseudo-registers. `spills' is the number of
159 elements that are actually valid; new ones are added at the end. */
160static short spill_regs[FIRST_PSEUDO_REGISTER];
161
162/* Describes order of preference for putting regs into spill_regs.
163 Contains the numbers of all the hard regs, in order most preferred first.
164 This order is different for each function.
165 It is set up by order_regs_for_reload.
166 Empty elements at the end contain -1. */
167static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
168
169/* 1 for a hard register that appears explicitly in the rtl
170 (for example, function value registers, special registers
171 used by insns, structure value pointer registers). */
172static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
173
174/* Indicates if a register was counted against the need for
175 groups. 0 means it can count against max_nongroup instead. */
176static HARD_REG_SET counted_for_groups;
177
178/* Indicates if a register was counted against the need for
179 non-groups. 0 means it can become part of a new group.
180 During choose_reload_regs, 1 here means don't use this reg
181 as part of a group, even if it seems to be otherwise ok. */
182static HARD_REG_SET counted_for_nongroups;
183
184/* Nonzero if indirect addressing is supported on the machine; this means
185 that spilling (REG n) does not require reloading it into a register in
186 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
187 value indicates the level of indirect addressing supported, e.g., two
188 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
189 a hard register. */
190
191static char spill_indirect_levels;
192
193/* Nonzero if indirect addressing is supported when the innermost MEM is
194 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
195 which these are valid is the same as spill_indirect_levels, above. */
196
197char indirect_symref_ok;
198
199/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
200
201char double_reg_address_ok;
202
203/* Record the stack slot for each spilled hard register. */
204
205static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
206
207/* Width allocated so far for that stack slot. */
208
209static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
210
211/* Indexed by register class and basic block number, nonzero if there is
212 any need for a spill register of that class in that basic block.
213 The pointer is 0 if we did stupid allocation and don't know
214 the structure of basic blocks. */
215
216char *basic_block_needs[N_REG_CLASSES];
217
218/* First uid used by insns created by reload in this function.
219 Used in find_equiv_reg. */
220int reload_first_uid;
221
222/* Flag set by local-alloc or global-alloc if anything is live in
223 a call-clobbered reg across calls. */
224
225int caller_save_needed;
226
227/* Set to 1 while reload_as_needed is operating.
228 Required by some machines to handle any generated moves differently. */
229
230int reload_in_progress = 0;
231
232/* These arrays record the insn_code of insns that may be needed to
233 perform input and output reloads of special objects. They provide a
234 place to pass a scratch register. */
235
236enum insn_code reload_in_optab[NUM_MACHINE_MODES];
237enum insn_code reload_out_optab[NUM_MACHINE_MODES];
238
d45cf215 239/* This obstack is used for allocation of rtl during register elimination.
32131a9c
RK
240 The allocated storage can be freed once find_reloads has processed the
241 insn. */
242
243struct obstack reload_obstack;
244char *reload_firstobj;
245
246#define obstack_chunk_alloc xmalloc
247#define obstack_chunk_free free
248
249extern int xmalloc ();
250extern void free ();
251
252/* List of labels that must never be deleted. */
253extern rtx forced_labels;
254\f
255/* This structure is used to record information about register eliminations.
256 Each array entry describes one possible way of eliminating a register
257 in favor of another. If there is more than one way of eliminating a
258 particular register, the most preferred should be specified first. */
259
260static struct elim_table
261{
262 int from; /* Register number to be eliminated. */
263 int to; /* Register number used as replacement. */
264 int initial_offset; /* Initial difference between values. */
265 int can_eliminate; /* Non-zero if this elimination can be done. */
266 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
267 insns made by reload. */
268 int offset; /* Current offset between the two regs. */
a8efe40d 269 int max_offset; /* Maximum offset between the two regs. */
32131a9c
RK
270 int previous_offset; /* Offset at end of previous insn. */
271 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
272 rtx from_rtx; /* REG rtx for the register to be eliminated.
273 We cannot simply compare the number since
274 we might then spuriously replace a hard
275 register corresponding to a pseudo
276 assigned to the reg to be eliminated. */
277 rtx to_rtx; /* REG rtx for the replacement. */
278} reg_eliminate[] =
279
280/* If a set of eliminable registers was specified, define the table from it.
281 Otherwise, default to the normal case of the frame pointer being
282 replaced by the stack pointer. */
283
284#ifdef ELIMINABLE_REGS
285 ELIMINABLE_REGS;
286#else
287 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
288#endif
289
290#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
291
292/* Record the number of pending eliminations that have an offset not equal
293 to their initial offset. If non-zero, we use a new copy of each
294 replacement result in any insns encountered. */
295static int num_not_at_initial_offset;
296
297/* Count the number of registers that we may be able to eliminate. */
298static int num_eliminable;
299
300/* For each label, we record the offset of each elimination. If we reach
301 a label by more than one path and an offset differs, we cannot do the
302 elimination. This information is indexed by the number of the label.
303 The first table is an array of flags that records whether we have yet
304 encountered a label and the second table is an array of arrays, one
305 entry in the latter array for each elimination. */
306
307static char *offsets_known_at;
308static int (*offsets_at)[NUM_ELIMINABLE_REGS];
309
310/* Number of labels in the current function. */
311
312static int num_labels;
313\f
314void mark_home_live ();
315static void count_possible_groups ();
316static int possible_group_p ();
317static void scan_paradoxical_subregs ();
318static void reload_as_needed ();
319static int modes_equiv_for_class_p ();
320static void alter_reg ();
321static void delete_dead_insn ();
322static int new_spill_reg();
323static void set_label_offsets ();
324static int eliminate_regs_in_insn ();
325static void mark_not_eliminable ();
326static int spill_hard_reg ();
327static void choose_reload_regs ();
328static void emit_reload_insns ();
329static void delete_output_reload ();
330static void forget_old_reloads_1 ();
331static void order_regs_for_reload ();
332static rtx inc_for_reload ();
333static int constraint_accepts_reg_p ();
334static int count_occurrences ();
335
336extern void remove_death ();
337extern rtx adj_offsettable_operand ();
338extern rtx form_sum ();
339\f
340void
341init_reload ()
342{
343 register int i;
344
345 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
346 Set spill_indirect_levels to the number of levels such addressing is
347 permitted, zero if it is not permitted at all. */
348
349 register rtx tem
350 = gen_rtx (MEM, Pmode,
351 gen_rtx (PLUS, Pmode,
352 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
353 gen_rtx (CONST_INT, VOIDmode, 4)));
354 spill_indirect_levels = 0;
355
356 while (memory_address_p (QImode, tem))
357 {
358 spill_indirect_levels++;
359 tem = gen_rtx (MEM, Pmode, tem);
360 }
361
362 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
363
364 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
365 indirect_symref_ok = memory_address_p (QImode, tem);
366
367 /* See if reg+reg is a valid (and offsettable) address. */
368
a8fdc208 369 tem = gen_rtx (PLUS, Pmode,
32131a9c
RK
370 gen_rtx (REG, Pmode, FRAME_POINTER_REGNUM),
371 gen_rtx (REG, Pmode, FRAME_POINTER_REGNUM));
372 /* This way, we make sure that reg+reg is an offsettable address. */
373 tem = plus_constant (tem, 4);
374
375 double_reg_address_ok = memory_address_p (QImode, tem);
376
377 /* Initialize obstack for our rtl allocation. */
378 gcc_obstack_init (&reload_obstack);
379 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
380
381#ifdef HAVE_SECONDARY_RELOADS
382
383 /* Initialize the optabs for doing special input and output reloads. */
384
385 for (i = 0; i < NUM_MACHINE_MODES; i++)
386 reload_in_optab[i] = reload_out_optab[i] = CODE_FOR_nothing;
387
388#ifdef HAVE_reload_inqi
389 if (HAVE_reload_inqi)
390 reload_in_optab[(int) QImode] = CODE_FOR_reload_inqi;
391#endif
392#ifdef HAVE_reload_inhi
393 if (HAVE_reload_inhi)
394 reload_in_optab[(int) HImode] = CODE_FOR_reload_inhi;
395#endif
396#ifdef HAVE_reload_insi
397 if (HAVE_reload_insi)
398 reload_in_optab[(int) SImode] = CODE_FOR_reload_insi;
399#endif
400#ifdef HAVE_reload_indi
401 if (HAVE_reload_indi)
402 reload_in_optab[(int) DImode] = CODE_FOR_reload_indi;
403#endif
404#ifdef HAVE_reload_inti
405 if (HAVE_reload_inti)
406 reload_in_optab[(int) TImode] = CODE_FOR_reload_inti;
407#endif
408#ifdef HAVE_reload_insf
409 if (HAVE_reload_insf)
410 reload_in_optab[(int) SFmode] = CODE_FOR_reload_insf;
411#endif
412#ifdef HAVE_reload_indf
413 if (HAVE_reload_indf)
414 reload_in_optab[(int) DFmode] = CODE_FOR_reload_indf;
415#endif
416#ifdef HAVE_reload_inxf
417 if (HAVE_reload_inxf)
418 reload_in_optab[(int) XFmode] = CODE_FOR_reload_inxf;
419#endif
420#ifdef HAVE_reload_intf
421 if (HAVE_reload_intf)
422 reload_in_optab[(int) TFmode] = CODE_FOR_reload_intf;
423#endif
424
425#ifdef HAVE_reload_outqi
426 if (HAVE_reload_outqi)
427 reload_out_optab[(int) QImode] = CODE_FOR_reload_outqi;
428#endif
429#ifdef HAVE_reload_outhi
430 if (HAVE_reload_outhi)
431 reload_out_optab[(int) HImode] = CODE_FOR_reload_outhi;
432#endif
433#ifdef HAVE_reload_outsi
434 if (HAVE_reload_outsi)
435 reload_out_optab[(int) SImode] = CODE_FOR_reload_outsi;
436#endif
437#ifdef HAVE_reload_outdi
438 if (HAVE_reload_outdi)
439 reload_out_optab[(int) DImode] = CODE_FOR_reload_outdi;
440#endif
441#ifdef HAVE_reload_outti
442 if (HAVE_reload_outti)
443 reload_out_optab[(int) TImode] = CODE_FOR_reload_outti;
444#endif
445#ifdef HAVE_reload_outsf
446 if (HAVE_reload_outsf)
447 reload_out_optab[(int) SFmode] = CODE_FOR_reload_outsf;
448#endif
449#ifdef HAVE_reload_outdf
450 if (HAVE_reload_outdf)
451 reload_out_optab[(int) DFmode] = CODE_FOR_reload_outdf;
452#endif
453#ifdef HAVE_reload_outxf
454 if (HAVE_reload_outxf)
455 reload_out_optab[(int) XFmode] = CODE_FOR_reload_outxf;
456#endif
457#ifdef HAVE_reload_outtf
458 if (HAVE_reload_outtf)
459 reload_out_optab[(int) TFmode] = CODE_FOR_reload_outtf;
460#endif
461
462#endif /* HAVE_SECONDARY_RELOADS */
463
464}
465
/* Main entry point for the reload pass, and only entry point
   in this file.

   FIRST is the first insn of the function being compiled.

   GLOBAL nonzero means we were called from global_alloc
   and should attempt to reallocate any pseudoregs that we
   displace from hard regs we will use for reloads.
   If GLOBAL is zero, we do not have enough information to do that,
   so any pseudo reg that is spilled must go to the stack.

   DUMPFILE is the global-reg debugging dump file stream, or 0.
   If it is nonzero, messages are written to it to describe
   which registers are seized as reload regs, which pseudo regs
   are spilled from them, and where the pseudo regs are reallocated to.  */
482void
483reload (first, global, dumpfile)
484 rtx first;
485 int global;
486 FILE *dumpfile;
487{
488 register int class;
489 register int i;
490 register rtx insn;
491 register struct elim_table *ep;
492
493 int something_changed;
494 int something_needs_reloads;
495 int something_needs_elimination;
496 int new_basic_block_needs;
a8efe40d
RK
497 enum reg_class caller_save_spill_class = NO_REGS;
498 int caller_save_group_size = 1;
32131a9c
RK
499
500 /* The basic block number currently being processed for INSN. */
501 int this_block;
502
503 /* Make sure even insns with volatile mem refs are recognizable. */
504 init_recog ();
505
506 /* Enable find_equiv_reg to distinguish insns made by reload. */
507 reload_first_uid = get_max_uid ();
508
509 for (i = 0; i < N_REG_CLASSES; i++)
510 basic_block_needs[i] = 0;
511
512 /* Remember which hard regs appear explicitly
513 before we merge into `regs_ever_live' the ones in which
514 pseudo regs have been allocated. */
515 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
516
517 /* We don't have a stack slot for any spill reg yet. */
518 bzero (spill_stack_slot, sizeof spill_stack_slot);
519 bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);
520
a8efe40d
RK
521 /* Initialize the save area information for caller-save, in case some
522 are needed. */
523 init_save_areas ();
a8fdc208 524
32131a9c
RK
525 /* Compute which hard registers are now in use
526 as homes for pseudo registers.
527 This is done here rather than (eg) in global_alloc
528 because this point is reached even if not optimizing. */
529
530 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
531 mark_home_live (i);
532
533 /* Make sure that the last insn in the chain
534 is not something that needs reloading. */
535 emit_note (0, NOTE_INSN_DELETED);
536
537 /* Find all the pseudo registers that didn't get hard regs
538 but do have known equivalent constants or memory slots.
539 These include parameters (known equivalent to parameter slots)
540 and cse'd or loop-moved constant memory addresses.
541
542 Record constant equivalents in reg_equiv_constant
543 so they will be substituted by find_reloads.
544 Record memory equivalents in reg_mem_equiv so they can
545 be substituted eventually by altering the REG-rtx's. */
546
547 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
548 bzero (reg_equiv_constant, max_regno * sizeof (rtx));
549 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
550 bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
551 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
552 bzero (reg_equiv_mem, max_regno * sizeof (rtx));
553 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
554 bzero (reg_equiv_init, max_regno * sizeof (rtx));
555 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
556 bzero (reg_equiv_address, max_regno * sizeof (rtx));
557 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
558 bzero (reg_max_ref_width, max_regno * sizeof (int));
559
560 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
561 Also find all paradoxical subregs
562 and find largest such for each pseudo. */
563
564 for (insn = first; insn; insn = NEXT_INSN (insn))
565 {
566 rtx set = single_set (insn);
567
568 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
569 {
570 rtx note = find_reg_note (insn, REG_EQUIV, 0);
a8efe40d
RK
571 if (note
572#ifdef LEGITIMATE_PIC_OPERAND_P
a8fdc208 573 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
a8efe40d
RK
574 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
575#endif
576 )
32131a9c
RK
577 {
578 rtx x = XEXP (note, 0);
579 i = REGNO (SET_DEST (set));
580 if (i > LAST_VIRTUAL_REGISTER)
581 {
582 if (GET_CODE (x) == MEM)
583 reg_equiv_memory_loc[i] = x;
584 else if (CONSTANT_P (x))
585 {
586 if (LEGITIMATE_CONSTANT_P (x))
587 reg_equiv_constant[i] = x;
588 else
589 reg_equiv_memory_loc[i]
d445b551 590 = force_const_mem (GET_MODE (SET_DEST (set)), x);
32131a9c
RK
591 }
592 else
593 continue;
594
595 /* If this register is being made equivalent to a MEM
596 and the MEM is not SET_SRC, the equivalencing insn
597 is one with the MEM as a SET_DEST and it occurs later.
598 So don't mark this insn now. */
599 if (GET_CODE (x) != MEM
600 || rtx_equal_p (SET_SRC (set), x))
601 reg_equiv_init[i] = insn;
602 }
603 }
604 }
605
606 /* If this insn is setting a MEM from a register equivalent to it,
607 this is the equivalencing insn. */
608 else if (set && GET_CODE (SET_DEST (set)) == MEM
609 && GET_CODE (SET_SRC (set)) == REG
610 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
611 && rtx_equal_p (SET_DEST (set),
612 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
613 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
614
615 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
616 scan_paradoxical_subregs (PATTERN (insn));
617 }
618
619 /* Does this function require a frame pointer? */
620
621 frame_pointer_needed = (! flag_omit_frame_pointer
622#ifdef EXIT_IGNORE_STACK
623 /* ?? If EXIT_IGNORE_STACK is set, we will not save
624 and restore sp for alloca. So we can't eliminate
625 the frame pointer in that case. At some point,
626 we should improve this by emitting the
627 sp-adjusting insns for this case. */
628 || (current_function_calls_alloca
629 && EXIT_IGNORE_STACK)
630#endif
631 || FRAME_POINTER_REQUIRED);
632
633 num_eliminable = 0;
634
635 /* Initialize the table of registers to eliminate. The way we do this
636 depends on how the eliminable registers were defined. */
637#ifdef ELIMINABLE_REGS
638 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
639 {
640 ep->can_eliminate = ep->can_eliminate_previous
641 = (CAN_ELIMINATE (ep->from, ep->to)
642 && (ep->from != FRAME_POINTER_REGNUM || ! frame_pointer_needed));
643 }
644#else
645 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
646 = ! frame_pointer_needed;
647#endif
648
649 /* Count the number of eliminable registers and build the FROM and TO
a8fdc208 650 REG rtx's. Note that code in gen_rtx will cause, e.g.,
32131a9c
RK
651 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
652 We depend on this. */
653 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
654 {
655 num_eliminable += ep->can_eliminate;
656 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
657 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
658 }
659
660 num_labels = max_label_num () - get_first_label_num ();
661
662 /* Allocate the tables used to store offset information at labels. */
663 offsets_known_at = (char *) alloca (num_labels);
664 offsets_at
665 = (int (*)[NUM_ELIMINABLE_REGS])
666 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
667
668 offsets_known_at -= get_first_label_num ();
669 offsets_at -= get_first_label_num ();
670
671 /* Alter each pseudo-reg rtx to contain its hard reg number.
672 Assign stack slots to the pseudos that lack hard regs or equivalents.
673 Do not touch virtual registers. */
674
675 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
676 alter_reg (i, -1);
677
678 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
679 because the stack size may be a part of the offset computation for
680 register elimination. */
681 assign_stack_local (BLKmode, 0, 0);
682
683 /* If we have some registers we think can be eliminated, scan all insns to
684 see if there is an insn that sets one of these registers to something
685 other than itself plus a constant. If so, the register cannot be
686 eliminated. Doing this scan here eliminates an extra pass through the
687 main reload loop in the most common case where register elimination
688 cannot be done. */
689 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
690 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
691 || GET_CODE (insn) == CALL_INSN)
692 note_stores (PATTERN (insn), mark_not_eliminable);
693
694#ifndef REGISTER_CONSTRAINTS
695 /* If all the pseudo regs have hard regs,
696 except for those that are never referenced,
697 we know that no reloads are needed. */
698 /* But that is not true if there are register constraints, since
699 in that case some pseudos might be in the wrong kind of hard reg. */
700
701 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
702 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
703 break;
704
b8093d02 705 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
32131a9c
RK
706 return;
707#endif
708
709 /* Compute the order of preference for hard registers to spill.
710 Store them by decreasing preference in potential_reload_regs. */
711
712 order_regs_for_reload ();
713
714 /* So far, no hard regs have been spilled. */
715 n_spills = 0;
716 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
717 spill_reg_order[i] = -1;
718
719 /* On most machines, we can't use any register explicitly used in the
720 rtl as a spill register. But on some, we have to. Those will have
721 taken care to keep the life of hard regs as short as possible. */
722
723#ifdef SMALL_REGISTER_CLASSES
724 CLEAR_HARD_REG_SET (forbidden_regs);
725#else
726 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
727#endif
728
729 /* Spill any hard regs that we know we can't eliminate. */
730 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
731 if (! ep->can_eliminate)
732 {
733 spill_hard_reg (ep->from, global, dumpfile, 1);
734 regs_ever_live[ep->from] = 1;
735 }
736
737 if (global)
738 for (i = 0; i < N_REG_CLASSES; i++)
739 {
740 basic_block_needs[i] = (char *)alloca (n_basic_blocks);
741 bzero (basic_block_needs[i], n_basic_blocks);
742 }
743
744 /* This loop scans the entire function each go-round
745 and repeats until one repetition spills no additional hard regs. */
746
d45cf215 747 /* This flag is set when a pseudo reg is spilled,
32131a9c
RK
748 to require another pass. Note that getting an additional reload
749 reg does not necessarily imply any pseudo reg was spilled;
750 sometimes we find a reload reg that no pseudo reg was allocated in. */
751 something_changed = 1;
752 /* This flag is set if there are any insns that require reloading. */
753 something_needs_reloads = 0;
754 /* This flag is set if there are any insns that require register
755 eliminations. */
756 something_needs_elimination = 0;
757 while (something_changed)
758 {
759 rtx after_call = 0;
760
761 /* For each class, number of reload regs needed in that class.
762 This is the maximum over all insns of the needs in that class
763 of the individual insn. */
764 int max_needs[N_REG_CLASSES];
765 /* For each class, size of group of consecutive regs
766 that is needed for the reloads of this class. */
767 int group_size[N_REG_CLASSES];
768 /* For each class, max number of consecutive groups needed.
769 (Each group contains group_size[CLASS] consecutive registers.) */
770 int max_groups[N_REG_CLASSES];
771 /* For each class, max number needed of regs that don't belong
772 to any of the groups. */
773 int max_nongroups[N_REG_CLASSES];
774 /* For each class, the machine mode which requires consecutive
775 groups of regs of that class.
776 If two different modes ever require groups of one class,
777 they must be the same size and equally restrictive for that class,
778 otherwise we can't handle the complexity. */
779 enum machine_mode group_mode[N_REG_CLASSES];
780 rtx x;
781
782 something_changed = 0;
783 bzero (max_needs, sizeof max_needs);
784 bzero (max_groups, sizeof max_groups);
785 bzero (max_nongroups, sizeof max_nongroups);
786 bzero (group_size, sizeof group_size);
787 for (i = 0; i < N_REG_CLASSES; i++)
788 group_mode[i] = VOIDmode;
789
790 /* Keep track of which basic blocks are needing the reloads. */
791 this_block = 0;
792
793 /* Remember whether any element of basic_block_needs
794 changes from 0 to 1 in this pass. */
795 new_basic_block_needs = 0;
796
797 /* Reset all offsets on eliminable registers to their initial values. */
798#ifdef ELIMINABLE_REGS
799 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
800 {
801 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
a8efe40d
RK
802 ep->previous_offset = ep->offset
803 = ep->max_offset = ep->initial_offset;
32131a9c
RK
804 }
805#else
806#ifdef INITIAL_FRAME_POINTER_OFFSET
807 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
808#else
809 if (!FRAME_POINTER_REQUIRED)
810 abort ();
811 reg_eliminate[0].initial_offset = 0;
812#endif
a8efe40d 813 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
32131a9c
RK
814 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
815#endif
816
817 num_not_at_initial_offset = 0;
818
819 bzero (&offsets_known_at[get_first_label_num ()], num_labels);
820
821 /* Set a known offset for each forced label to be at the initial offset
822 of each elimination. We do this because we assume that all
823 computed jumps occur from a location where each elimination is
824 at its initial offset. */
825
826 for (x = forced_labels; x; x = XEXP (x, 1))
827 if (XEXP (x, 0))
828 set_label_offsets (XEXP (x, 0), 0, 1);
829
830 /* For each pseudo register that has an equivalent location defined,
831 try to eliminate any eliminable registers (such as the frame pointer)
832 assuming initial offsets for the replacement register, which
833 is the normal case.
834
835 If the resulting location is directly addressable, substitute
836 the MEM we just got directly for the old REG.
837
838 If it is not addressable but is a constant or the sum of a hard reg
839 and constant, it is probably not addressable because the constant is
840 out of range, in that case record the address; we will generate
841 hairy code to compute the address in a register each time it is
a8fdc208 842 needed.
32131a9c
RK
843
844 If the location is not addressable, but does not have one of the
845 above forms, assign a stack slot. We have to do this to avoid the
846 potential of producing lots of reloads if, e.g., a location involves
847 a pseudo that didn't get a hard register and has an equivalent memory
848 location that also involves a pseudo that didn't get a hard register.
849
850 Perhaps at some point we will improve reload_when_needed handling
851 so this problem goes away. But that's very hairy. */
852
853 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
854 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
855 {
856 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, 0);
857
858 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
859 XEXP (x, 0)))
860 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
861 else if (CONSTANT_P (XEXP (x, 0))
862 || (GET_CODE (XEXP (x, 0)) == PLUS
863 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
864 && (REGNO (XEXP (XEXP (x, 0), 0))
865 < FIRST_PSEUDO_REGISTER)
866 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
867 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
868 else
869 {
870 /* Make a new stack slot. Then indicate that something
a8fdc208 871 changed so we go back and recompute offsets for
32131a9c
RK
872 eliminable registers because the allocation of memory
873 below might change some offset. reg_equiv_{mem,address}
874 will be set up for this pseudo on the next pass around
875 the loop. */
876 reg_equiv_memory_loc[i] = 0;
877 reg_equiv_init[i] = 0;
878 alter_reg (i, -1);
879 something_changed = 1;
880 }
881 }
a8fdc208 882
d45cf215 883 /* If we allocated another pseudo to the stack, redo elimination
32131a9c
RK
884 bookkeeping. */
885 if (something_changed)
886 continue;
887
a8efe40d
RK
888 /* If caller-saves needs a group, initialize the group to include
889 the size and mode required for caller-saves. */
890
891 if (caller_save_group_size > 1)
892 {
893 group_mode[(int) caller_save_spill_class] = Pmode;
894 group_size[(int) caller_save_spill_class] = caller_save_group_size;
895 }
896
32131a9c
RK
897 /* Compute the most additional registers needed by any instruction.
898 Collect information separately for each class of regs. */
899
900 for (insn = first; insn; insn = NEXT_INSN (insn))
901 {
902 if (global && this_block + 1 < n_basic_blocks
903 && insn == basic_block_head[this_block+1])
904 ++this_block;
905
906 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
907 might include REG_LABEL), we need to see what effects this
908 has on the known offsets at labels. */
909
910 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
911 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
912 && REG_NOTES (insn) != 0))
913 set_label_offsets (insn, insn, 0);
914
915 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
916 {
917 /* Nonzero means don't use a reload reg that overlaps
918 the place where a function value can be returned. */
919 rtx avoid_return_reg = 0;
920
921 rtx old_body = PATTERN (insn);
922 int old_code = INSN_CODE (insn);
923 rtx old_notes = REG_NOTES (insn);
924 int did_elimination = 0;
925
926 /* Initially, count RELOAD_OTHER reloads.
927 Later, merge in the other kinds. */
928 int insn_needs[N_REG_CLASSES];
929 int insn_groups[N_REG_CLASSES];
930 int insn_total_groups = 0;
931
932 /* Count RELOAD_FOR_INPUT_RELOAD_ADDRESS reloads. */
933 int insn_needs_for_inputs[N_REG_CLASSES];
934 int insn_groups_for_inputs[N_REG_CLASSES];
935 int insn_total_groups_for_inputs = 0;
936
937 /* Count RELOAD_FOR_OUTPUT_RELOAD_ADDRESS reloads. */
938 int insn_needs_for_outputs[N_REG_CLASSES];
939 int insn_groups_for_outputs[N_REG_CLASSES];
940 int insn_total_groups_for_outputs = 0;
941
942 /* Count RELOAD_FOR_OPERAND_ADDRESS reloads. */
943 int insn_needs_for_operands[N_REG_CLASSES];
944 int insn_groups_for_operands[N_REG_CLASSES];
945 int insn_total_groups_for_operands = 0;
946
32131a9c
RK
947#if 0 /* This wouldn't work nowadays, since optimize_bit_field
948 looks for non-strict memory addresses. */
949 /* Optimization: a bit-field instruction whose field
950 happens to be a byte or halfword in memory
951 can be changed to a move instruction. */
952
953 if (GET_CODE (PATTERN (insn)) == SET)
954 {
955 rtx dest = SET_DEST (PATTERN (insn));
956 rtx src = SET_SRC (PATTERN (insn));
957
958 if (GET_CODE (dest) == ZERO_EXTRACT
959 || GET_CODE (dest) == SIGN_EXTRACT)
960 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
961 if (GET_CODE (src) == ZERO_EXTRACT
962 || GET_CODE (src) == SIGN_EXTRACT)
963 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
964 }
965#endif
966
967 /* If needed, eliminate any eliminable registers. */
968 if (num_eliminable)
969 did_elimination = eliminate_regs_in_insn (insn, 0);
970
971#ifdef SMALL_REGISTER_CLASSES
972 /* Set avoid_return_reg if this is an insn
973 that might use the value of a function call. */
974 if (GET_CODE (insn) == CALL_INSN)
975 {
976 if (GET_CODE (PATTERN (insn)) == SET)
977 after_call = SET_DEST (PATTERN (insn));
978 else if (GET_CODE (PATTERN (insn)) == PARALLEL
979 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
980 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
981 else
982 after_call = 0;
983 }
984 else if (after_call != 0
985 && !(GET_CODE (PATTERN (insn)) == SET
986 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
987 {
988 if (reg_mentioned_p (after_call, PATTERN (insn)))
989 avoid_return_reg = after_call;
990 after_call = 0;
991 }
992#endif /* SMALL_REGISTER_CLASSES */
993
994 /* Analyze the instruction. */
995 find_reloads (insn, 0, spill_indirect_levels, global,
996 spill_reg_order);
997
998 /* Remember for later shortcuts which insns had any reloads or
999 register eliminations.
1000
1001 One might think that it would be worthwhile to mark insns
1002 that need register replacements but not reloads, but this is
1003 not safe because find_reloads may do some manipulation of
1004 the insn (such as swapping commutative operands), which would
1005 be lost when we restore the old pattern after register
1006 replacement. So the actions of find_reloads must be redone in
1007 subsequent passes or in reload_as_needed.
1008
1009 However, it is safe to mark insns that need reloads
1010 but not register replacement. */
1011
1012 PUT_MODE (insn, (did_elimination ? QImode
1013 : n_reloads ? HImode
1014 : VOIDmode));
1015
1016 /* Discard any register replacements done. */
1017 if (did_elimination)
1018 {
1019 obstack_free (&reload_obstack, reload_firstobj);
1020 PATTERN (insn) = old_body;
1021 INSN_CODE (insn) = old_code;
1022 REG_NOTES (insn) = old_notes;
1023 something_needs_elimination = 1;
1024 }
1025
a8efe40d 1026 /* If this insn has no reloads, we need not do anything except
a8fdc208 1027 in the case of a CALL_INSN when we have caller-saves and
a8efe40d
RK
1028 caller-save needs reloads. */
1029
1030 if (n_reloads == 0
1031 && ! (GET_CODE (insn) == CALL_INSN
1032 && caller_save_spill_class != NO_REGS))
32131a9c
RK
1033 continue;
1034
1035 something_needs_reloads = 1;
1036
a8efe40d
RK
1037 for (i = 0; i < N_REG_CLASSES; i++)
1038 {
1039 insn_needs[i] = 0, insn_groups[i] = 0;
1040 insn_needs_for_inputs[i] = 0, insn_groups_for_inputs[i] = 0;
1041 insn_needs_for_outputs[i] = 0, insn_groups_for_outputs[i] = 0;
1042 insn_needs_for_operands[i] = 0, insn_groups_for_operands[i] = 0;
1043 }
1044
32131a9c
RK
1045 /* Count each reload once in every class
1046 containing the reload's own class. */
1047
1048 for (i = 0; i < n_reloads; i++)
1049 {
1050 register enum reg_class *p;
e85ddd99 1051 enum reg_class class = reload_reg_class[i];
32131a9c
RK
1052 int size;
1053 enum machine_mode mode;
1054 int *this_groups;
1055 int *this_needs;
1056 int *this_total_groups;
1057
1058 /* Don't count the dummy reloads, for which one of the
1059 regs mentioned in the insn can be used for reloading.
1060 Don't count optional reloads.
1061 Don't count reloads that got combined with others. */
1062 if (reload_reg_rtx[i] != 0
1063 || reload_optional[i] != 0
1064 || (reload_out[i] == 0 && reload_in[i] == 0
1065 && ! reload_secondary_p[i]))
1066 continue;
1067
e85ddd99
RK
1068 /* Show that a reload register of this class is needed
1069 in this basic block. We do not use insn_needs and
1070 insn_groups because they are overly conservative for
1071 this purpose. */
1072 if (global && ! basic_block_needs[(int) class][this_block])
1073 {
1074 basic_block_needs[(int) class][this_block] = 1;
1075 new_basic_block_needs = 1;
1076 }
1077
32131a9c
RK
1078 /* Decide which time-of-use to count this reload for. */
1079 switch (reload_when_needed[i])
1080 {
1081 case RELOAD_OTHER:
1082 case RELOAD_FOR_OUTPUT:
1083 case RELOAD_FOR_INPUT:
1084 this_needs = insn_needs;
1085 this_groups = insn_groups;
1086 this_total_groups = &insn_total_groups;
1087 break;
1088
1089 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
1090 this_needs = insn_needs_for_inputs;
1091 this_groups = insn_groups_for_inputs;
1092 this_total_groups = &insn_total_groups_for_inputs;
1093 break;
1094
1095 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
1096 this_needs = insn_needs_for_outputs;
1097 this_groups = insn_groups_for_outputs;
1098 this_total_groups = &insn_total_groups_for_outputs;
1099 break;
1100
1101 case RELOAD_FOR_OPERAND_ADDRESS:
1102 this_needs = insn_needs_for_operands;
1103 this_groups = insn_groups_for_operands;
1104 this_total_groups = &insn_total_groups_for_operands;
1105 break;
1106 }
1107
1108 mode = reload_inmode[i];
1109 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1110 mode = reload_outmode[i];
e85ddd99 1111 size = CLASS_MAX_NREGS (class, mode);
32131a9c
RK
1112 if (size > 1)
1113 {
1114 enum machine_mode other_mode, allocate_mode;
1115
1116 /* Count number of groups needed separately from
1117 number of individual regs needed. */
e85ddd99
RK
1118 this_groups[(int) class]++;
1119 p = reg_class_superclasses[(int) class];
32131a9c
RK
1120 while (*p != LIM_REG_CLASSES)
1121 this_groups[(int) *p++]++;
1122 (*this_total_groups)++;
1123
1124 /* Record size and mode of a group of this class. */
1125 /* If more than one size group is needed,
1126 make all groups the largest needed size. */
e85ddd99 1127 if (group_size[(int) class] < size)
32131a9c 1128 {
e85ddd99 1129 other_mode = group_mode[(int) class];
32131a9c
RK
1130 allocate_mode = mode;
1131
e85ddd99
RK
1132 group_size[(int) class] = size;
1133 group_mode[(int) class] = mode;
32131a9c
RK
1134 }
1135 else
1136 {
1137 other_mode = mode;
e85ddd99 1138 allocate_mode = group_mode[(int) class];
32131a9c
RK
1139 }
1140
1141 /* Crash if two dissimilar machine modes both need
1142 groups of consecutive regs of the same class. */
1143
1144 if (other_mode != VOIDmode
1145 && other_mode != allocate_mode
1146 && ! modes_equiv_for_class_p (allocate_mode,
1147 other_mode,
e85ddd99 1148 class))
32131a9c
RK
1149 abort ();
1150 }
1151 else if (size == 1)
1152 {
e85ddd99
RK
1153 this_needs[(int) class] += 1;
1154 p = reg_class_superclasses[(int) class];
32131a9c
RK
1155 while (*p != LIM_REG_CLASSES)
1156 this_needs[(int) *p++] += 1;
1157 }
1158 else
1159 abort ();
1160 }
1161
1162 /* All reloads have been counted for this insn;
1163 now merge the various times of use.
1164 This sets insn_needs, etc., to the maximum total number
1165 of registers needed at any point in this insn. */
1166
1167 for (i = 0; i < N_REG_CLASSES; i++)
1168 {
1169 int this_max;
1170 this_max = insn_needs_for_inputs[i];
1171 if (insn_needs_for_outputs[i] > this_max)
1172 this_max = insn_needs_for_outputs[i];
1173 if (insn_needs_for_operands[i] > this_max)
1174 this_max = insn_needs_for_operands[i];
1175 insn_needs[i] += this_max;
1176 this_max = insn_groups_for_inputs[i];
1177 if (insn_groups_for_outputs[i] > this_max)
1178 this_max = insn_groups_for_outputs[i];
1179 if (insn_groups_for_operands[i] > this_max)
1180 this_max = insn_groups_for_operands[i];
1181 insn_groups[i] += this_max;
32131a9c 1182 }
a8efe40d 1183
32131a9c
RK
1184 insn_total_groups += MAX (insn_total_groups_for_inputs,
1185 MAX (insn_total_groups_for_outputs,
1186 insn_total_groups_for_operands));
1187
a8efe40d
RK
1188 /* If this is a CALL_INSN and caller-saves will need
1189 a spill register, act as if the spill register is
1190 needed for this insn. However, the spill register
1191 can be used by any reload of this insn, so we only
1192 need do something if no need for that class has
a8fdc208 1193 been recorded.
a8efe40d
RK
1194
1195 The assumption that every CALL_INSN will trigger a
1196 caller-save is highly conservative, however, the number
1197 of cases where caller-saves will need a spill register but
1198 a block containing a CALL_INSN won't need a spill register
1199 of that class should be quite rare.
1200
1201 If a group is needed, the size and mode of the group will
d45cf215 1202 have been set up at the beginning of this loop. */
a8efe40d
RK
1203
1204 if (GET_CODE (insn) == CALL_INSN
1205 && caller_save_spill_class != NO_REGS)
1206 {
1207 int *caller_save_needs
1208 = (caller_save_group_size > 1 ? insn_groups : insn_needs);
1209
1210 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1211 {
1212 register enum reg_class *p
1213 = reg_class_superclasses[(int) caller_save_spill_class];
1214
1215 caller_save_needs[(int) caller_save_spill_class]++;
1216
1217 while (*p != LIM_REG_CLASSES)
0aaa6af8 1218 caller_save_needs[(int) *p++] += 1;
a8efe40d
RK
1219 }
1220
1221 if (caller_save_group_size > 1)
1222 insn_total_groups = MAX (insn_total_groups, 1);
1223 }
1224
32131a9c
RK
1225#ifdef SMALL_REGISTER_CLASSES
1226 /* If this insn stores the value of a function call,
1227 and that value is in a register that has been spilled,
1228 and if the insn needs a reload in a class
1229 that might use that register as the reload register,
1230 then add add an extra need in that class.
1231 This makes sure we have a register available that does
1232 not overlap the return value. */
1233 if (avoid_return_reg)
1234 {
1235 int regno = REGNO (avoid_return_reg);
1236 int nregs
1237 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1238 int r;
1239 int inc_groups = 0;
1240 for (r = regno; r < regno + nregs; r++)
1241 if (spill_reg_order[r] >= 0)
1242 for (i = 0; i < N_REG_CLASSES; i++)
1243 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1244 {
1245 if (insn_needs[i] > 0)
1246 insn_needs[i]++;
1247 if (insn_groups[i] > 0
1248 && nregs > 1)
1249 inc_groups = 1;
1250 }
1251 if (inc_groups)
1252 insn_groups[i]++;
1253 }
1254#endif /* SMALL_REGISTER_CLASSES */
1255
1256 /* For each class, collect maximum need of any insn. */
1257
1258 for (i = 0; i < N_REG_CLASSES; i++)
1259 {
1260 if (max_needs[i] < insn_needs[i])
1261 max_needs[i] = insn_needs[i];
1262 if (max_groups[i] < insn_groups[i])
1263 max_groups[i] = insn_groups[i];
1264 if (insn_total_groups > 0)
1265 if (max_nongroups[i] < insn_needs[i])
1266 max_nongroups[i] = insn_needs[i];
1267 }
1268 }
1269 /* Note that there is a continue statement above. */
1270 }
1271
d445b551 1272 /* If we have caller-saves, set up the save areas and see if caller-save
a8efe40d 1273 will need a spill register. */
32131a9c 1274
d445b551 1275 if (caller_save_needed
a8efe40d
RK
1276 && ! setup_save_areas (&something_changed)
1277 && caller_save_spill_class == NO_REGS)
32131a9c 1278 {
a8efe40d
RK
1279 /* The class we will need depends on whether the machine
1280 supports the sum of two registers for an address; see
1281 find_address_reloads for details. */
1282
a8fdc208 1283 caller_save_spill_class
a8efe40d
RK
1284 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1285 caller_save_group_size
1286 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1287 something_changed = 1;
32131a9c
RK
1288 }
1289
1290 /* Now deduct from the needs for the registers already
1291 available (already spilled). */
1292
1293 CLEAR_HARD_REG_SET (counted_for_groups);
1294 CLEAR_HARD_REG_SET (counted_for_nongroups);
1295
1296 /* First find all regs alone in their class
1297 and count them (if desired) for non-groups.
1298 We would be screwed if a group took the only reg in a class
d445b551 1299 for which a non-group reload is needed.
32131a9c
RK
1300 (Note there is still a bug; if a class has 2 regs,
1301 both could be stolen by groups and we would lose the same way.
1302 With luck, no machine will need a nongroup in a 2-reg class.) */
1303
1304 for (i = 0; i < n_spills; i++)
1305 {
1306 register enum reg_class *p;
1307 class = (int) REGNO_REG_CLASS (spill_regs[i]);
1308
1309 if (reg_class_size[class] == 1 && max_nongroups[class] > 0)
1310 {
1311 max_needs[class]--;
1312 p = reg_class_superclasses[class];
1313 while (*p != LIM_REG_CLASSES)
1314 max_needs[(int) *p++]--;
1315
1316 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1317 max_nongroups[class]--;
1318 p = reg_class_superclasses[class];
1319 while (*p != LIM_REG_CLASSES)
1320 {
1321 if (max_nongroups[(int) *p] > 0)
1322 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1323 max_nongroups[(int) *p++]--;
1324 }
1325 }
1326 }
1327
1328 /* Now find all consecutive groups of spilled registers
1329 and mark each group off against the need for such groups.
1330 But don't count them against ordinary need, yet. */
1331
1332 count_possible_groups (group_size, group_mode, max_groups);
1333
1334 /* Now count all spill regs against the individual need,
a8fdc208 1335 This includes those counted above for groups,
32131a9c
RK
1336 but not those previously counted for nongroups.
1337
1338 Those that weren't counted_for_groups can also count against
1339 the not-in-group need. */
1340
1341 for (i = 0; i < n_spills; i++)
1342 {
1343 register enum reg_class *p;
1344 class = (int) REGNO_REG_CLASS (spill_regs[i]);
1345
1346 /* Those counted at the beginning shouldn't be counted twice. */
1347 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
1348 {
1349 max_needs[class]--;
1350 p = reg_class_superclasses[class];
1351 while (*p != LIM_REG_CLASSES)
1352 max_needs[(int) *p++]--;
1353
1354 if (! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i]))
1355 {
1356 if (max_nongroups[class] > 0)
1357 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1358 max_nongroups[class]--;
1359 p = reg_class_superclasses[class];
1360 while (*p != LIM_REG_CLASSES)
1361 {
1362 if (max_nongroups[(int) *p] > 0)
1363 SET_HARD_REG_BIT (counted_for_nongroups,
1364 spill_regs[i]);
1365 max_nongroups[(int) *p++]--;
1366 }
1367 }
1368 }
1369 }
1370
5c23c401
RK
1371 /* See if anything that happened changes which eliminations are valid.
1372 For example, on the Sparc, whether or not the frame pointer can
1373 be eliminated can depend on what registers have been used. We need
1374 not check some conditions again (such as flag_omit_frame_pointer)
1375 since they can't have changed. */
1376
1377 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1378 if ((ep->from == FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1379#ifdef ELIMINABLE_REGS
1380 || ! CAN_ELIMINATE (ep->from, ep->to)
1381#endif
1382 )
1383 ep->can_eliminate = 0;
1384
32131a9c
RK
1385 /* Look for the case where we have discovered that we can't replace
1386 register A with register B and that means that we will now be
1387 trying to replace register A with register C. This means we can
1388 no longer replace register C with register B and we need to disable
1389 such an elimination, if it exists. This occurs often with A == ap,
1390 B == sp, and C == fp. */
a8fdc208 1391
32131a9c
RK
1392 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1393 {
1394 struct elim_table *op;
1395 register int new_to = -1;
1396
1397 if (! ep->can_eliminate && ep->can_eliminate_previous)
1398 {
1399 /* Find the current elimination for ep->from, if there is a
1400 new one. */
1401 for (op = reg_eliminate;
1402 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1403 if (op->from == ep->from && op->can_eliminate)
1404 {
1405 new_to = op->to;
1406 break;
1407 }
1408
1409 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1410 disable it. */
1411 for (op = reg_eliminate;
1412 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1413 if (op->from == new_to && op->to == ep->to)
1414 op->can_eliminate = 0;
1415 }
1416 }
1417
1418 /* See if any registers that we thought we could eliminate the previous
1419 time are no longer eliminable. If so, something has changed and we
1420 must spill the register. Also, recompute the number of eliminable
1421 registers and see if the frame pointer is needed; it is if there is
1422 no elimination of the frame pointer that we can perform. */
1423
1424 frame_pointer_needed = 1;
1425 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1426 {
1427 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM)
1428 frame_pointer_needed = 0;
1429
1430 if (! ep->can_eliminate && ep->can_eliminate_previous)
1431 {
1432 ep->can_eliminate_previous = 0;
1433 spill_hard_reg (ep->from, global, dumpfile, 1);
1434 regs_ever_live[ep->from] = 1;
1435 something_changed = 1;
1436 num_eliminable--;
1437 }
1438 }
1439
1440 /* If all needs are met, we win. */
1441
1442 for (i = 0; i < N_REG_CLASSES; i++)
1443 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1444 break;
1445 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1446 break;
1447
1448 /* Not all needs are met; must spill more hard regs. */
1449
1450 /* If any element of basic_block_needs changed from 0 to 1,
1451 re-spill all the regs already spilled. This may spill
1452 additional pseudos that didn't spill before. */
1453
1454 if (new_basic_block_needs)
1455 for (i = 0; i < n_spills; i++)
1456 something_changed
1457 |= spill_hard_reg (spill_regs[i], global, dumpfile, 0);
1458
1459 /* Now find more reload regs to satisfy the remaining need
1460 Do it by ascending class number, since otherwise a reg
1461 might be spilled for a big class and might fail to count
1462 for a smaller class even though it belongs to that class.
1463
1464 Count spilled regs in `spills', and add entries to
1465 `spill_regs' and `spill_reg_order'.
1466
1467 ??? Note there is a problem here.
1468 When there is a need for a group in a high-numbered class,
1469 and also need for non-group regs that come from a lower class,
1470 the non-group regs are chosen first. If there aren't many regs,
1471 they might leave no room for a group.
1472
1473 This was happening on the 386. To fix it, we added the code
1474 that calls possible_group_p, so that the lower class won't
1475 break up the last possible group.
1476
1477 Really fixing the problem would require changes above
1478 in counting the regs already spilled, and in choose_reload_regs.
1479 It might be hard to avoid introducing bugs there. */
1480
1481 for (class = 0; class < N_REG_CLASSES; class++)
1482 {
1483 /* First get the groups of registers.
1484 If we got single registers first, we might fragment
1485 possible groups. */
1486 while (max_groups[class] > 0)
1487 {
1488 /* If any single spilled regs happen to form groups,
1489 count them now. Maybe we don't really need
1490 to spill another group. */
1491 count_possible_groups (group_size, group_mode, max_groups);
1492
1493 /* Groups of size 2 (the only groups used on most machines)
1494 are treated specially. */
1495 if (group_size[class] == 2)
1496 {
1497 /* First, look for a register that will complete a group. */
1498 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1499 {
1500 int j = potential_reload_regs[i];
1501 int other;
1502 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1503 &&
1504 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1505 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1506 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1507 && HARD_REGNO_MODE_OK (other, group_mode[class])
1508 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1509 other)
1510 /* We don't want one part of another group.
1511 We could get "two groups" that overlap! */
1512 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1513 ||
1514 (j < FIRST_PSEUDO_REGISTER - 1
1515 && (other = j + 1, spill_reg_order[other] >= 0)
1516 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1517 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1518 && HARD_REGNO_MODE_OK (j, group_mode[class])
1519 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1520 other)
1521 && ! TEST_HARD_REG_BIT (counted_for_groups,
1522 other))))
1523 {
1524 register enum reg_class *p;
1525
1526 /* We have found one that will complete a group,
1527 so count off one group as provided. */
1528 max_groups[class]--;
1529 p = reg_class_superclasses[class];
1530 while (*p != LIM_REG_CLASSES)
1531 max_groups[(int) *p++]--;
1532
1533 /* Indicate both these regs are part of a group. */
1534 SET_HARD_REG_BIT (counted_for_groups, j);
1535 SET_HARD_REG_BIT (counted_for_groups, other);
1536 break;
1537 }
1538 }
1539 /* We can't complete a group, so start one. */
1540 if (i == FIRST_PSEUDO_REGISTER)
1541 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1542 {
1543 int j = potential_reload_regs[i];
1544 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1545 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1546 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1547 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1548 && HARD_REGNO_MODE_OK (j, group_mode[class])
1549 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1550 j + 1))
1551 break;
1552 }
1553
1554 /* I should be the index in potential_reload_regs
1555 of the new reload reg we have found. */
1556
1557 something_changed
1558 |= new_spill_reg (i, class, max_needs, 0,
1559 global, dumpfile);
1560 }
1561 else
1562 {
1563 /* For groups of more than 2 registers,
1564 look for a sufficient sequence of unspilled registers,
1565 and spill them all at once. */
1566 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1567 {
1568 int j = potential_reload_regs[i];
1569 int k;
1570 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1571 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1572 {
1573 /* Check each reg in the sequence. */
1574 for (k = 0; k < group_size[class]; k++)
1575 if (! (spill_reg_order[j + k] < 0
1576 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1577 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1578 break;
1579 /* We got a full sequence, so spill them all. */
1580 if (k == group_size[class])
1581 {
1582 register enum reg_class *p;
1583 for (k = 0; k < group_size[class]; k++)
1584 {
1585 int idx;
1586 SET_HARD_REG_BIT (counted_for_groups, j + k);
1587 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1588 if (potential_reload_regs[idx] == j + k)
1589 break;
1590 something_changed
1591 |= new_spill_reg (idx, class, max_needs, 0,
1592 global, dumpfile);
1593 }
1594
1595 /* We have found one that will complete a group,
1596 so count off one group as provided. */
1597 max_groups[class]--;
1598 p = reg_class_superclasses[class];
1599 while (*p != LIM_REG_CLASSES)
1600 max_groups[(int) *p++]--;
1601
1602 break;
1603 }
1604 }
1605 }
1606 }
1607 }
1608
1609 /* Now similarly satisfy all need for single registers. */
1610
1611 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1612 {
1613 /* Consider the potential reload regs that aren't
1614 yet in use as reload regs, in order of preference.
1615 Find the most preferred one that's in this class. */
1616
1617 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1618 if (potential_reload_regs[i] >= 0
1619 && TEST_HARD_REG_BIT (reg_class_contents[class],
1620 potential_reload_regs[i])
1621 /* If this reg will not be available for groups,
1622 pick one that does not foreclose possible groups.
1623 This is a kludge, and not very general,
1624 but it should be sufficient to make the 386 work,
1625 and the problem should not occur on machines with
1626 more registers. */
1627 && (max_nongroups[class] == 0
1628 || possible_group_p (potential_reload_regs[i], max_groups)))
1629 break;
1630
1631 /* I should be the index in potential_reload_regs
1632 of the new reload reg we have found. */
1633
1634 something_changed
1635 |= new_spill_reg (i, class, max_needs, max_nongroups,
1636 global, dumpfile);
1637 }
1638 }
1639 }
1640
1641 /* If global-alloc was run, notify it of any register eliminations we have
1642 done. */
1643 if (global)
1644 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1645 if (ep->can_eliminate)
1646 mark_elimination (ep->from, ep->to);
1647
1648 /* From now on, we need to emit any moves without making new pseudos. */
1649 reload_in_progress = 1;
1650
1651 /* Insert code to save and restore call-clobbered hard regs
a8efe40d
RK
1652 around calls. Tell if what mode to use so that we will process
1653 those insns in reload_as_needed if we have to. */
32131a9c
RK
1654
1655 if (caller_save_needed)
a8efe40d
RK
1656 save_call_clobbered_regs (num_eliminable ? QImode
1657 : caller_save_spill_class != NO_REGS ? HImode
1658 : VOIDmode);
32131a9c
RK
1659
1660 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1661 If that insn didn't set the register (i.e., it copied the register to
1662 memory), just delete that insn instead of the equivalencing insn plus
1663 anything now dead. If we call delete_dead_insn on that insn, we may
1664 delete the insn that actually sets the register if the register die
1665 there and that is incorrect. */
1666
1667 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1668 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1669 && GET_CODE (reg_equiv_init[i]) != NOTE)
1670 {
1671 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1672 delete_dead_insn (reg_equiv_init[i]);
1673 else
1674 {
1675 PUT_CODE (reg_equiv_init[i], NOTE);
1676 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1677 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1678 }
1679 }
1680
1681 /* Use the reload registers where necessary
1682 by generating move instructions to move the must-be-register
1683 values into or out of the reload registers. */
1684
a8efe40d
RK
1685 if (something_needs_reloads || something_needs_elimination
1686 || (caller_save_needed && num_eliminable)
1687 || caller_save_spill_class != NO_REGS)
32131a9c
RK
1688 reload_as_needed (first, global);
1689
1690 reload_in_progress = 0;
1691
1692 /* Now eliminate all pseudo regs by modifying them into
1693 their equivalent memory references.
1694 The REG-rtx's for the pseudos are modified in place,
1695 so all insns that used to refer to them now refer to memory.
1696
1697 For a reg that has a reg_equiv_address, all those insns
1698 were changed by reloading so that no insns refer to it any longer;
1699 but the DECL_RTL of a variable decl may refer to it,
1700 and if so this causes the debugging info to mention the variable. */
1701
1702 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1703 {
1704 rtx addr = 0;
ab1fd483 1705 int in_struct = 0;
32131a9c 1706 if (reg_equiv_mem[i])
ab1fd483
RS
1707 {
1708 addr = XEXP (reg_equiv_mem[i], 0);
1709 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1710 }
32131a9c
RK
1711 if (reg_equiv_address[i])
1712 addr = reg_equiv_address[i];
1713 if (addr)
1714 {
1715 if (reg_renumber[i] < 0)
1716 {
1717 rtx reg = regno_reg_rtx[i];
1718 XEXP (reg, 0) = addr;
1719 REG_USERVAR_P (reg) = 0;
ab1fd483 1720 MEM_IN_STRUCT_P (reg) = in_struct;
32131a9c
RK
1721 PUT_CODE (reg, MEM);
1722 }
1723 else if (reg_equiv_mem[i])
1724 XEXP (reg_equiv_mem[i], 0) = addr;
1725 }
1726 }
1727
1728#ifdef PRESERVE_DEATH_INFO_REGNO_P
1729 /* Make a pass over all the insns and remove death notes for things that
1730 are no longer registers or no longer die in the insn (e.g., an input
1731 and output pseudo being tied). */
1732
1733 for (insn = first; insn; insn = NEXT_INSN (insn))
1734 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1735 {
1736 rtx note, next;
1737
1738 for (note = REG_NOTES (insn); note; note = next)
1739 {
1740 next = XEXP (note, 1);
1741 if (REG_NOTE_KIND (note) == REG_DEAD
1742 && (GET_CODE (XEXP (note, 0)) != REG
1743 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1744 remove_note (insn, note);
1745 }
1746 }
1747#endif
1748
1749 /* Indicate that we no longer have known memory locations or constants. */
1750 reg_equiv_constant = 0;
1751 reg_equiv_memory_loc = 0;
1752}
1753\f
1754/* Nonzero if, after spilling reg REGNO for non-groups,
1755 it will still be possible to find a group if we still need one. */
1756
1757static int
1758possible_group_p (regno, max_groups)
1759 int regno;
1760 int *max_groups;
1761{
1762 int i;
1763 int class = (int) NO_REGS;
1764
1765 for (i = 0; i < (int) N_REG_CLASSES; i++)
1766 if (max_groups[i] > 0)
1767 {
1768 class = i;
1769 break;
1770 }
1771
1772 if (class == (int) NO_REGS)
1773 return 1;
1774
1775 /* Consider each pair of consecutive registers. */
1776 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
1777 {
1778 /* Ignore pairs that include reg REGNO. */
1779 if (i == regno || i + 1 == regno)
1780 continue;
1781
1782 /* Ignore pairs that are outside the class that needs the group.
1783 ??? Here we fail to handle the case where two different classes
1784 independently need groups. But this never happens with our
1785 current machine descriptions. */
1786 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
1787 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
1788 continue;
1789
1790 /* A pair of consecutive regs we can still spill does the trick. */
1791 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
1792 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
1793 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
1794 return 1;
1795
1796 /* A pair of one already spilled and one we can spill does it
1797 provided the one already spilled is not otherwise reserved. */
1798 if (spill_reg_order[i] < 0
1799 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
1800 && spill_reg_order[i + 1] >= 0
1801 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
1802 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
1803 return 1;
1804 if (spill_reg_order[i + 1] < 0
1805 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
1806 && spill_reg_order[i] >= 0
1807 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
1808 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
1809 return 1;
1810 }
1811
1812 return 0;
1813}
1814\f
/* Count any groups that can be formed from the registers recently spilled.
   This is done class by class, in order of ascending class number.
   Each group found is marked off against MAX_GROUPS for its class (and
   all superclasses), and its registers are recorded in
   counted_for_groups so they are not counted twice.  */

static void
count_possible_groups (group_size, group_mode, max_groups)
     int *group_size, *max_groups;
     enum machine_mode *group_mode;
{
  int i;
  /* Now find all consecutive groups of spilled registers
     and mark each group off against the need for such groups.
     But don't count them against ordinary need, yet.  */

  for (i = 0; i < N_REG_CLASSES; i++)
    if (group_size[i] > 1)
      {
	char regmask[FIRST_PSEUDO_REGISTER];
	int j;

	bzero (regmask, sizeof regmask);
	/* Make a mask of all the regs that are spill regs in class I.
	   Registers already reserved for a group or for a non-group
	   need are excluded.  */
	for (j = 0; j < n_spills; j++)
	  if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
	      && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
	      && ! TEST_HARD_REG_BIT (counted_for_nongroups,
				      spill_regs[j]))
	    regmask[spill_regs[j]] = 1;
	/* Find each consecutive group of them.  */
	for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
	  if (regmask[j] && j + group_size[i] <= FIRST_PSEUDO_REGISTER
	      /* Next line in case group-mode for this class
		 demands an even-odd pair.  */
	      && HARD_REGNO_MODE_OK (j, group_mode[i]))
	    {
	      int k;
	      /* Check that the GROUP_SIZE[I] - 1 following regs are also
		 available; K stops at the first gap.  */
	      for (k = 1; k < group_size[i]; k++)
		if (! regmask[j + k])
		  break;
	      if (k == group_size[i])
		{
		  /* We found a group.  Mark it off against this class's
		     need for groups, and against each superclass too.  */
		  register enum reg_class *p;
		  max_groups[i]--;
		  p = reg_class_superclasses[i];
		  while (*p != LIM_REG_CLASSES)
		    max_groups[(int) *p++]--;
		  /* Don't count these registers again.  */
		  for (k = 0; k < group_size[i]; k++)
		    SET_HARD_REG_BIT (counted_for_groups, j + k);
		}
	      /* NOTE(review): when the inner scan stopped early
		 (K < GROUP_SIZE[I]), this skips past regs J+1..J+K-1
		 without ever considering them as group starts, which
		 looks like it could miss a valid group beginning inside
		 the scanned run — verify intent.  */
	      j += k;
	    }
      }

}
1871\f
1872/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
1873 another mode that needs to be reloaded for the same register class CLASS.
1874 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
1875 ALLOCATE_MODE will never be smaller than OTHER_MODE.
1876
1877 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
1878 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
1879 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
1880 causes unnecessary failures on machines requiring alignment of register
1881 groups when the two modes are different sizes, because the larger mode has
1882 more strict alignment rules than the smaller mode. */
1883
1884static int
1885modes_equiv_for_class_p (allocate_mode, other_mode, class)
1886 enum machine_mode allocate_mode, other_mode;
1887 enum reg_class class;
1888{
1889 register int regno;
1890 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1891 {
1892 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
1893 && HARD_REGNO_MODE_OK (regno, allocate_mode)
1894 && ! HARD_REGNO_MODE_OK (regno, other_mode))
1895 return 0;
1896 }
1897 return 1;
1898}
1899
/* Add a new register to the tables of available spill-registers
   (as well as spilling all pseudos allocated to the register).
   I is the index of this register in potential_reload_regs.
   CLASS is the regclass whose need is being satisfied.
   MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
   so that this register can count off against them.
   MAX_NONGROUPS is 0 if this register is part of a group.
   GLOBAL and DUMPFILE are the same as the args that `reload' got.

   Returns nonzero if spilling may require another pass (new pseudos
   spilled, or a possible change in the frame/stack pointer offset).  */

static int
new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
     int i;
     int class;
     int *max_needs;
     int *max_nongroups;
     int global;
     FILE *dumpfile;
{
  register enum reg_class *p;
  int val;
  int regno = potential_reload_regs[i];

  if (i >= FIRST_PSEUDO_REGISTER)
    abort ();	/* Caller failed to find any register.  */

  /* A fixed or forbidden register cannot legally be used as a spill reg;
     reaching here with one indicates an unsatisfiable constraint.  */
  if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
    fatal ("fixed or forbidden register was spilled.\n\
This may be due to a compiler bug or to impossible asm statements.");

  /* Make reg REGNO an additional reload reg.  */

  potential_reload_regs[i] = -1;
  spill_regs[n_spills] = regno;
  spill_reg_order[regno] = n_spills;
  if (dumpfile)
    fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);

  /* Clear off the needs we just satisfied.  */

  max_needs[class]--;
  p = reg_class_superclasses[class];
  while (*p != LIM_REG_CLASSES)
    max_needs[(int) *p++]--;

  /* If this register satisfies a non-group need, also charge it against
     that need (for CLASS and all its superclasses) and reserve it so it
     will not later be counted as part of a group.  */
  if (max_nongroups && max_nongroups[class] > 0)
    {
      SET_HARD_REG_BIT (counted_for_nongroups, regno);
      max_nongroups[class]--;
      p = reg_class_superclasses[class];
      while (*p != LIM_REG_CLASSES)
	max_nongroups[(int) *p++]--;
    }

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);

  /* If there are some registers still to eliminate and this register
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.
     (If new pseudos were spilled, thus requiring more space, VAL would have
     been set non-zero by the call to spill_hard_reg above since additional
     reloads may be needed in that case.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */
  if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
    val = 1;

  regs_ever_live[spill_regs[n_spills]] = 1;
  n_spills++;

  return val;
}
1979\f
/* Delete an unneeded INSN and any previous insns whose sole purpose is
   loading data that is dead in INSN.  Deletion is done by turning the
   insn into a NOTE_INSN_DELETED note, so the insn chain stays intact.  */

static void
delete_dead_insn (insn)
     rtx insn;
{
  rtx prev = prev_real_insn (insn);
  rtx prev_dest;

  /* If the previous insn sets a register that dies in our insn, delete it
     too.  (The comma expression assigns PREV_DEST before testing it, so
     the later conditions may use it.)  This recurses, so a whole chain of
     dead loads feeding INSN is removed.  */
  if (prev && GET_CODE (PATTERN (prev)) == SET
      && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
      && reg_mentioned_p (prev_dest, PATTERN (insn))
      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
    delete_dead_insn (prev);

  /* Replace INSN itself with a deleted-insn note.  */
  PUT_CODE (insn, NOTE);
  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
  NOTE_SOURCE_FILE (insn) = 0;
}
2002
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.  */

static void
alter_reg (i, from_reg)
     register int i;
     int from_reg;
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (GET_CODE (regno_reg_rtx[i]) != REG)
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  REGNO (regno_reg_rtx[i])
    = reg_renumber[i] >= 0 ? reg_renumber[i] : i;

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && reg_n_refs[i] > 0
      && reg_equiv_constant[i] == 0
      && reg_equiv_memory_loc[i] == 0)
    {
      register rtx x;
      int inherent_size = PSEUDO_REGNO_BYTES (i);
      int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      int adjust = 0;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, than the previous slot.  */
      if (from_reg == -1)
	{
	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
#if BYTES_BIG_ENDIAN
	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.
	     This is so we can do a big-endian correction unconditionally
	     below.  */
	  adjust = inherent_size - total_size;
#endif
	}
      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size))
	x = spill_stack_slot[from_reg];
      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  The reused slot's mode/width are folded
	     in so the new slot can serve every pseudo spilled from
	     FROM_REG.  */
	  enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	    }
	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size, -1);
#if BYTES_BIG_ENDIAN
	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.
	     This is so we can do a big-endian correction unconditionally
	     below.  */
	  adjust = GET_MODE_SIZE (mode) - total_size;
#endif
	  spill_stack_slot[from_reg] = x;
	  spill_stack_slot_width[from_reg] = total_size;
	}

#if BYTES_BIG_ENDIAN
      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (inherent_size < total_size)
	adjust += (total_size - inherent_size);
#endif /* BYTES_BIG_ENDIAN */

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
	{
	  x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
		       plus_constant (XEXP (x, 0), adjust));
	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc[i] = x;
    }
}
2117
2118/* Mark the slots in regs_ever_live for the hard regs
2119 used by pseudo-reg number REGNO. */
2120
2121void
2122mark_home_live (regno)
2123 int regno;
2124{
2125 register int i, lim;
2126 i = reg_renumber[regno];
2127 if (i < 0)
2128 return;
2129 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2130 while (i < lim)
2131 regs_ever_live[i++] = 1;
2132}
2133\f
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is non-zero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.

   May recurse into itself (for LABEL_REFs, REG_LABEL notes, address
   vectors and jump patterns) and may clear can_eliminate in
   reg_eliminate[] when offsets are found to disagree.  */

static void
set_label_offsets (x, insn, initial_p)
     rtx x;
     rtx insn;
     int initial_p;
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      x = XEXP (x, 0);

      /* ... fall through ...  */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x)][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && GET_CODE (tem) == BARRIER)
	{
	  /* Adopt the recorded offsets and recount how many eliminable
	     regs are away from their initial offsets.  */
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (x)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x)][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ...  */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
	 and hence must have all eliminations at their initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the address vector must be at their initial
	 offsets.  We want the first field for ADDR_VEC and the second
	 field for ADDR_DIFF_VEC.  */

      for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Process each arm; `break' out of this switch (falling to the
	     code below) if an arm is neither a label, PC, nor RETURN.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
    }
}
2285\f
/* Used for communication between the next two functions to properly share
   the vector for an ASM_OPERANDS: the old input vector seen and the
   (possibly copied) vector to use in its place.  */

static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2290
a8fdc208 2291/* Scan X and replace any eliminable registers (such as fp) with a
32131a9c
RK
2292 replacement (such as sp), plus an offset.
2293
2294 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2295 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2296 MEM, we are allowed to replace a sum of a register and the constant zero
2297 with the register, which we cannot do outside a MEM. In addition, we need
2298 to record the fact that a register is referenced outside a MEM.
2299
2300 If INSN is nonzero, it is the insn containing X. If we replace a REG
2301 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2302 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2303 that the REG is being modified.
2304
2305 If we see a modification to a register we know about, take the
2306 appropriate action (see case SET, below).
2307
2308 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2309 replacements done assuming all offsets are at their initial values. If
2310 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2311 encounter, return the actual location so that find_reloads will do
2312 the proper thing. */
2313
2314rtx
2315eliminate_regs (x, mem_mode, insn)
2316 rtx x;
2317 enum machine_mode mem_mode;
2318 rtx insn;
2319{
2320 enum rtx_code code = GET_CODE (x);
2321 struct elim_table *ep;
2322 int regno;
2323 rtx new;
2324 int i, j;
2325 char *fmt;
2326 int copied = 0;
2327
2328 switch (code)
2329 {
2330 case CONST_INT:
2331 case CONST_DOUBLE:
2332 case CONST:
2333 case SYMBOL_REF:
2334 case CODE_LABEL:
2335 case PC:
2336 case CC0:
2337 case ASM_INPUT:
2338 case ADDR_VEC:
2339 case ADDR_DIFF_VEC:
2340 case RETURN:
2341 return x;
2342
2343 case REG:
2344 regno = REGNO (x);
2345
2346 /* First handle the case where we encounter a bare register that
2347 is eliminable. Replace it with a PLUS. */
2348 if (regno < FIRST_PSEUDO_REGISTER)
2349 {
2350 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2351 ep++)
2352 if (ep->from_rtx == x && ep->can_eliminate)
2353 {
2354 if (! mem_mode)
2355 ep->ref_outside_mem = 1;
2356 return plus_constant (ep->to_rtx, ep->previous_offset);
2357 }
2358
2359 }
2360 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2361 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2362 {
2363 /* In this case, find_reloads would attempt to either use an
2364 incorrect address (if something is not at its initial offset)
2365 or substitute an replaced address into an insn (which loses
2366 if the offset is changed by some later action). So we simply
2367 return the replaced stack slot (assuming it is changed by
2368 elimination) and ignore the fact that this is actually a
2369 reference to the pseudo. Ensure we make a copy of the
2370 address in case it is shared. */
2371 new = eliminate_regs (reg_equiv_memory_loc[regno], mem_mode, 0);
2372 if (new != reg_equiv_memory_loc[regno])
2373 return copy_rtx (new);
2374 }
2375 return x;
2376
2377 case PLUS:
2378 /* If this is the sum of an eliminable register and a constant, rework
2379 the sum. */
2380 if (GET_CODE (XEXP (x, 0)) == REG
2381 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2382 && CONSTANT_P (XEXP (x, 1)))
2383 {
2384 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2385 ep++)
2386 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2387 {
2388 if (! mem_mode)
2389 ep->ref_outside_mem = 1;
2390
2391 /* The only time we want to replace a PLUS with a REG (this
2392 occurs when the constant operand of the PLUS is the negative
2393 of the offset) is when we are inside a MEM. We won't want
2394 to do so at other times because that would change the
2395 structure of the insn in a way that reload can't handle.
2396 We special-case the commonest situation in
2397 eliminate_regs_in_insn, so just replace a PLUS with a
2398 PLUS here, unless inside a MEM. */
2399 if (mem_mode && GET_CODE (XEXP (x, 1)) == CONST_INT
2400 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2401 return ep->to_rtx;
2402 else
2403 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2404 plus_constant (XEXP (x, 1),
2405 ep->previous_offset));
2406 }
2407
2408 /* If the register is not eliminable, we are done since the other
2409 operand is a constant. */
2410 return x;
2411 }
2412
2413 /* If this is part of an address, we want to bring any constant to the
2414 outermost PLUS. We will do this by doing register replacement in
2415 our operands and seeing if a constant shows up in one of them.
2416
2417 We assume here this is part of an address (or a "load address" insn)
2418 since an eliminable register is not likely to appear in any other
2419 context.
2420
2421 If we have (plus (eliminable) (reg)), we want to produce
2422 (plus (plus (replacement) (reg) (const))). If this was part of a
2423 normal add insn, (plus (replacement) (reg)) will be pushed as a
2424 reload. This is the desired action. */
2425
2426 {
2427 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, 0);
2428 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, 0);
2429
2430 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2431 {
2432 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2433 didn't get a hard register but has a reg_equiv_constant,
32131a9c
RK
2434 we must replace the constant here since it may no longer
2435 be in the position of any operand. */
2436 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2437 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2438 && reg_renumber[REGNO (new1)] < 0
2439 && reg_equiv_constant != 0
2440 && reg_equiv_constant[REGNO (new1)] != 0)
2441 new1 = reg_equiv_constant[REGNO (new1)];
2442 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2443 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2444 && reg_renumber[REGNO (new0)] < 0
2445 && reg_equiv_constant[REGNO (new0)] != 0)
2446 new0 = reg_equiv_constant[REGNO (new0)];
2447
2448 new = form_sum (new0, new1);
2449
2450 /* As above, if we are not inside a MEM we do not want to
2451 turn a PLUS into something else. We might try to do so here
2452 for an addition of 0 if we aren't optimizing. */
2453 if (! mem_mode && GET_CODE (new) != PLUS)
2454 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2455 else
2456 return new;
2457 }
2458 }
2459 return x;
2460
2461 case EXPR_LIST:
2462 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2463 if (XEXP (x, 0))
2464 {
2465 new = eliminate_regs (XEXP (x, 0), mem_mode, 0);
2466 if (new != XEXP (x, 0))
2467 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2468 }
2469
2470 /* ... fall through ... */
2471
2472 case INSN_LIST:
2473 /* Now do eliminations in the rest of the chain. If this was
2474 an EXPR_LIST, this might result in allocating more memory than is
2475 strictly needed, but it simplifies the code. */
2476 if (XEXP (x, 1))
2477 {
2478 new = eliminate_regs (XEXP (x, 1), mem_mode, 0);
2479 if (new != XEXP (x, 1))
2480 return gen_rtx (INSN_LIST, GET_MODE (x), XEXP (x, 0), new);
2481 }
2482 return x;
2483
2484 case CALL:
2485 case COMPARE:
2486 case MINUS:
2487 case MULT:
2488 case DIV: case UDIV:
2489 case MOD: case UMOD:
2490 case AND: case IOR: case XOR:
2491 case LSHIFT: case ASHIFT: case ROTATE:
2492 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2493 case NE: case EQ:
2494 case GE: case GT: case GEU: case GTU:
2495 case LE: case LT: case LEU: case LTU:
2496 {
2497 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, 0);
2498 rtx new1 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, 0) : 0;
2499
2500 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2501 return gen_rtx (code, GET_MODE (x), new0, new1);
2502 }
2503 return x;
2504
2505 case PRE_INC:
2506 case POST_INC:
2507 case PRE_DEC:
2508 case POST_DEC:
2509 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2510 if (ep->to_rtx == XEXP (x, 0))
2511 {
2512 if (code == PRE_DEC || code == POST_DEC)
2513 ep->offset += GET_MODE_SIZE (mem_mode);
2514 else
2515 ep->offset -= GET_MODE_SIZE (mem_mode);
2516 }
2517
2518 /* Fall through to generic unary operation case. */
2519 case USE:
2520 case STRICT_LOW_PART:
2521 case NEG: case NOT:
2522 case SIGN_EXTEND: case ZERO_EXTEND:
2523 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2524 case FLOAT: case FIX:
2525 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2526 case ABS:
2527 case SQRT:
2528 case FFS:
2529 new = eliminate_regs (XEXP (x, 0), mem_mode, 0);
2530 if (new != XEXP (x, 0))
2531 return gen_rtx (code, GET_MODE (x), new);
2532 return x;
2533
2534 case SUBREG:
2535 /* Similar to above processing, but preserve SUBREG_WORD.
2536 Convert (subreg (mem)) to (mem) if not paradoxical.
2537 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2538 pseudo didn't get a hard reg, we must replace this with the
2539 eliminated version of the memory location because push_reloads
2540 may do the replacement in certain circumstances. */
2541 if (GET_CODE (SUBREG_REG (x)) == REG
2542 && (GET_MODE_SIZE (GET_MODE (x))
2543 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2544 && reg_equiv_memory_loc != 0
2545 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2546 {
2547 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2548 mem_mode, 0);
2549
2550 /* If we didn't change anything, we must retain the pseudo. */
2551 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2552 new = XEXP (x, 0);
2553 else
2554 /* Otherwise, ensure NEW isn't shared in case we have to reload
2555 it. */
2556 new = copy_rtx (new);
2557 }
2558 else
2559 new = eliminate_regs (SUBREG_REG (x), mem_mode, 0);
2560
2561 if (new != XEXP (x, 0))
2562 {
2563 if (GET_CODE (new) == MEM
2564 && (GET_MODE_SIZE (GET_MODE (x))
2565 <= GET_MODE_SIZE (GET_MODE (new))))
2566 {
2567 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2568 enum machine_mode mode = GET_MODE (x);
2569
2570#if BYTES_BIG_ENDIAN
2571 offset += (MIN (UNITS_PER_WORD,
2572 GET_MODE_SIZE (GET_MODE (new)))
2573 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2574#endif
2575
2576 PUT_MODE (new, mode);
2577 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2578 return new;
2579 }
2580 else
2581 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2582 }
2583
2584 return x;
2585
2586 case CLOBBER:
2587 /* If clobbering a register that is the replacement register for an
d45cf215 2588 elimination we still think can be performed, note that it cannot
32131a9c
RK
2589 be performed. Otherwise, we need not be concerned about it. */
2590 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2591 if (ep->to_rtx == XEXP (x, 0))
2592 ep->can_eliminate = 0;
2593
2594 return x;
2595
2596 case ASM_OPERANDS:
2597 {
2598 rtx *temp_vec;
2599 /* Properly handle sharing input and constraint vectors. */
2600 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2601 {
2602 /* When we come to a new vector not seen before,
2603 scan all its elements; keep the old vector if none
2604 of them changes; otherwise, make a copy. */
2605 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2606 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2607 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2608 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
2609 mem_mode, 0);
2610
2611 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2612 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2613 break;
2614
2615 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2616 new_asm_operands_vec = old_asm_operands_vec;
2617 else
2618 new_asm_operands_vec
2619 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2620 }
2621
2622 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2623 if (new_asm_operands_vec == old_asm_operands_vec)
2624 return x;
2625
2626 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2627 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2628 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2629 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2630 ASM_OPERANDS_SOURCE_FILE (x),
2631 ASM_OPERANDS_SOURCE_LINE (x));
2632 new->volatil = x->volatil;
2633 return new;
2634 }
2635
2636 case SET:
2637 /* Check for setting a register that we know about. */
2638 if (GET_CODE (SET_DEST (x)) == REG)
2639 {
2640 /* See if this is setting the replacement register for an
a8fdc208 2641 elimination.
32131a9c
RK
2642
2643 If DEST is the frame pointer, we do nothing because we assume that
2644 all assignments to the frame pointer are for non-local gotos and
2645 are being done at a time when they are valid and do not disturb
2646 anything else. Some machines want to eliminate a fake argument
2647 pointer with either the frame or stack pointer. Assignments to
2648 the frame pointer must not prevent this elimination. */
2649
2650 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2651 ep++)
2652 if (ep->to_rtx == SET_DEST (x)
2653 && SET_DEST (x) != frame_pointer_rtx)
2654 {
6dc42e49 2655 /* If it is being incremented, adjust the offset. Otherwise,
32131a9c
RK
2656 this elimination can't be done. */
2657 rtx src = SET_SRC (x);
2658
2659 if (GET_CODE (src) == PLUS
2660 && XEXP (src, 0) == SET_DEST (x)
2661 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2662 ep->offset -= INTVAL (XEXP (src, 1));
2663 else
2664 ep->can_eliminate = 0;
2665 }
2666
2667 /* Now check to see we are assigning to a register that can be
2668 eliminated. If so, it must be as part of a PARALLEL, since we
2669 will not have been called if this is a single SET. So indicate
2670 that we can no longer eliminate this reg. */
2671 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2672 ep++)
2673 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
2674 ep->can_eliminate = 0;
2675 }
2676
2677 /* Now avoid the loop below in this common case. */
2678 {
2679 rtx new0 = eliminate_regs (SET_DEST (x), 0, 0);
2680 rtx new1 = eliminate_regs (SET_SRC (x), 0, 0);
2681
2682 /* If SET_DEST changed from a REG to a MEM and INSN is non-zero,
2683 write a CLOBBER insn. */
2684 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
2685 && insn != 0)
2686 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
2687
2688 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
2689 return gen_rtx (SET, VOIDmode, new0, new1);
2690 }
2691
2692 return x;
2693
2694 case MEM:
2695 /* Our only special processing is to pass the mode of the MEM to our
2696 recursive call and copy the flags. While we are here, handle this
2697 case more efficiently. */
2698 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), 0);
2699 if (new != XEXP (x, 0))
2700 {
2701 new = gen_rtx (MEM, GET_MODE (x), new);
2702 new->volatil = x->volatil;
2703 new->unchanging = x->unchanging;
2704 new->in_struct = x->in_struct;
2705 return new;
2706 }
2707 else
2708 return x;
2709 }
2710
2711 /* Process each of our operands recursively. If any have changed, make a
2712 copy of the rtx. */
2713 fmt = GET_RTX_FORMAT (code);
2714 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2715 {
2716 if (*fmt == 'e')
2717 {
2718 new = eliminate_regs (XEXP (x, i), mem_mode, 0);
2719 if (new != XEXP (x, i) && ! copied)
2720 {
2721 rtx new_x = rtx_alloc (code);
2722 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
2723 + (sizeof (new_x->fld[0])
2724 * GET_RTX_LENGTH (code))));
2725 x = new_x;
2726 copied = 1;
2727 }
2728 XEXP (x, i) = new;
2729 }
2730 else if (*fmt == 'E')
2731 {
2732 int copied_vec = 0;
2733 for (j = 0; j < XVECLEN (x, i); j++)
2734 {
2735 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
2736 if (new != XVECEXP (x, i, j) && ! copied_vec)
2737 {
2738 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2739 &XVECEXP (x, i, 0));
2740 if (! copied)
2741 {
2742 rtx new_x = rtx_alloc (code);
2743 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
2744 + (sizeof (new_x->fld[0])
2745 * GET_RTX_LENGTH (code))));
2746 x = new_x;
2747 copied = 1;
2748 }
2749 XVEC (x, i) = new_v;
2750 copied_vec = 1;
2751 }
2752 XVECEXP (x, i, j) = new;
2753 }
2754 }
2755 }
2756
2757 return x;
2758}
2759\f
/* Scan INSN and eliminate all eliminable registers in it.

   If REPLACE is nonzero, do the replacement destructively.  Also
   delete the insn as dead if it is setting an eliminable register.

   If REPLACE is zero, do all our allocations in reload_obstack.

   If no eliminations were done and this insn doesn't require any elimination
   processing (these are not identical conditions: it might be updating sp,
   but not referencing fp; this needs to be seen during reload_as_needed so
   that the offset between fp and sp can be taken into consideration), zero
   is returned.  Otherwise, 1 is returned.  */

static int
eliminate_regs_in_insn (insn, replace)
     rtx insn;
     int replace;
{
  rtx old_body = PATTERN (insn);
  rtx new_body;
  int val = 0;			/* Return value: 1 if anything changed.  */
  struct elim_table *ep;

  /* A tentative substitution must not allocate permanently; put such
     allocations on the reload obstack so they can be discarded.  */
  if (! replace)
    push_obstacks (&reload_obstack, &reload_obstack);

  if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
      && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
	  {
	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      delete_dead_insn (insn);
	    val = 1;
	    goto done;
	  }

      /* Check for (set (reg) (plus (reg from) (offset))) where the offset
	 in the insn is the negative of the offset in FROM.  Substitute
	 (set (reg) (reg to)) for the insn and change its code.

	 We have to do this here, rather than in eliminate_regs, so that we
	 can change the insn code.  */

      if (GET_CODE (SET_SRC (old_body)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
	  && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
	      && ep->can_eliminate
	      && ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
	    {
	      /* The addition cancels the elimination offset exactly, so
		 the insn reduces to a plain register copy.  Force
		 re-recognition by clearing INSN_CODE.  */
	      PATTERN (insn) = gen_rtx (SET, VOIDmode,
					SET_DEST (old_body), ep->to_rtx);
	      INSN_CODE (insn) = -1;
	      val = 1;
	      goto done;
	    }
    }

  old_asm_operands_vec = 0;

  /* Replace the body of this insn with a substituted form.  If we changed
     something, return non-zero.  If this is the final call for this
     insn (REPLACE is non-zero), do the elimination in REG_NOTES as well.

     If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.  */

  new_body = eliminate_regs (old_body, 0, replace ? insn : 0);
  if (new_body != old_body)
    {
      if (GET_CODE (old_body) != SET || GET_CODE (SET_SRC (old_body)) != PLUS
	  || ! validate_change (insn, &PATTERN (insn), new_body, 0))
	PATTERN (insn) = new_body;

      if (replace && REG_NOTES (insn))
	REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, 0);
      val = 1;
    }

  /* Loop through all elimination pairs.  See if any have changed and
     recalculate the number not at initial offset.

     Compute the maximum offset (minimum offset if the stack does not
     grow downward) for each elimination pair.

     We also detect a case where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  num_not_at_initial_offset = 0;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
	val = 1;

      ep->previous_offset = ep->offset;
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
	num_not_at_initial_offset++;

#ifdef STACK_GROWS_DOWNWARD
      ep->max_offset = MAX (ep->max_offset, ep->offset);
#else
      ep->max_offset = MIN (ep->max_offset, ep->offset);
#endif
    }

 done:
  if (! replace)
    pop_obstacks ();

  return val;
}
2895
2896/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
2897 replacement we currently believe is valid, mark it as not eliminable if X
2898 modifies DEST in any way other than by adding a constant integer to it.
2899
2900 If DEST is the frame pointer, we do nothing because we assume that
2901 all assignments to the frame pointer are nonlocal gotos and are being done
2902 at a time when they are valid and do not disturb anything else.
2903 Some machines want to eliminate a fake argument pointer with either the
2904 frame or stack pointer. Assignments to the frame pointer must not prevent
2905 this elimination.
2906
2907 Called via note_stores from reload before starting its passes to scan
2908 the insns of the function. */
2909
2910static void
2911mark_not_eliminable (dest, x)
2912 rtx dest;
2913 rtx x;
2914{
2915 register int i;
2916
2917 /* A SUBREG of a hard register here is just changing its mode. We should
2918 not see a SUBREG of an eliminable hard register, but check just in
2919 case. */
2920 if (GET_CODE (dest) == SUBREG)
2921 dest = SUBREG_REG (dest);
2922
2923 if (dest == frame_pointer_rtx)
2924 return;
2925
2926 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2927 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
2928 && (GET_CODE (x) != SET
2929 || GET_CODE (SET_SRC (x)) != PLUS
2930 || XEXP (SET_SRC (x), 0) != dest
2931 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
2932 {
2933 reg_eliminate[i].can_eliminate_previous
2934 = reg_eliminate[i].can_eliminate = 0;
2935 num_eliminable--;
2936 }
2937}
2938\f
/* Kick all pseudos out of hard register REGNO.
   If GLOBAL is nonzero, try to find someplace else to put them.
   If DUMPFILE is nonzero, log actions taken on that file.

   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In that case, no
   pseudos are allowed to be in the register, even if they are only in a
   block that doesn't require spill registers, unlike the case when we are
   spilling this hard reg to produce another spill register.

   Return nonzero if any pseudos needed to be kicked out.  */

static int
spill_hard_reg (regno, global, dumpfile, cant_eliminate)
     register int regno;
     int global;
     FILE *dumpfile;
     int cant_eliminate;
{
  int something_changed = 0;
  register int i;

  SET_HARD_REG_BIT (forbidden_regs, regno);

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    /* Pseudo I overlaps REGNO when its first hard reg is at or below
       REGNO and its register span extends past REGNO.  */
    if (reg_renumber[i] >= 0
	&& reg_renumber[i] <= regno
	&& (reg_renumber[i]
	    + HARD_REGNO_NREGS (reg_renumber[i],
				PSEUDO_REGNO_MODE (i))
	    > regno))
      {
	enum reg_class class = REGNO_REG_CLASS (regno);

	/* If this register belongs solely to a basic block which needed no
	   spilling of any class that this register is contained in,
	   leave it be, unless we are spilling this register because
	   it was a hard register that can't be eliminated.  */

	/* NOTE: basic_block_needs[0] non-zero is used here as the flag
	   that per-block need information was collected — presumably set
	   by the caller; verify against reload's setup.  */
	if (! cant_eliminate
	    && basic_block_needs[0]
	    && reg_basic_block[i] >= 0
	    && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
	  {
	    enum reg_class *p;

	    /* Also make sure no superclass of CLASS needed spills in
	       this pseudo's block before deciding to keep it.  */
	    for (p = reg_class_superclasses[(int) class];
		 *p != LIM_REG_CLASSES; p++)
	      if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
		break;

	    if (*p == LIM_REG_CLASSES)
	      continue;
	  }

	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	/* We will need to scan everything again.  */
	something_changed = 1;
	if (global)
	  retry_global_alloc (i, forbidden_regs);

	/* Give the pseudo a stack slot (or its new hard reg).  */
	alter_reg (i, regno);
	if (dumpfile)
	  {
	    if (reg_renumber[i] == -1)
	      fprintf (dumpfile, " Register %d now on stack.\n\n", i);
	    else
	      fprintf (dumpfile, " Register %d now in %d.\n\n",
		       i, reg_renumber[i]);
	  }
      }

  return something_changed;
}
3017\f
3018/* Find all paradoxical subregs within X and update reg_max_ref_width. */
3019
3020static void
3021scan_paradoxical_subregs (x)
3022 register rtx x;
3023{
3024 register int i;
3025 register char *fmt;
3026 register enum rtx_code code = GET_CODE (x);
3027
3028 switch (code)
3029 {
3030 case CONST_INT:
3031 case CONST:
3032 case SYMBOL_REF:
3033 case LABEL_REF:
3034 case CONST_DOUBLE:
3035 case CC0:
3036 case PC:
3037 case REG:
3038 case USE:
3039 case CLOBBER:
3040 return;
3041
3042 case SUBREG:
3043 if (GET_CODE (SUBREG_REG (x)) == REG
3044 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3045 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3046 = GET_MODE_SIZE (GET_MODE (x));
3047 return;
3048 }
3049
3050 fmt = GET_RTX_FORMAT (code);
3051 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3052 {
3053 if (fmt[i] == 'e')
3054 scan_paradoxical_subregs (XEXP (x, i));
3055 else if (fmt[i] == 'E')
3056 {
3057 register int j;
3058 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3059 scan_paradoxical_subregs (XVECEXP (x, i, j));
3060 }
3061 }
3062}
3063\f
/* A (hard regno, use count) pair used to order hard registers by how
   costly they would be to spill.  */
struct hard_reg_n_uses { int regno; int uses; };

/* qsort comparison function: order struct hard_reg_n_uses entries by
   increasing use count.  Returns negative, zero, or positive as *P1
   sorts before, equal to, or after *P2.  */

static int
hard_reg_use_compare (p1, p2)
     struct hard_reg_n_uses *p1, *p2;
{
  /* Compare instead of subtracting: the use counts of fixed and
     explicitly-used registers are inflated by multiples of the total
     reference count (see order_regs_for_reload), so a subtraction
     could overflow int and yield the wrong sign.  */
  if (p1->uses != p2->uses)
    return p1->uses < p2->uses ? -1 : 1;
  /* If regs are equally good, sort by regno,
     so that the results of qsort leave nothing to chance.
     (Regnos are small, so this subtraction cannot overflow.)  */
  return p1->regno - p2->regno;
}
3076
/* Choose the order to consider regs for use as reload registers
   based on how much trouble would be caused by spilling one.
   Store them in order of decreasing preference in potential_reload_regs.  */

static void
order_regs_for_reload ()
{
  register int i;
  register int o = 0;		/* Next free slot in potential_reload_regs.  */
  int large = 0;		/* Total refs of all pseudos; used below as an
				   "infinite" penalty unit so fixed/explicit
				   regs sort after every normal reg.  */

  struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];

  CLEAR_HARD_REG_SET (bad_spill_regs);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    potential_reload_regs[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      hard_reg_n_uses[i].uses = 0;
      hard_reg_n_uses[i].regno = i;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (regno >= 0)
	{
	  /* Charge this pseudo's reference count to every hard reg
	     it occupies.  */
	  int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
	  while (regno < lim)
	    hard_reg_n_uses[regno++].uses += reg_n_refs[i];
	}
      large += reg_n_refs[i];
    }

  /* Now fixed registers (which cannot safely be used for reloading)
     get a very high use count so they will be considered least desirable.
     Registers used explicitly in the rtl code are almost as bad.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (fixed_regs[i])
	{
	  hard_reg_n_uses[i].uses += 2 * large + 2;
	  SET_HARD_REG_BIT (bad_spill_regs, i);
	}
      else if (regs_explicitly_used[i])
	{
	  hard_reg_n_uses[i].uses += large + 1;
	  /* ??? We are doing this here because of the potential that
	     bad code may be generated if a register explicitly used in
	     an insn was used as a spill register for that insn.  But
	     not using these as spill registers may lose on some machines.
	     We'll have to see how this works out.  */
	  SET_HARD_REG_BIT (bad_spill_regs, i);
	}
    }
  /* The frame pointer is treated like a fixed register.  */
  hard_reg_n_uses[FRAME_POINTER_REGNUM].uses += 2 * large + 2;
  SET_HARD_REG_BIT (bad_spill_regs, FRAME_POINTER_REGNUM);

#ifdef ELIMINABLE_REGS
  /* If registers other than the frame pointer are eliminable, mark them as
     poor choices.  */
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
      SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
    }
#endif

  /* Prefer registers not so far used, for use in temporary loading.
     Among them, if REG_ALLOC_ORDER is defined, use that order.
     Otherwise, prefer registers not preserved by calls.  */

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int regno = reg_alloc_order[i];

      if (hard_reg_n_uses[regno].uses == 0)
	potential_reload_regs[o++] = regno;
    }
#else
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
#endif

  qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
	 sizeof hard_reg_n_uses[0], hard_reg_use_compare);

  /* Now add the regs that are already used,
     preferring those used less often.  The fixed and otherwise forbidden
     registers will be at the end of this list.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses != 0)
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
}
3187\f
3188/* Reload pseudo-registers into hard regs around each insn as needed.
3189 Additional register load insns are output before the insn that needs it
3190 and perhaps store insns after insns that modify the reloaded pseudo reg.
3191
3192 reg_last_reload_reg and reg_reloaded_contents keep track of
3193 which pseudo-registers are already available in reload registers.
3194 We update these for the reloads that we perform,
3195 as the insns are scanned. */
3196
3197static void
3198reload_as_needed (first, live_known)
3199 rtx first;
3200 int live_known;
3201{
3202 register rtx insn;
3203 register int i;
3204 int this_block = 0;
3205 rtx x;
3206 rtx after_call = 0;
3207
3208 bzero (spill_reg_rtx, sizeof spill_reg_rtx);
3209 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3210 bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
3211 reg_has_output_reload = (char *) alloca (max_regno);
3212 for (i = 0; i < n_spills; i++)
3213 {
3214 reg_reloaded_contents[i] = -1;
3215 reg_reloaded_insn[i] = 0;
3216 }
3217
3218 /* Reset all offsets on eliminable registers to their initial values. */
3219#ifdef ELIMINABLE_REGS
3220 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3221 {
3222 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3223 reg_eliminate[i].initial_offset)
3224 reg_eliminate[i].previous_offset
3225 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3226 }
3227#else
3228 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3229 reg_eliminate[0].previous_offset
3230 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3231#endif
3232
3233 num_not_at_initial_offset = 0;
3234
3235 for (insn = first; insn;)
3236 {
3237 register rtx next = NEXT_INSN (insn);
3238
3239 /* Notice when we move to a new basic block. */
aa2c50d6 3240 if (live_known && this_block + 1 < n_basic_blocks
32131a9c
RK
3241 && insn == basic_block_head[this_block+1])
3242 ++this_block;
3243
3244 /* If we pass a label, copy the offsets from the label information
3245 into the current offsets of each elimination. */
3246 if (GET_CODE (insn) == CODE_LABEL)
2a4b5f3b
RK
3247 {
3248 num_not_at_initial_offset = 0;
3249 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3250 {
3251 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3252 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
1d0d98f3
RK
3253 if (reg_eliminate[i].can_eliminate
3254 && (reg_eliminate[i].offset
3255 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
3256 num_not_at_initial_offset++;
3257 }
3258 }
32131a9c
RK
3259
3260 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3261 {
3262 rtx avoid_return_reg = 0;
3263
3264#ifdef SMALL_REGISTER_CLASSES
3265 /* Set avoid_return_reg if this is an insn
3266 that might use the value of a function call. */
3267 if (GET_CODE (insn) == CALL_INSN)
3268 {
3269 if (GET_CODE (PATTERN (insn)) == SET)
3270 after_call = SET_DEST (PATTERN (insn));
3271 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3272 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3273 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3274 else
3275 after_call = 0;
3276 }
3277 else if (after_call != 0
3278 && !(GET_CODE (PATTERN (insn)) == SET
3279 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3280 {
3281 if (reg_mentioned_p (after_call, PATTERN (insn)))
3282 avoid_return_reg = after_call;
3283 after_call = 0;
3284 }
3285#endif /* SMALL_REGISTER_CLASSES */
3286
2758481d
RS
3287 /* If this is a USE and CLOBBER of a MEM, ensure that any
3288 references to eliminable registers have been removed. */
3289
3290 if ((GET_CODE (PATTERN (insn)) == USE
3291 || GET_CODE (PATTERN (insn)) == CLOBBER)
3292 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3293 XEXP (XEXP (PATTERN (insn), 0), 0)
3294 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3295 GET_MODE (XEXP (PATTERN (insn), 0)), 0);
3296
32131a9c
RK
3297 /* If we need to do register elimination processing, do so.
3298 This might delete the insn, in which case we are done. */
3299 if (num_eliminable && GET_MODE (insn) == QImode)
3300 {
3301 eliminate_regs_in_insn (insn, 1);
3302 if (GET_CODE (insn) == NOTE)
3303 {
3304 insn = next;
3305 continue;
3306 }
3307 }
3308
3309 if (GET_MODE (insn) == VOIDmode)
3310 n_reloads = 0;
3311 /* First find the pseudo regs that must be reloaded for this insn.
3312 This info is returned in the tables reload_... (see reload.h).
3313 Also modify the body of INSN by substituting RELOAD
3314 rtx's for those pseudo regs. */
3315 else
3316 {
3317 bzero (reg_has_output_reload, max_regno);
3318 CLEAR_HARD_REG_SET (reg_is_output_reload);
3319
3320 find_reloads (insn, 1, spill_indirect_levels, live_known,
3321 spill_reg_order);
3322 }
3323
3324 if (n_reloads > 0)
3325 {
3326 int class;
3327
3328 /* If this block has not had spilling done for a
a8fdc208 3329 particular class, deactivate any optional reloads
32131a9c
RK
3330 of that class lest they try to use a spill-reg which isn't
3331 available here. If we have any non-optionals that need a
3332 spill reg, abort. */
3333
3334 for (class = 0; class < N_REG_CLASSES; class++)
3335 if (basic_block_needs[class] != 0
3336 && basic_block_needs[class][this_block] == 0)
3337 for (i = 0; i < n_reloads; i++)
3338 if (class == (int) reload_reg_class[i])
3339 {
3340 if (reload_optional[i])
3341 reload_in[i] = reload_out[i] = reload_reg_rtx[i] = 0;
3342 else if (reload_reg_rtx[i] == 0)
3343 abort ();
3344 }
3345
3346 /* Now compute which reload regs to reload them into. Perhaps
3347 reusing reload regs from previous insns, or else output
3348 load insns to reload them. Maybe output store insns too.
3349 Record the choices of reload reg in reload_reg_rtx. */
3350 choose_reload_regs (insn, avoid_return_reg);
3351
3352 /* Generate the insns to reload operands into or out of
3353 their reload regs. */
3354 emit_reload_insns (insn);
3355
3356 /* Substitute the chosen reload regs from reload_reg_rtx
3357 into the insn's body (or perhaps into the bodies of other
3358 load and store insn that we just made for reloading
3359 and that we moved the structure into). */
3360 subst_reloads ();
3361 }
3362 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3363 is no longer validly lying around to save a future reload.
3364 Note that this does not detect pseudos that were reloaded
3365 for this insn in order to be stored in
3366 (obeying register constraints). That is correct; such reload
3367 registers ARE still valid. */
3368 note_stores (PATTERN (insn), forget_old_reloads_1);
3369
3370 /* There may have been CLOBBER insns placed after INSN. So scan
3371 between INSN and NEXT and use them to forget old reloads. */
3372 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3373 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3374 note_stores (PATTERN (x), forget_old_reloads_1);
3375
3376#ifdef AUTO_INC_DEC
3377 /* Likewise for regs altered by auto-increment in this insn.
3378 But note that the reg-notes are not changed by reloading:
3379 they still contain the pseudo-regs, not the spill regs. */
3380 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3381 if (REG_NOTE_KIND (x) == REG_INC)
3382 {
3383 /* See if this pseudo reg was reloaded in this insn.
3384 If so, its last-reload info is still valid
3385 because it is based on this insn's reload. */
3386 for (i = 0; i < n_reloads; i++)
3387 if (reload_out[i] == XEXP (x, 0))
3388 break;
3389
3390 if (i != n_reloads)
3391 forget_old_reloads_1 (XEXP (x, 0));
3392 }
3393#endif
3394 }
3395 /* A reload reg's contents are unknown after a label. */
3396 if (GET_CODE (insn) == CODE_LABEL)
3397 for (i = 0; i < n_spills; i++)
3398 {
3399 reg_reloaded_contents[i] = -1;
3400 reg_reloaded_insn[i] = 0;
3401 }
3402
3403 /* Don't assume a reload reg is still good after a call insn
3404 if it is a call-used reg. */
3405 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == CALL_INSN)
3406 for (i = 0; i < n_spills; i++)
3407 if (call_used_regs[spill_regs[i]])
3408 {
3409 reg_reloaded_contents[i] = -1;
3410 reg_reloaded_insn[i] = 0;
3411 }
3412
3413 /* In case registers overlap, allow certain insns to invalidate
3414 particular hard registers. */
3415
3416#ifdef INSN_CLOBBERS_REGNO_P
3417 for (i = 0 ; i < n_spills ; i++)
3418 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3419 {
3420 reg_reloaded_contents[i] = -1;
3421 reg_reloaded_insn[i] = 0;
3422 }
3423#endif
3424
3425 insn = next;
3426
3427#ifdef USE_C_ALLOCA
3428 alloca (0);
3429#endif
3430 }
3431}
3432
3433/* Discard all record of any value reloaded from X,
3434 or reloaded in X from someplace else;
3435 unless X is an output reload reg of the current insn.
3436
3437 X may be a hard reg (the reload reg)
3438 or it may be a pseudo reg that was reloaded from. */
3439
3440static void
3441forget_old_reloads_1 (x)
3442 rtx x;
3443{
3444 register int regno;
3445 int nr;
3446
3447 if (GET_CODE (x) != REG)
3448 return;
3449
3450 regno = REGNO (x);
3451
3452 if (regno >= FIRST_PSEUDO_REGISTER)
3453 nr = 1;
3454 else
3455 {
3456 int i;
3457 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3458 /* Storing into a spilled-reg invalidates its contents.
3459 This can happen if a block-local pseudo is allocated to that reg
3460 and it wasn't spilled because this block's total need is 0.
3461 Then some insn might have an optional reload and use this reg. */
3462 for (i = 0; i < nr; i++)
3463 if (spill_reg_order[regno + i] >= 0
3464 /* But don't do this if the reg actually serves as an output
3465 reload reg in the current instruction. */
3466 && (n_reloads == 0
3467 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3468 {
3469 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3470 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3471 }
3472 }
3473
3474 /* Since value of X has changed,
3475 forget any value previously copied from it. */
3476
3477 while (nr-- > 0)
3478 /* But don't forget a copy if this is the output reload
3479 that establishes the copy's validity. */
3480 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3481 reg_last_reload_reg[regno + nr] = 0;
3482}
3483\f
/* For each reload, the mode of the reload register.  */
static enum machine_mode reload_mode[MAX_RELOADS];

/* For each reload, the largest number of registers it will require.  */
static int reload_nregs[MAX_RELOADS];
3489
3490/* Comparison function for qsort to decide which of two reloads
3491 should be handled first. *P1 and *P2 are the reload numbers. */
3492
3493static int
3494reload_reg_class_lower (p1, p2)
3495 short *p1, *p2;
3496{
3497 register int r1 = *p1, r2 = *p2;
3498 register int t;
a8fdc208 3499
32131a9c
RK
3500 /* Consider required reloads before optional ones. */
3501 t = reload_optional[r1] - reload_optional[r2];
3502 if (t != 0)
3503 return t;
3504
3505 /* Count all solitary classes before non-solitary ones. */
3506 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3507 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3508 if (t != 0)
3509 return t;
3510
3511 /* Aside from solitaires, consider all multi-reg groups first. */
3512 t = reload_nregs[r2] - reload_nregs[r1];
3513 if (t != 0)
3514 return t;
3515
3516 /* Consider reloads in order of increasing reg-class number. */
3517 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3518 if (t != 0)
3519 return t;
3520
3521 /* If reloads are equally urgent, sort by reload number,
3522 so that the results of qsort leave nothing to chance. */
3523 return r1 - r2;
3524}
3525\f
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_RELOAD_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_input_addr;
/* If reg is in use for a RELOAD_FOR_OUTPUT_RELOAD_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_output_addr;
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_INPUT reload.  */
static HARD_REG_SET reload_reg_used_in_input;
/* If reg is in use for a RELOAD_FOR_OUTPUT reload.  */
static HARD_REG_SET reload_reg_used_in_output;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;
3544
3545/* Mark reg REGNO as in use for a reload of the sort spec'd by WHEN_NEEDED.
3546 MODE is used to indicate how many consecutive regs are actually used. */
3547
3548static void
3549mark_reload_reg_in_use (regno, when_needed, mode)
3550 int regno;
3551 enum reload_when_needed when_needed;
3552 enum machine_mode mode;
3553{
3554 int nregs = HARD_REGNO_NREGS (regno, mode);
3555 int i;
3556
3557 for (i = regno; i < nregs + regno; i++)
3558 {
3559 switch (when_needed)
3560 {
3561 case RELOAD_OTHER:
3562 SET_HARD_REG_BIT (reload_reg_used, i);
3563 break;
3564
3565 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3566 SET_HARD_REG_BIT (reload_reg_used_in_input_addr, i);
3567 break;
3568
3569 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3570 SET_HARD_REG_BIT (reload_reg_used_in_output_addr, i);
3571 break;
3572
3573 case RELOAD_FOR_OPERAND_ADDRESS:
3574 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
3575 break;
3576
3577 case RELOAD_FOR_INPUT:
3578 SET_HARD_REG_BIT (reload_reg_used_in_input, i);
3579 break;
3580
3581 case RELOAD_FOR_OUTPUT:
3582 SET_HARD_REG_BIT (reload_reg_used_in_output, i);
3583 break;
3584 }
3585
3586 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
3587 }
3588}
3589
3590/* 1 if reg REGNO is free as a reload reg for a reload of the sort
3591 specified by WHEN_NEEDED. */
3592
3593static int
3594reload_reg_free_p (regno, when_needed)
3595 int regno;
3596 enum reload_when_needed when_needed;
3597{
3598 /* In use for a RELOAD_OTHER means it's not available for anything. */
3599 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
3600 return 0;
3601 switch (when_needed)
3602 {
3603 case RELOAD_OTHER:
3604 /* In use for anything means not available for a RELOAD_OTHER. */
3605 return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);
3606
3607 /* The other kinds of use can sometimes share a register. */
3608 case RELOAD_FOR_INPUT:
3609 return (! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno)
3610 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3611 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno));
3612 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3613 return (! TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno)
3614 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno));
3615 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3616 return (! TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno)
3617 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3618 case RELOAD_FOR_OPERAND_ADDRESS:
3619 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3620 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno)
3621 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3622 case RELOAD_FOR_OUTPUT:
3623 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3624 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno)
3625 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3626 }
3627 abort ();
3628}
3629
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by WHEN_NEEDED,
   is not in use for a reload in any prior part of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_free_before_p (regno, when_needed)
     int regno;
     enum reload_when_needed when_needed;
{
  switch (when_needed)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its use starts from the beginning, so nothing can use it earlier.  */
      return 1;

      /* If this use is for part of the insn,
	 check the reg is not in use for any prior part.
	 The cases below deliberately fall through: each part of the insn
	 tests the uses of every part that precedes it.  */
    case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;
      /* Fall through to check the still-earlier parts.  */
    case RELOAD_FOR_OUTPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input, regno))
	return 0;
      /* Fall through.  */
    case RELOAD_FOR_OPERAND_ADDRESS:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno))
	return 0;
      /* Fall through.  */
    case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
    case RELOAD_FOR_INPUT:
      /* Input reloads are the earliest part of the insn;
	 nothing precedes them.  */
      return 1;
    }
  abort ();
}
3667
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by WHEN_NEEDED,
   is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (regno, when_needed)
     int regno;
     enum reload_when_needed when_needed;
{
  switch (when_needed)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 The cases below deliberately fall through: each part of the insn
	 tests the uses of every part that follows it.  */
    case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_output, regno))
	return 0;
      /* Fall through to check the still-later parts.  */
    case RELOAD_FOR_OPERAND_ADDRESS:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno))
	return 0;
      /* Fall through.  */
    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
      /* Output reloads are the last part of the insn;
	 nothing follows them.  */
      return 1;
    }
  abort ();
}
3704\f
3705/* Vector of reload-numbers showing the order in which the reloads should
3706 be processed. */
3707short reload_order[MAX_RELOADS];
3708
3709/* Indexed by reload number, 1 if incoming value
3710 inherited from previous insns. */
3711char reload_inherited[MAX_RELOADS];
3712
3713/* For an inherited reload, this is the insn the reload was inherited from,
3714 if we know it. Otherwise, this is 0. */
3715rtx reload_inheritance_insn[MAX_RELOADS];
3716
3717/* If non-zero, this is a place to get the value of the reload,
3718 rather than using reload_in. */
3719rtx reload_override_in[MAX_RELOADS];
3720
3721/* For each reload, the index in spill_regs of the spill register used,
3722 or -1 if we did not need one of the spill registers for this reload. */
3723int reload_spill_index[MAX_RELOADS];
3724
3725/* Index of last register assigned as a spill register. We allocate in
3726 a round-robin fashio. */
3727
3728static last_spill_reg = 0;
3729
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is non-zero if this is the last reload for the insn being
   processed.

   Set reload_reg_rtx[R] to the register allocated.

   If NOERROR is nonzero, we return 1 if successful,
   or 0 if we couldn't find a spill reg and we didn't change anything.  */

static int
allocate_reload_reg (r, insn, last_reload, noerror)
     int r;
     rtx insn;
     int last_reload;
     int noerror;
{
  int i;
  int pass;
  int count;
  rtx new;
  int regno;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = reload_nregs[r] > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  */

      for (count = 0, i = last_spill_reg; count < n_spills; count++)
	{
	  int class = (int) reload_reg_class[r];

	  i = (i + 1) % n_spills;

	  /* Candidate spill reg must be free for this kind of use,
	     belong to the required class, and be able to hold the mode.  */
	  if (reload_reg_free_p (spill_regs[i], reload_when_needed[r])
	      && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
	      /* Look first for regs to share, then for unshared.  */
	      && (pass || TEST_HARD_REG_BIT (reload_reg_used_at_all,
					     spill_regs[i])))
	    {
	      int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.
		 Also, don't use for a group registers that are
		 needed for nongroups.  */
	      if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
		while (nr > 1)
		  {
		    /* Walk the group back-to-front; stop at the first
		       member that is unsuitable.  */
		    regno = spill_regs[i] + nr - 1;
		    if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
			  && spill_reg_order[regno] >= 0
			  && reload_reg_free_p (regno, reload_when_needed[r])
			  && ! TEST_HARD_REG_BIT (counted_for_nongroups,
						  regno)))
		      break;
		    nr--;
		  }
	      /* NR reaches 1 only if every member of the group checked out.  */
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count == n_spills)
    {
      if (noerror)
	return 0;
      abort ();
    }

  /* Remember where to resume the round-robin search next time.  */
  last_spill_reg = i;

  /* Mark as in use for this insn the reload regs we use for this.  */
  mark_reload_reg_in_use (spill_regs[i], reload_when_needed[r],
			  reload_mode[r]);

  new = spill_reg_rtx[i];

  /* Reuse the cached REG rtx for this spill reg if it has the right mode;
     otherwise make (and cache) a fresh one.  */
  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new = gen_rtx (REG, reload_mode[r], spill_regs[i]);

  reload_reg_rtx[r] = new;
  reload_spill_index[r] = i;
  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
    {
      enum machine_mode test_mode = VOIDmode;
      if (reload_in[r])
	test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, reload_mode[r].
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
	if (! (reload_out[r] != 0
	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
	  /* The reg is OK.  */
	  return 1;
    }

  /* The reg is not OK.  */
  if (noerror)
    return 0;

  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    abort ();

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.  */
  error_for_asm (insn,
		 "`asm' operand constraint incompatible with operand size");
  reload_in[r] = 0;
  reload_out[r] = 0;
  reload_reg_rtx[r] = 0;
  reload_optional[r] = 1;
  reload_secondary_p[r] = 1;

  return 1;
}
3903\f
/* Assign hard reg targets for the pseudo-registers we must reload
   into hard regs for this insn.
   Also output the instructions to copy them in and out of the hard regs.

   For machines with register classes, we are responsible for
   finding a reload reg in the proper class.  */

static void
choose_reload_regs (insn, avoid_return_reg)
     rtx insn;
     /* This argument is currently ignored.  */
     rtx avoid_return_reg;
{
  register int i, j;
  int max_group_size = 1;
  enum reg_class group_class = NO_REGS;
  int inheritance;

  /* Saved copies of all the per-reload state, so a failed attempt with
     inheritance can be undone and retried without it.  */
  rtx save_reload_reg_rtx[MAX_RELOADS];
  char save_reload_inherited[MAX_RELOADS];
  rtx save_reload_inheritance_insn[MAX_RELOADS];
  rtx save_reload_override_in[MAX_RELOADS];
  int save_reload_spill_index[MAX_RELOADS];
  HARD_REG_SET save_reload_reg_used;
  HARD_REG_SET save_reload_reg_used_in_input_addr;
  HARD_REG_SET save_reload_reg_used_in_output_addr;
  HARD_REG_SET save_reload_reg_used_in_op_addr;
  HARD_REG_SET save_reload_reg_used_in_input;
  HARD_REG_SET save_reload_reg_used_in_output;
  HARD_REG_SET save_reload_reg_used_at_all;

  bzero (reload_inherited, MAX_RELOADS);
  bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
  bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));

  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_output);
  CLEAR_HARD_REG_SET (reload_reg_used_in_input);

  /* Distinguish output-only and input-only reloads
     because they can overlap with other things.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_when_needed[j] == RELOAD_OTHER
	&& ! reload_needed_for_multiple[j])
      {
	if (reload_in[j] == 0)
	  {
	    /* But earlyclobber operands must stay as RELOAD_OTHER.  */
	    for (i = 0; i < n_earlyclobbers; i++)
	      if (rtx_equal_p (reload_out[j], reload_earlyclobbers[i]))
		break;
	    if (i == n_earlyclobbers)
	      reload_when_needed[j] = RELOAD_FOR_OUTPUT;
	  }
	if (reload_out[j] == 0)
	  reload_when_needed[j] = RELOAD_FOR_INPUT;

	/* A secondary reload inherits the narrowed classification
	   of the reload it serves.  */
	if (reload_secondary_reload[j] >= 0
	    && ! reload_needed_for_multiple[reload_secondary_reload[j]])
	  reload_when_needed[reload_secondary_reload[j]]
	    = reload_when_needed[j];
      }

#ifdef SMALL_REGISTER_CLASSES
  /* Don't bother with avoiding the return reg
     if we have no mandatory reload that could use it.  */
  if (avoid_return_reg)
    {
      int do_avoid = 0;
      int regno = REGNO (avoid_return_reg);
      int nregs
	= HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
      int r;

      for (r = regno; r < regno + nregs; r++)
	if (spill_reg_order[r] >= 0)
	  for (j = 0; j < n_reloads; j++)
	    if (!reload_optional[j] && reload_reg_rtx[j] == 0
		&& (reload_in[j] != 0 || reload_out[j] != 0
		    || reload_secondary_p[j])
		&&
		TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
	      do_avoid = 1;
      if (!do_avoid)
	avoid_return_reg = 0;
    }
#endif /* SMALL_REGISTER_CLASSES */

#if 0  /* Not needed, now that we can always retry without inheritance.  */
  /* See if we have more mandatory reloads than spill regs.
     If so, then we cannot risk optimizations that could prevent
     reloads from sharing one spill register.

     Since we will try finding a better register than reload_reg_rtx
     unless it is equal to reload_in or reload_out, count such reloads.  */

  {
    int tem = 0;
#ifdef SMALL_REGISTER_CLASSES
    int tem = (avoid_return_reg != 0);
#endif
    for (j = 0; j < n_reloads; j++)
      if (! reload_optional[j]
	  && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
	  && (reload_reg_rtx[j] == 0
	      || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
		  && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
	tem++;
    if (tem > n_spills)
      must_reuse = 1;
  }
#endif

#ifdef SMALL_REGISTER_CLASSES
  /* Don't use the subroutine call return reg for a reload
     if we are supposed to avoid it.  */
  if (avoid_return_reg)
    {
      int regno = REGNO (avoid_return_reg);
      int nregs
	= HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
      int r;

      for (r = regno; r < regno + nregs; r++)
	if (spill_reg_order[r] >= 0)
	  SET_HARD_REG_BIT (reload_reg_used, r);
    }
#endif /* SMALL_REGISTER_CLASSES */

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.

     Also note whether any of the reloads wants a consecutive group of regs.
     If so, record the maximum size of the group desired and what
     register class contains all the groups needed by this insn.  */

  for (j = 0; j < n_reloads; j++)
    {
      reload_order[j] = j;
      reload_spill_index[j] = -1;

      /* The reload mode is the wider of the in and out modes, except for
	 a strict-low-part output, which must use the SUBREG_REG's mode.  */
      reload_mode[j]
	= (reload_strict_low[j] && reload_out[j]
	   ? GET_MODE (SUBREG_REG (reload_out[j]))
	   : (reload_inmode[j] == VOIDmode
	      || (GET_MODE_SIZE (reload_outmode[j])
		  > GET_MODE_SIZE (reload_inmode[j])))
	   ? reload_outmode[j] : reload_inmode[j]);

      reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);

      if (reload_nregs[j] > 1)
	{
	  max_group_size = MAX (reload_nregs[j], max_group_size);
	  group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
	}

      /* If we have already decided to use a certain register,
	 don't use it in another way.  */
      if (reload_reg_rtx[j])
	mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]),
				reload_when_needed[j], reload_mode[j]);
    }

  if (n_reloads > 1)
    qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* Save the reload state so the inheritance attempt below can be undone.  */
  bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
  bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
  bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
	 sizeof reload_inheritance_insn);
  bcopy (reload_override_in, save_reload_override_in,
	 sizeof reload_override_in);
  bcopy (reload_spill_index, save_reload_spill_index,
	 sizeof reload_spill_index);
  COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
  COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
  COPY_HARD_REG_SET (save_reload_reg_used_in_output,
		     reload_reg_used_in_output);
  COPY_HARD_REG_SET (save_reload_reg_used_in_input,
		     reload_reg_used_in_input);
  COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr,
		     reload_reg_used_in_input_addr);
  COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr,
		     reload_reg_used_in_output_addr);
  COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
		     reload_reg_used_in_op_addr);

  /* Try first with inheritance, then turning it off.  */

  for (inheritance = 1; inheritance >= 0; inheritance--)
    {
      /* Process the reloads in order of preference just found.
	 Beyond this point, subregs can be found in reload_reg_rtx.

	 This used to look for an existing reloaded home for all
	 of the reloads, and only then perform any new reloads.
	 But that could lose if the reloads were done out of reg-class order
	 because a later reload with a looser constraint might have an old
	 home in a register needed by an earlier reload with a tighter constraint.

	 To solve this, we make two passes over the reloads, in the order
	 described above.  In the first pass we try to inherit a reload
	 from a previous insn.  If there is a later reload that needs a
	 class that is a proper subset of the class being processed, we must
	 also allocate a spill register during the first pass.

	 Then make a second pass over the reloads to allocate any reloads
	 that haven't been given registers yet.  */

      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
	    continue;

	  /* If find_reloads chose to use reload_in or reload_out as a reload
	     register, we don't need to choose one.  Otherwise, try even if it
	     found one since we might save an insn if we find the value lying
	     around.  */
	  if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
	      && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
		  || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
	    continue;

#if 0 /* No longer needed for correct operation.
	 It might give better code, or might not; worth an experiment?  */
	  /* If this is an optional reload, we can't inherit from earlier insns
	     until we are sure that any non-optional reloads have been allocated.
	     The following code takes advantage of the fact that optional reloads
	     are at the end of reload_order.  */
	  if (reload_optional[r] != 0)
	    for (i = 0; i < j; i++)
	      if ((reload_out[reload_order[i]] != 0
		   || reload_in[reload_order[i]] != 0
		   || reload_secondary_p[reload_order[i]])
		  && ! reload_optional[reload_order[i]]
		  && reload_reg_rtx[reload_order[i]] == 0)
		allocate_reload_reg (reload_order[i], insn, 0, inheritance);
#endif

	  /* First see if this pseudo is already available as reloaded
	     for a previous insn.  We cannot try to inherit for reloads
	     that are smaller than the maximum number of registers needed
	     for groups unless the register we would allocate cannot be used
	     for the groups.

	     We could check here to see if this is a secondary reload for
	     an object that is already in a register of the desired class.
	     This would avoid the need for the secondary reload register.
	     But this is complex because we can't easily determine what
	     objects might want to be loaded via this reload.  So let a register
	     be allocated here.  In `emit_reload_insns' we suppress one of the
	     loads in the case described above.  */

	  if (inheritance)
	    {
	      register int regno = -1;

	      if (reload_in[r] == 0)
		;
	      else if (GET_CODE (reload_in[r]) == REG)
		regno = REGNO (reload_in[r]);
	      else if (GET_CODE (reload_in_reg[r]) == REG)
		regno = REGNO (reload_in_reg[r]);
#if 0
	      /* This won't work, since REGNO can be a pseudo reg number.
		 Also, it takes much more hair to keep track of all the things
		 that can invalidate an inherited reload of part of a pseudoreg.  */
	      else if (GET_CODE (reload_in[r]) == SUBREG
		       && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
		regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
#endif

	      if (regno >= 0 && reg_last_reload_reg[regno] != 0)
		{
		  i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];

		  /* The previous reload reg is usable only if it still holds
		     this pseudo, suits the mode and class, and is free for
		     this kind of use at this point of the insn.  */
		  if (reg_reloaded_contents[i] == regno
		      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
		      && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
					    spill_regs[i])
		      && (reload_nregs[r] == max_group_size
			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
						  spill_regs[i]))
		      && reload_reg_free_p (spill_regs[i], reload_when_needed[r])
		      && reload_reg_free_before_p (spill_regs[i],
						   reload_when_needed[r]))
		    {
		      /* If a group is needed, verify that all the subsequent
			 registers still have their values intact.  */
		      int nr
			= HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
		      int k;

		      for (k = 1; k < nr; k++)
			if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
			    != regno)
			  break;

		      if (k == nr)
			{
			  /* Mark the register as in use for this part of
			     the insn.  */
			  mark_reload_reg_in_use (spill_regs[i],
						  reload_when_needed[r],
						  reload_mode[r]);
			  reload_reg_rtx[r] = reg_last_reload_reg[regno];
			  reload_inherited[r] = 1;
			  reload_inheritance_insn[r] = reg_reloaded_insn[i];
			  reload_spill_index[r] = i;
			}
		    }
		}
	    }

	  /* Here's another way to see if the value is already lying around.  */
	  if (inheritance
	      && reload_in[r] != 0
	      && ! reload_inherited[r]
	      && reload_out[r] == 0
	      && (CONSTANT_P (reload_in[r])
		  || GET_CODE (reload_in[r]) == PLUS
		  || GET_CODE (reload_in[r]) == REG
		  || GET_CODE (reload_in[r]) == MEM)
	      && (reload_nregs[r] == max_group_size
		  || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
	    {
	      register rtx equiv
		= find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
				  -1, 0, 0, reload_mode[r]);
	      int regno;

	      if (equiv != 0)
		{
		  if (GET_CODE (equiv) == REG)
		    regno = REGNO (equiv);
		  else if (GET_CODE (equiv) == SUBREG)
		    {
		      regno = REGNO (SUBREG_REG (equiv));
		      if (regno < FIRST_PSEUDO_REGISTER)
			regno += SUBREG_WORD (equiv);
		    }
		  else
		    abort ();
		}

	      /* If we found a spill reg, reject it unless it is free
		 and of the desired class.  */
	      if (equiv != 0
		  && ((spill_reg_order[regno] >= 0
		       && ! reload_reg_free_before_p (regno,
						      reload_when_needed[r]))
		      || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
					      regno)))
		equiv = 0;

	      if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
		equiv = 0;

	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
		equiv = 0;

	      /* We found a register that contains the value we need.
		 If this register is the same as an `earlyclobber' operand
		 of the current insn, just mark it as a place to reload from
		 since we can't use it as the reload register itself.  */

	      if (equiv != 0)
		for (i = 0; i < n_earlyclobbers; i++)
		  if (reg_overlap_mentioned_for_reload_p (equiv,
							  reload_earlyclobbers[i]))
		    {
		      reload_override_in[r] = equiv;
		      equiv = 0;
		      break;
		    }

	      /* JRV: If the equiv register we have found is explicitly
		 clobbered in the current insn, mark but don't use, as above.  */

	      if (equiv != 0 && regno_clobbered_p (regno, insn))
		{
		  reload_override_in[r] = equiv;
		  equiv = 0;
		}

	      /* If we found an equivalent reg, say no code need be generated
		 to load it, and use it as our reload reg.  */
	      if (equiv != 0 && regno != FRAME_POINTER_REGNUM)
		{
		  reload_reg_rtx[r] = equiv;
		  reload_inherited[r] = 1;
		  /* If it is a spill reg,
		     mark the spill reg as in use for this insn.  */
		  i = spill_reg_order[regno];
		  if (i >= 0)
		    mark_reload_reg_in_use (regno, reload_when_needed[r],
					    reload_mode[r]);
		}
	    }

	  /* If we found a register to use already, or if this is an optional
	     reload, we are done.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
	    continue;

#if 0 /* No longer needed for correct operation.  Might or might not
	 give better code on the average.  Want to experiment?  */

	  /* See if there is a later reload that has a class different from our
	     class that intersects our class or that requires less register
	     than our reload.  If so, we must allocate a register to this
	     reload now, since that reload might inherit a previous reload
	     and take the only available register in our class.  Don't do this
	     for optional reloads since they will force all previous reloads
	     to be allocated.  Also don't do this for reloads that have been
	     turned off.  */

	  for (i = j + 1; i < n_reloads; i++)
	    {
	      int s = reload_order[i];

	      if ((reload_in[s] == 0 && reload_out[s] == 0
		   && ! reload_secondary_p[s])
		  || reload_optional[s])
		continue;

	      if ((reload_reg_class[s] != reload_reg_class[r]
		   && reg_classes_intersect_p (reload_reg_class[r],
					       reload_reg_class[s]))
		  || reload_nregs[s] < reload_nregs[r])
		break;
	    }

	  if (i == n_reloads)
	    continue;

	  allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
#endif
	}

      /* Now allocate reload registers for anything non-optional that
	 didn't get one yet.  */
      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
	    continue;

	  /* Skip reloads that already have a register allocated or are
	     optional.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r])
	    continue;

	  if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
	    break;
	}

      /* If that loop got all the way, we have won.  */
      if (j == n_reloads)
	break;

      /* NOTE(review): no `goto fail' is visible inside this function in the
	 code shown here, so this label appears to be reached only by falling
	 through from the allocation failure above — confirm against the
	 full file.  */
    fail:
      /* Loop around and try without any inheritance.  */
      /* First undo everything done by the failed attempt
	 to allocate with inheritance.  */
      bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
      bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
      bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
	     sizeof reload_inheritance_insn);
      bcopy (save_reload_override_in, reload_override_in,
	     sizeof reload_override_in);
      bcopy (save_reload_spill_index, reload_spill_index,
	     sizeof reload_spill_index);
      COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
      COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
      COPY_HARD_REG_SET (reload_reg_used_in_input,
			 save_reload_reg_used_in_input);
      COPY_HARD_REG_SET (reload_reg_used_in_output,
			 save_reload_reg_used_in_output);
      COPY_HARD_REG_SET (reload_reg_used_in_input_addr,
			 save_reload_reg_used_in_input_addr);
      COPY_HARD_REG_SET (reload_reg_used_in_output_addr,
			 save_reload_reg_used_in_output_addr);
      COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
			 save_reload_reg_used_in_op_addr);
    }

  /* If we thought we could inherit a reload, because it seemed that
     nothing else wanted the same reload register earlier in the insn,
     verify that assumption, now that all reloads have been assigned.  */

  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      if (reload_inherited[r] && reload_reg_rtx[r] != 0
	  && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
					 reload_when_needed[r]))
	reload_inherited[r] = 0;

      /* If we found a better place to reload from,
	 validate it in the same fashion, if it is a reload reg.  */
      if (reload_override_in[r]
	  && (GET_CODE (reload_override_in[r]) == REG
	      || GET_CODE (reload_override_in[r]) == SUBREG))
	{
	  int regno = true_regnum (reload_override_in[r]);
	  if (spill_reg_order[regno] >= 0
	      && ! reload_reg_free_before_p (regno, reload_when_needed[r]))
	    reload_override_in[r] = 0;
	}
    }

  /* Now that reload_override_in is known valid,
     actually override reload_in.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_override_in[j])
      reload_in[j] = reload_override_in[j];

  /* If this reload won't be done because it has been cancelled or is
     optional and not inherited, clear reload_reg_rtx so other
     routines (such as subst_reloads) don't get confused.  */
  for (j = 0; j < n_reloads; j++)
    if ((reload_optional[j] && ! reload_inherited[j])
	|| (reload_in[j] == 0 && reload_out[j] == 0
	    && ! reload_secondary_p[j]))
      reload_reg_rtx[j] = 0;

  /* Record which pseudos and which spill regs have output reloads.  */
  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      i = reload_spill_index[r];

      /* I is nonneg if this reload used one of the spill regs.
	 If reload_reg_rtx[r] is 0, this is an optional reload
	 that we opted to ignore.  */
      if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
	  && reload_reg_rtx[r] != 0)
	{
	  register int nregno = REGNO (reload_out[r]);
	  int nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);

	  while (--nr >= 0)
	    {
	      reg_has_output_reload[nregno + nr] = 1;
	      if (i >= 0)
		SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
	    }

	  /* Sanity check: only RELOAD_OTHER and RELOAD_FOR_OUTPUT reloads
	     may store into a register.  */
	  if (reload_when_needed[r] != RELOAD_OTHER
	      && reload_when_needed[r] != RELOAD_FOR_OUTPUT)
	    abort ();
	}
    }
}
4472\f
4473/* Output insns to reload values in and out of the chosen reload regs. */
4474
4475static void
4476emit_reload_insns (insn)
4477 rtx insn;
4478{
4479 register int j;
4480 rtx following_insn = NEXT_INSN (insn);
a8efe40d 4481 rtx before_insn = insn;
32131a9c
RK
4482 rtx first_output_reload_insn = NEXT_INSN (insn);
4483 rtx first_other_reload_insn = insn;
4484 rtx first_operand_address_reload_insn = insn;
4485 int special;
4486 /* Values to be put in spill_reg_store are put here first. */
4487 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
278f7e9e 4488 int is_asm = asm_noperands (PATTERN (insn)) >= 0;
32131a9c 4489
d45cf215 4490 /* If this is a CALL_INSN preceded by USE insns, any reload insns
a8efe40d
RK
4491 must go in front of the first USE insn, not in front of INSN. */
4492
4493 if (GET_CODE (insn) == CALL_INSN && GET_CODE (PREV_INSN (insn)) == INSN
4494 && GET_CODE (PATTERN (PREV_INSN (insn))) == USE)
4495 while (GET_CODE (PREV_INSN (before_insn)) == INSN
4496 && GET_CODE (PATTERN (PREV_INSN (before_insn))) == USE)
4497 first_other_reload_insn = first_operand_address_reload_insn
4498 = before_insn = PREV_INSN (before_insn);
4499
32131a9c
RK
4500 /* Now output the instructions to copy the data into and out of the
4501 reload registers. Do these in the order that the reloads were reported,
4502 since reloads of base and index registers precede reloads of operands
4503 and the operands may need the base and index registers reloaded. */
4504
4505 for (j = 0; j < n_reloads; j++)
4506 {
4507 register rtx old;
4508 rtx oldequiv_reg = 0;
4509 rtx this_reload_insn = 0;
4510 rtx store_insn = 0;
4511
4512 old = reload_in[j];
4513 if (old != 0 && ! reload_inherited[j]
4514 && ! rtx_equal_p (reload_reg_rtx[j], old)
4515 && reload_reg_rtx[j] != 0)
4516 {
4517 register rtx reloadreg = reload_reg_rtx[j];
4518 rtx oldequiv = 0;
4519 enum machine_mode mode;
4520 rtx where;
d445b551 4521 rtx reload_insn;
32131a9c
RK
4522
4523 /* Determine the mode to reload in.
4524 This is very tricky because we have three to choose from.
4525 There is the mode the insn operand wants (reload_inmode[J]).
4526 There is the mode of the reload register RELOADREG.
4527 There is the intrinsic mode of the operand, which we could find
4528 by stripping some SUBREGs.
4529 It turns out that RELOADREG's mode is irrelevant:
4530 we can change that arbitrarily.
4531
4532 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
4533 then the reload reg may not support QImode moves, so use SImode.
4534 If foo is in memory due to spilling a pseudo reg, this is safe,
4535 because the QImode value is in the least significant part of a
4536 slot big enough for a SImode. If foo is some other sort of
4537 memory reference, then it is impossible to reload this case,
4538 so previous passes had better make sure this never happens.
4539
4540 Then consider a one-word union which has SImode and one of its
4541 members is a float, being fetched as (SUBREG:SF union:SI).
4542 We must fetch that as SFmode because we could be loading into
4543 a float-only register. In this case OLD's mode is correct.
4544
4545 Consider an immediate integer: it has VOIDmode. Here we need
4546 to get a mode from something else.
4547
4548 In some cases, there is a fourth mode, the operand's
4549 containing mode. If the insn specifies a containing mode for
4550 this operand, it overrides all others.
4551
4552 I am not sure whether the algorithm here is always right,
4553 but it does the right things in those cases. */
4554
4555 mode = GET_MODE (old);
4556 if (mode == VOIDmode)
4557 mode = reload_inmode[j];
4558 if (reload_strict_low[j])
4559 mode = GET_MODE (SUBREG_REG (reload_in[j]));
4560
4561#ifdef SECONDARY_INPUT_RELOAD_CLASS
4562 /* If we need a secondary register for this operation, see if
4563 the value is already in a register in that class. Don't
4564 do this if the secondary register will be used as a scratch
4565 register. */
4566
4567 if (reload_secondary_reload[j] >= 0
4568 && reload_secondary_icode[j] == CODE_FOR_nothing)
4569 oldequiv
4570 = find_equiv_reg (old, insn,
4571 reload_reg_class[reload_secondary_reload[j]],
4572 -1, 0, 0, mode);
4573#endif
4574
4575 /* If reloading from memory, see if there is a register
4576 that already holds the same value. If so, reload from there.
4577 We can pass 0 as the reload_reg_p argument because
4578 any other reload has either already been emitted,
4579 in which case find_equiv_reg will see the reload-insn,
4580 or has yet to be emitted, in which case it doesn't matter
4581 because we will use this equiv reg right away. */
4582
4583 if (oldequiv == 0
4584 && (GET_CODE (old) == MEM
4585 || (GET_CODE (old) == REG
4586 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4587 && reg_renumber[REGNO (old)] < 0)))
4588 oldequiv = find_equiv_reg (old, insn, GENERAL_REGS,
4589 -1, 0, 0, mode);
4590
4591 if (oldequiv)
4592 {
4593 int regno = true_regnum (oldequiv);
4594
4595 /* If OLDEQUIV is a spill register, don't use it for this
4596 if any other reload needs it at an earlier stage of this insn
a8fdc208 4597 or at this stage. */
32131a9c
RK
4598 if (spill_reg_order[regno] >= 0
4599 && (! reload_reg_free_p (regno, reload_when_needed[j])
4600 || ! reload_reg_free_before_p (regno,
4601 reload_when_needed[j])))
4602 oldequiv = 0;
4603
4604 /* If OLDEQUIV is not a spill register,
4605 don't use it if any other reload wants it. */
4606 if (spill_reg_order[regno] < 0)
4607 {
4608 int k;
4609 for (k = 0; k < n_reloads; k++)
4610 if (reload_reg_rtx[k] != 0 && k != j
bfa30b22
RK
4611 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
4612 oldequiv))
32131a9c
RK
4613 {
4614 oldequiv = 0;
4615 break;
4616 }
4617 }
4618 }
4619
4620 if (oldequiv == 0)
4621 oldequiv = old;
4622 else if (GET_CODE (oldequiv) == REG)
4623 oldequiv_reg = oldequiv;
4624 else if (GET_CODE (oldequiv) == SUBREG)
4625 oldequiv_reg = SUBREG_REG (oldequiv);
4626
4627 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
4628 then load RELOADREG from OLDEQUIV. */
4629
4630 if (GET_MODE (reloadreg) != mode)
4631 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
4632 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
4633 oldequiv = SUBREG_REG (oldequiv);
4634 if (GET_MODE (oldequiv) != VOIDmode
4635 && mode != GET_MODE (oldequiv))
4636 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
4637
4638 /* Decide where to put reload insn for this reload. */
4639 switch (reload_when_needed[j])
4640 {
4641 case RELOAD_FOR_INPUT:
4642 case RELOAD_OTHER:
4643 where = first_operand_address_reload_insn;
4644 break;
4645 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
4646 where = first_other_reload_insn;
4647 break;
4648 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
4649 where = first_output_reload_insn;
4650 break;
4651 case RELOAD_FOR_OPERAND_ADDRESS:
a8efe40d 4652 where = before_insn;
32131a9c
RK
4653 }
4654
4655 special = 0;
4656
4657 /* Auto-increment addresses must be reloaded in a special way. */
4658 if (GET_CODE (oldequiv) == POST_INC
4659 || GET_CODE (oldequiv) == POST_DEC
4660 || GET_CODE (oldequiv) == PRE_INC
4661 || GET_CODE (oldequiv) == PRE_DEC)
4662 {
4663 /* We are not going to bother supporting the case where a
4664 incremented register can't be copied directly from
4665 OLDEQUIV since this seems highly unlikely. */
4666 if (reload_secondary_reload[j] >= 0)
4667 abort ();
4668 /* Prevent normal processing of this reload. */
4669 special = 1;
4670 /* Output a special code sequence for this case. */
4671 this_reload_insn
4672 = inc_for_reload (reloadreg, oldequiv, reload_inc[j], where);
4673 }
4674
4675 /* If we are reloading a pseudo-register that was set by the previous
4676 insn, see if we can get rid of that pseudo-register entirely
4677 by redirecting the previous insn into our reload register. */
4678
4679 else if (optimize && GET_CODE (old) == REG
4680 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4681 && dead_or_set_p (insn, old)
4682 /* This is unsafe if some other reload
4683 uses the same reg first. */
4684 && (reload_when_needed[j] == RELOAD_OTHER
4685 || reload_when_needed[j] == RELOAD_FOR_INPUT
4686 || reload_when_needed[j] == RELOAD_FOR_INPUT_RELOAD_ADDRESS))
4687 {
4688 rtx temp = PREV_INSN (insn);
4689 while (temp && GET_CODE (temp) == NOTE)
4690 temp = PREV_INSN (temp);
4691 if (temp
4692 && GET_CODE (temp) == INSN
4693 && GET_CODE (PATTERN (temp)) == SET
4694 && SET_DEST (PATTERN (temp)) == old
4695 /* Make sure we can access insn_operand_constraint. */
4696 && asm_noperands (PATTERN (temp)) < 0
4697 /* This is unsafe if prev insn rejects our reload reg. */
4698 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
4699 reloadreg)
4700 /* This is unsafe if operand occurs more than once in current
4701 insn. Perhaps some occurrences aren't reloaded. */
4702 && count_occurrences (PATTERN (insn), old) == 1
4703 /* Don't risk splitting a matching pair of operands. */
4704 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
4705 {
4706 /* Store into the reload register instead of the pseudo. */
4707 SET_DEST (PATTERN (temp)) = reloadreg;
4708 /* If these are the only uses of the pseudo reg,
4709 pretend for GDB it lives in the reload reg we used. */
4710 if (reg_n_deaths[REGNO (old)] == 1
4711 && reg_n_sets[REGNO (old)] == 1)
4712 {
4713 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
4714 alter_reg (REGNO (old), -1);
4715 }
4716 special = 1;
4717 }
4718 }
4719
4720 /* We can't do that, so output an insn to load RELOADREG.
4721 Keep them in the following order:
4722 all reloads for input reload addresses,
4723 all reloads for ordinary input operands,
4724 all reloads for addresses of non-reloaded operands,
4725 the insn being reloaded,
4726 all reloads for addresses of output reloads,
4727 the output reloads. */
4728 if (! special)
4729 {
4730#ifdef SECONDARY_INPUT_RELOAD_CLASS
4731 rtx second_reload_reg = 0;
4732 enum insn_code icode;
4733
4734 /* If we have a secondary reload, pick up the secondary register
d445b551
RK
4735 and icode, if any. If OLDEQUIV and OLD are different or
4736 if this is an in-out reload, recompute whether or not we
4737 still need a secondary register and what the icode should
4738 be. If we still need a secondary register and the class or
4739 icode is different, go back to reloading from OLD if using
4740 OLDEQUIV means that we got the wrong type of register. We
4741 cannot have different class or icode due to an in-out reload
4742 because we don't make such reloads when both the input and
4743 output need secondary reload registers. */
32131a9c
RK
4744
4745 if (reload_secondary_reload[j] >= 0)
4746 {
4747 int secondary_reload = reload_secondary_reload[j];
1554c2c6
RK
4748 rtx real_oldequiv = oldequiv;
4749 rtx real_old = old;
4750
4751 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
4752 and similarly for OLD.
4753 See comments in find_secondary_reload in reload.c. */
4754 if (GET_CODE (oldequiv) == REG
4755 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
4756 && reg_equiv_mem[REGNO (oldequiv)] != 0)
4757 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
4758
4759 if (GET_CODE (old) == REG
4760 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4761 && reg_equiv_mem[REGNO (old)] != 0)
4762 real_old = reg_equiv_mem[REGNO (old)];
4763
32131a9c
RK
4764 second_reload_reg = reload_reg_rtx[secondary_reload];
4765 icode = reload_secondary_icode[j];
4766
d445b551
RK
4767 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
4768 || (reload_in[j] != 0 && reload_out[j] != 0))
32131a9c
RK
4769 {
4770 enum reg_class new_class
4771 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
1554c2c6 4772 mode, real_oldequiv);
32131a9c
RK
4773
4774 if (new_class == NO_REGS)
4775 second_reload_reg = 0;
4776 else
4777 {
4778 enum insn_code new_icode;
4779 enum machine_mode new_mode;
4780
4781 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
4782 REGNO (second_reload_reg)))
1554c2c6 4783 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
4784 else
4785 {
4786 new_icode = reload_in_optab[(int) mode];
4787 if (new_icode != CODE_FOR_nothing
4788 && ((insn_operand_predicate[(int) new_icode][0]
a8fdc208 4789 && ! ((*insn_operand_predicate[(int) new_icode][0])
32131a9c 4790 (reloadreg, mode)))
a8fdc208
RS
4791 || (insn_operand_predicate[(int) new_icode][1]
4792 && ! ((*insn_operand_predicate[(int) new_icode][1])
1554c2c6 4793 (real_oldequiv, mode)))))
32131a9c
RK
4794 new_icode = CODE_FOR_nothing;
4795
4796 if (new_icode == CODE_FOR_nothing)
4797 new_mode = mode;
4798 else
4799 new_mode = insn_operand_mode[new_icode][2];
4800
4801 if (GET_MODE (second_reload_reg) != new_mode)
4802 {
4803 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
4804 new_mode))
1554c2c6 4805 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
4806 else
4807 second_reload_reg
4808 = gen_reg_rtx (REG, new_mode,
4809 REGNO (second_reload_reg));
4810 }
4811 }
4812 }
4813 }
4814
4815 /* If we still need a secondary reload register, check
4816 to see if it is being used as a scratch or intermediate
1554c2c6
RK
4817 register and generate code appropriately. If we need
4818 a scratch register, use REAL_OLDEQUIV since the form of
4819 the insn may depend on the actual address if it is
4820 a MEM. */
32131a9c
RK
4821
4822 if (second_reload_reg)
4823 {
4824 if (icode != CODE_FOR_nothing)
4825 {
d445b551 4826 reload_insn = emit_insn_before (GEN_FCN (icode)
1554c2c6
RK
4827 (reloadreg,
4828 real_oldequiv,
d445b551
RK
4829 second_reload_reg),
4830 where);
4831 if (this_reload_insn == 0)
4832 this_reload_insn = reload_insn;
32131a9c
RK
4833 special = 1;
4834 }
4835 else
4836 {
4837 /* See if we need a scratch register to load the
4838 intermediate register (a tertiary reload). */
4839 enum insn_code tertiary_icode
4840 = reload_secondary_icode[secondary_reload];
4841
4842 if (tertiary_icode != CODE_FOR_nothing)
4843 {
4844 rtx third_reload_reg
4845 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
4846
d445b551
RK
4847 reload_insn
4848 = emit_insn_before ((GEN_FCN (tertiary_icode)
4849 (second_reload_reg,
1554c2c6 4850 real_oldequiv,
d445b551
RK
4851 third_reload_reg)),
4852 where);
4853 if (this_reload_insn == 0)
4854 this_reload_insn = reload_insn;
32131a9c
RK
4855 }
4856 else
4857 {
d445b551
RK
4858 reload_insn
4859 = gen_input_reload (second_reload_reg,
278f7e9e
RS
4860 oldequiv, where, is_asm);
4861 /* If we can't create the reload insn,
4862 report an error and give up. */
4863 if (reload_insn == 0)
4864 {
4865 error_for_asm (insn,
4866 "`asm' operand requires impossible reload");
4867 return;
4868 }
d445b551
RK
4869 if (this_reload_insn == 0)
4870 this_reload_insn = reload_insn;
32131a9c
RK
4871 oldequiv = second_reload_reg;
4872 }
4873 }
4874 }
4875 }
4876#endif
4877
4878 if (! special)
d445b551
RK
4879 {
4880 reload_insn = gen_input_reload (reloadreg,
278f7e9e
RS
4881 oldequiv, where, is_asm);
4882 /* If we can't create the reload insn,
4883 report an error and give up. */
4884 if (reload_insn == 0)
4885 {
4886 error_for_asm (insn,
4887 "`asm' operand requires impossible reload");
4888 return;
4889 }
d445b551
RK
4890 if (this_reload_insn == 0)
4891 this_reload_insn = reload_insn;
4892 }
32131a9c
RK
4893
4894#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
4895 /* We may have to make a REG_DEAD note for the secondary reload
4896 register in the insns we just made. Find the last insn that
4897 mentioned the register. */
4898 if (! special && second_reload_reg
4899 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
4900 {
4901 rtx prev;
4902
4903 for (prev = where;
4904 prev != PREV_INSN (this_reload_insn);
4905 prev = PREV_INSN (prev))
4906 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
bfa30b22
RK
4907 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
4908 PATTERN (prev)))
32131a9c
RK
4909 {
4910 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
4911 second_reload_reg,
4912 REG_NOTES (prev));
4913 break;
4914 }
4915 }
4916#endif
4917 }
4918
4919 /* Update where to put other reload insns. */
4920 if (this_reload_insn)
4921 switch (reload_when_needed[j])
4922 {
4923 case RELOAD_FOR_INPUT:
4924 case RELOAD_OTHER:
4925 if (first_other_reload_insn == first_operand_address_reload_insn)
4926 first_other_reload_insn = this_reload_insn;
4927 break;
4928 case RELOAD_FOR_OPERAND_ADDRESS:
a8efe40d 4929 if (first_operand_address_reload_insn == before_insn)
32131a9c 4930 first_operand_address_reload_insn = this_reload_insn;
a8efe40d 4931 if (first_other_reload_insn == before_insn)
32131a9c
RK
4932 first_other_reload_insn = this_reload_insn;
4933 }
4934
4935 /* reload_inc[j] was formerly processed here. */
4936 }
4937
4938 /* Add a note saying the input reload reg
4939 dies in this insn, if anyone cares. */
4940#ifdef PRESERVE_DEATH_INFO_REGNO_P
4941 if (old != 0
4942 && reload_reg_rtx[j] != old
4943 && reload_reg_rtx[j] != 0
4944 && reload_out[j] == 0
4945 && ! reload_inherited[j]
4946 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
4947 {
4948 register rtx reloadreg = reload_reg_rtx[j];
4949
a8fdc208 4950#if 0
32131a9c
RK
4951 /* We can't abort here because we need to support this for sched.c.
4952 It's not terrible to miss a REG_DEAD note, but we should try
4953 to figure out how to do this correctly. */
4954 /* The code below is incorrect for address-only reloads. */
4955 if (reload_when_needed[j] != RELOAD_OTHER
4956 && reload_when_needed[j] != RELOAD_FOR_INPUT)
4957 abort ();
4958#endif
4959
4960 /* Add a death note to this insn, for an input reload. */
4961
4962 if ((reload_when_needed[j] == RELOAD_OTHER
4963 || reload_when_needed[j] == RELOAD_FOR_INPUT)
4964 && ! dead_or_set_p (insn, reloadreg))
4965 REG_NOTES (insn)
4966 = gen_rtx (EXPR_LIST, REG_DEAD,
4967 reloadreg, REG_NOTES (insn));
4968 }
4969
4970 /* When we inherit a reload, the last marked death of the reload reg
4971 may no longer really be a death. */
4972 if (reload_reg_rtx[j] != 0
4973 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
4974 && reload_inherited[j])
4975 {
4976 /* Handle inheriting an output reload.
4977 Remove the death note from the output reload insn. */
4978 if (reload_spill_index[j] >= 0
4979 && GET_CODE (reload_in[j]) == REG
4980 && spill_reg_store[reload_spill_index[j]] != 0
4981 && find_regno_note (spill_reg_store[reload_spill_index[j]],
4982 REG_DEAD, REGNO (reload_reg_rtx[j])))
4983 remove_death (REGNO (reload_reg_rtx[j]),
4984 spill_reg_store[reload_spill_index[j]]);
4985 /* Likewise for input reloads that were inherited. */
4986 else if (reload_spill_index[j] >= 0
4987 && GET_CODE (reload_in[j]) == REG
4988 && spill_reg_store[reload_spill_index[j]] == 0
4989 && reload_inheritance_insn[j] != 0
a8fdc208 4990 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
32131a9c
RK
4991 REGNO (reload_reg_rtx[j])))
4992 remove_death (REGNO (reload_reg_rtx[j]),
4993 reload_inheritance_insn[j]);
4994 else
4995 {
4996 rtx prev;
4997
4998 /* We got this register from find_equiv_reg.
4999 Search back for its last death note and get rid of it.
5000 But don't search back too far.
5001 Don't go past a place where this reg is set,
5002 since a death note before that remains valid. */
5003 for (prev = PREV_INSN (insn);
5004 prev && GET_CODE (prev) != CODE_LABEL;
5005 prev = PREV_INSN (prev))
5006 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5007 && dead_or_set_p (prev, reload_reg_rtx[j]))
5008 {
5009 if (find_regno_note (prev, REG_DEAD,
5010 REGNO (reload_reg_rtx[j])))
5011 remove_death (REGNO (reload_reg_rtx[j]), prev);
5012 break;
5013 }
5014 }
5015 }
5016
5017 /* We might have used find_equiv_reg above to choose an alternate
5018 place from which to reload. If so, and it died, we need to remove
5019 that death and move it to one of the insns we just made. */
5020
5021 if (oldequiv_reg != 0
5022 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
5023 {
5024 rtx prev, prev1;
5025
5026 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
5027 prev = PREV_INSN (prev))
5028 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5029 && dead_or_set_p (prev, oldequiv_reg))
5030 {
5031 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
5032 {
5033 for (prev1 = this_reload_insn;
5034 prev1; prev1 = PREV_INSN (prev1))
5035 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
bfa30b22
RK
5036 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
5037 PATTERN (prev1)))
32131a9c
RK
5038 {
5039 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
5040 oldequiv_reg,
5041 REG_NOTES (prev1));
5042 break;
5043 }
5044 remove_death (REGNO (oldequiv_reg), prev);
5045 }
5046 break;
5047 }
5048 }
5049#endif
5050
5051 /* If we are reloading a register that was recently stored in with an
5052 output-reload, see if we can prove there was
5053 actually no need to store the old value in it. */
5054
5055 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
5056 /* This is unsafe if some other reload uses the same reg first. */
5057 && (reload_when_needed[j] == RELOAD_OTHER
5058 || reload_when_needed[j] == RELOAD_FOR_INPUT
5059 || reload_when_needed[j] == RELOAD_FOR_INPUT_RELOAD_ADDRESS)
5060 && GET_CODE (reload_in[j]) == REG
5061#if 0
5062 /* There doesn't seem to be any reason to restrict this to pseudos
5063 and doing so loses in the case where we are copying from a
5064 register of the wrong class. */
5065 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
5066#endif
5067 && spill_reg_store[reload_spill_index[j]] != 0
5068 && dead_or_set_p (insn, reload_in[j])
5069 /* This is unsafe if operand occurs more than once in current
5070 insn. Perhaps some occurrences weren't reloaded. */
5071 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5072 delete_output_reload (insn, j,
5073 spill_reg_store[reload_spill_index[j]]);
5074
5075 /* Input-reloading is done. Now do output-reloading,
5076 storing the value from the reload-register after the main insn
5077 if reload_out[j] is nonzero.
5078
5079 ??? At some point we need to support handling output reloads of
5080 JUMP_INSNs or insns that set cc0. */
5081 old = reload_out[j];
5082 if (old != 0
5083 && reload_reg_rtx[j] != old
5084 && reload_reg_rtx[j] != 0)
5085 {
5086 register rtx reloadreg = reload_reg_rtx[j];
5087 register rtx second_reloadreg = 0;
5088 rtx prev_insn = PREV_INSN (first_output_reload_insn);
5089 rtx note, p;
5090 enum machine_mode mode;
5091 int special = 0;
5092
5093 /* An output operand that dies right away does need a reload,
5094 but need not be copied from it. Show the new location in the
5095 REG_UNUSED note. */
5096 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
5097 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
5098 {
5099 XEXP (note, 0) = reload_reg_rtx[j];
5100 continue;
5101 }
5102 else if (GET_CODE (old) == SCRATCH)
5103 /* If we aren't optimizing, there won't be a REG_UNUSED note,
5104 but we don't want to make an output reload. */
5105 continue;
5106
5107#if 0
5108 /* Strip off of OLD any size-increasing SUBREGs such as
5109 (SUBREG:SI foo:QI 0). */
5110
5111 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
5112 && (GET_MODE_SIZE (GET_MODE (old))
5113 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
5114 old = SUBREG_REG (old);
5115#endif
5116
5117 /* If is a JUMP_INSN, we can't support output reloads yet. */
5118 if (GET_CODE (insn) == JUMP_INSN)
5119 abort ();
5120
5121 /* Determine the mode to reload in.
5122 See comments above (for input reloading). */
5123
5124 mode = GET_MODE (old);
5125 if (mode == VOIDmode)
5126 abort (); /* Should never happen for an output. */
5127
5128 /* A strict-low-part output operand needs to be reloaded
5129 in the mode of the entire value. */
5130 if (reload_strict_low[j])
5131 {
5132 mode = GET_MODE (SUBREG_REG (reload_out[j]));
5133 /* Encapsulate OLD into that mode. */
5134 /* If OLD is a subreg, then strip it, since the subreg will
5135 be altered by this very reload. */
5136 while (GET_CODE (old) == SUBREG && GET_MODE (old) != mode)
5137 old = SUBREG_REG (old);
5138 if (GET_MODE (old) != VOIDmode
5139 && mode != GET_MODE (old))
5140 old = gen_rtx (SUBREG, mode, old, 0);
5141 }
5142
5143 if (GET_MODE (reloadreg) != mode)
5144 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5145
5146#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5147
5148 /* If we need two reload regs, set RELOADREG to the intermediate
5149 one, since it will be stored into OUT. We might need a secondary
5150 register only for an input reload, so check again here. */
5151
1554c2c6 5152 if (reload_secondary_reload[j] >= 0)
32131a9c 5153 {
1554c2c6 5154 rtx real_old = old;
32131a9c 5155
1554c2c6
RK
5156 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
5157 && reg_equiv_mem[REGNO (old)] != 0)
5158 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 5159
1554c2c6
RK
5160 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
5161 mode, real_old)
5162 != NO_REGS))
5163 {
5164 second_reloadreg = reloadreg;
5165 reloadreg = reload_reg_rtx[reload_secondary_reload[j]];
32131a9c 5166
1554c2c6
RK
5167 /* See if RELOADREG is to be used as a scratch register
5168 or as an intermediate register. */
5169 if (reload_secondary_icode[j] != CODE_FOR_nothing)
32131a9c 5170 {
1554c2c6
RK
5171 emit_insn_before ((GEN_FCN (reload_secondary_icode[j])
5172 (real_old, second_reloadreg,
5173 reloadreg)),
5174 first_output_reload_insn);
5175 special = 1;
32131a9c
RK
5176 }
5177 else
1554c2c6
RK
5178 {
5179 /* See if we need both a scratch and intermediate reload
5180 register. */
5181 int secondary_reload = reload_secondary_reload[j];
5182 enum insn_code tertiary_icode
5183 = reload_secondary_icode[secondary_reload];
5184 rtx pat;
32131a9c 5185
1554c2c6
RK
5186 if (GET_MODE (reloadreg) != mode)
5187 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5188
5189 if (tertiary_icode != CODE_FOR_nothing)
5190 {
5191 rtx third_reloadreg
5192 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5193 pat = (GEN_FCN (tertiary_icode)
5194 (reloadreg, second_reloadreg, third_reloadreg));
5195 }
5196 else
5197 pat = gen_move_insn (reloadreg, second_reloadreg);
5198
5199 emit_insn_before (pat, first_output_reload_insn);
5200 }
32131a9c
RK
5201 }
5202 }
5203#endif
5204
5205 /* Output the last reload insn. */
5206 if (! special)
5207 emit_insn_before (gen_move_insn (old, reloadreg),
5208 first_output_reload_insn);
5209
5210#ifdef PRESERVE_DEATH_INFO_REGNO_P
5211 /* If final will look at death notes for this reg,
5212 put one on the last output-reload insn to use it. Similarly
5213 for any secondary register. */
5214 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
5215 for (p = PREV_INSN (first_output_reload_insn);
5216 p != prev_insn; p = PREV_INSN (p))
5217 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
5218 && reg_overlap_mentioned_for_reload_p (reloadreg,
5219 PATTERN (p)))
32131a9c
RK
5220 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
5221 reloadreg, REG_NOTES (p));
5222
5223#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5224 if (! special
5225 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
5226 for (p = PREV_INSN (first_output_reload_insn);
5227 p != prev_insn; p = PREV_INSN (p))
5228 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
5229 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
5230 PATTERN (p)))
32131a9c
RK
5231 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
5232 second_reloadreg, REG_NOTES (p));
5233#endif
5234#endif
5235 /* Look at all insns we emitted, just to be safe. */
5236 for (p = NEXT_INSN (prev_insn); p != first_output_reload_insn;
5237 p = NEXT_INSN (p))
5238 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
5239 {
5240 /* If this output reload doesn't come from a spill reg,
5241 clear any memory of reloaded copies of the pseudo reg.
5242 If this output reload comes from a spill reg,
5243 reg_has_output_reload will make this do nothing. */
5244 note_stores (PATTERN (p), forget_old_reloads_1);
5245
5246 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
5247 store_insn = p;
5248 }
5249
5250 first_output_reload_insn = NEXT_INSN (prev_insn);
5251 }
5252
5253 if (reload_spill_index[j] >= 0)
5254 new_spill_reg_store[reload_spill_index[j]] = store_insn;
5255 }
5256
32131a9c
RK
5257 /* Move death notes from INSN
5258 to output-operand-address and output reload insns. */
5259#ifdef PRESERVE_DEATH_INFO_REGNO_P
5260 {
5261 rtx insn1;
5262 /* Loop over those insns, last ones first. */
5263 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
5264 insn1 = PREV_INSN (insn1))
5265 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
5266 {
5267 rtx source = SET_SRC (PATTERN (insn1));
5268 rtx dest = SET_DEST (PATTERN (insn1));
5269
5270 /* The note we will examine next. */
5271 rtx reg_notes = REG_NOTES (insn);
5272 /* The place that pointed to this note. */
5273 rtx *prev_reg_note = &REG_NOTES (insn);
5274
5275 /* If the note is for something used in the source of this
5276 reload insn, or in the output address, move the note. */
5277 while (reg_notes)
5278 {
5279 rtx next_reg_notes = XEXP (reg_notes, 1);
5280 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
5281 && GET_CODE (XEXP (reg_notes, 0)) == REG
5282 && ((GET_CODE (dest) != REG
bfa30b22
RK
5283 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
5284 dest))
5285 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
5286 source)))
32131a9c
RK
5287 {
5288 *prev_reg_note = next_reg_notes;
5289 XEXP (reg_notes, 1) = REG_NOTES (insn1);
5290 REG_NOTES (insn1) = reg_notes;
5291 }
5292 else
5293 prev_reg_note = &XEXP (reg_notes, 1);
5294
5295 reg_notes = next_reg_notes;
5296 }
5297 }
5298 }
5299#endif
5300
5301 /* For all the spill regs newly reloaded in this instruction,
5302 record what they were reloaded from, so subsequent instructions
d445b551
RK
5303 can inherit the reloads.
5304
5305 Update spill_reg_store for the reloads of this insn.
e9e79d69 5306 Copy the elements that were updated in the loop above. */
32131a9c
RK
5307
5308 for (j = 0; j < n_reloads; j++)
5309 {
5310 register int r = reload_order[j];
5311 register int i = reload_spill_index[r];
5312
5313 /* I is nonneg if this reload used one of the spill regs.
5314 If reload_reg_rtx[r] is 0, this is an optional reload
5315 that we opted to ignore. */
d445b551 5316
32131a9c
RK
5317 if (i >= 0 && reload_reg_rtx[r] != 0)
5318 {
5319 /* First, clear out memory of what used to be in this spill reg.
5320 If consecutive registers are used, clear them all. */
5321 int nr
5322 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
5323 int k;
5324
5325 for (k = 0; k < nr; k++)
5326 {
5327 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
5328 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
5329 }
5330
5331 /* Maybe the spill reg contains a copy of reload_out. */
5332 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
5333 {
5334 register int nregno = REGNO (reload_out[r]);
d445b551
RK
5335
5336 spill_reg_store[i] = new_spill_reg_store[i];
32131a9c 5337 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 5338
32131a9c
RK
5339 for (k = 0; k < nr; k++)
5340 {
5341 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5342 = nregno;
5343 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
5344 }
5345 }
d445b551 5346
32131a9c
RK
5347 /* Maybe the spill reg contains a copy of reload_in. */
5348 else if (reload_out[r] == 0
5349 && reload_in[r] != 0
5350 && (GET_CODE (reload_in[r]) == REG
5351 || GET_CODE (reload_in_reg[r]) == REG))
5352 {
5353 register int nregno;
5354 if (GET_CODE (reload_in[r]) == REG)
5355 nregno = REGNO (reload_in[r]);
5356 else
5357 nregno = REGNO (reload_in_reg[r]);
5358
5359 /* If there are two separate reloads (one in and one out)
5360 for the same (hard or pseudo) reg,
a8fdc208 5361 leave reg_last_reload_reg set
32131a9c
RK
5362 based on the output reload.
5363 Otherwise, set it from this input reload. */
5364 if (!reg_has_output_reload[nregno]
5365 /* But don't do so if another input reload
5366 will clobber this one's value. */
5367 && reload_reg_reaches_end_p (spill_regs[i],
5368 reload_when_needed[r]))
5369 {
5370 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551
RK
5371
5372 /* Unless we inherited this reload, show we haven't
5373 recently done a store. */
5374 if (! reload_inherited[r])
5375 spill_reg_store[i] = 0;
5376
32131a9c
RK
5377 for (k = 0; k < nr; k++)
5378 {
5379 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5380 = nregno;
5381 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
5382 = insn;
5383 }
5384 }
5385 }
5386 }
5387
5388 /* The following if-statement was #if 0'd in 1.34 (or before...).
5389 It's reenabled in 1.35 because supposedly nothing else
5390 deals with this problem. */
5391
5392 /* If a register gets output-reloaded from a non-spill register,
5393 that invalidates any previous reloaded copy of it.
5394 But forget_old_reloads_1 won't get to see it, because
5395 it thinks only about the original insn. So invalidate it here. */
5396 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
5397 {
5398 register int nregno = REGNO (reload_out[r]);
5399 reg_last_reload_reg[nregno] = 0;
5400 }
5401 }
5402}
5403\f
5404/* Emit code before BEFORE_INSN to perform an input reload of IN to RELOADREG.
278f7e9e
RS
5405 Returns first insn emitted.
5406
5407 If IS_ASM, check the emitted insns for validity.
5408 If they are invalid, delete them and return 0. */
32131a9c
RK
5409
5410rtx
278f7e9e 5411gen_input_reload (reloadreg, in, before_insn, is_asm)
32131a9c
RK
5412 rtx reloadreg;
5413 rtx in;
5414 rtx before_insn;
278f7e9e 5415 int is_asm;
32131a9c
RK
5416{
5417 register rtx prev_insn = PREV_INSN (before_insn);
5418
a8fdc208 5419 /* How to do this reload can get quite tricky. Normally, we are being
32131a9c
RK
5420 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
5421 register that didn't get a hard register. In that case we can just
5422 call emit_move_insn.
5423
5424 We can also be asked to reload a PLUS that adds either two registers or
5425 a register and a constant or MEM. This can occur during frame pointer
5426 elimination. That case if handled by trying to emit a single insn
5427 to perform the add. If it is not valid, we use a two insn sequence.
5428
5429 Finally, we could be called to handle an 'o' constraint by putting
5430 an address into a register. In that case, we first try to do this
5431 with a named pattern of "reload_load_address". If no such pattern
5432 exists, we just emit a SET insn and hope for the best (it will normally
5433 be valid on machines that use 'o').
5434
5435 This entire process is made complex because reload will never
5436 process the insns we generate here and so we must ensure that
5437 they will fit their constraints and also by the fact that parts of
5438 IN might be being reloaded separately and replaced with spill registers.
5439 Because of this, we are, in some sense, just guessing the right approach
5440 here. The one listed above seems to work.
5441
5442 ??? At some point, this whole thing needs to be rethought. */
5443
5444 if (GET_CODE (in) == PLUS
5445 && GET_CODE (XEXP (in, 0)) == REG
5446 && (GET_CODE (XEXP (in, 1)) == REG
5447 || CONSTANT_P (XEXP (in, 1))
5448 || GET_CODE (XEXP (in, 1)) == MEM))
5449 {
5450 /* We need to compute the sum of what is either a register and a
5451 constant, a register and memory, or a hard register and a pseudo
5452 register and put it into the reload register. The best possible way
5453 of doing this is if the machine has a three-operand ADD insn that
5454 accepts the required operands.
5455
5456 The simplest approach is to try to generate such an insn and see if it
5457 is recognized and matches its constraints. If so, it can be used.
5458
5459 It might be better not to actually emit the insn unless it is valid,
0009eff2
RK
5460 but we need to pass the insn as an operand to `recog' and
5461 `insn_extract'and it is simpler to emit and then delete the insn if
5462 not valid than to dummy things up. */
a8fdc208 5463
af929c62 5464 rtx op0, op1, tem, insn;
32131a9c 5465 int code;
a8fdc208 5466
af929c62
RK
5467 op0 = find_replacement (&XEXP (in, 0));
5468 op1 = find_replacement (&XEXP (in, 1));
5469
32131a9c
RK
5470 /* Since constraint checking is strict, commutativity won't be
5471 checked, so we need to do that here to avoid spurious failure
5472 if the add instruction is two-address and the second operand
5473 of the add is the same as the reload reg, which is frequently
5474 the case. If the insn would be A = B + A, rearrange it so
5475 it will be A = A + B as constrain_operands expects. */
a8fdc208 5476
32131a9c
RK
5477 if (GET_CODE (XEXP (in, 1)) == REG
5478 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
af929c62
RK
5479 tem = op0, op0 = op1, op1 = tem;
5480
5481 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
5482 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
32131a9c
RK
5483
5484 insn = emit_insn_before (gen_rtx (SET, VOIDmode, reloadreg, in),
5485 before_insn);
5486 code = recog_memoized (insn);
5487
5488 if (code >= 0)
5489 {
5490 insn_extract (insn);
5491 /* We want constrain operands to treat this insn strictly in
5492 its validity determination, i.e., the way it would after reload
5493 has completed. */
5494 if (constrain_operands (code, 1))
5495 return insn;
5496 }
5497
5498 if (PREV_INSN (insn))
5499 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
5500 if (NEXT_INSN (insn))
5501 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
5502
5503 /* If that failed, we must use a conservative two-insn sequence.
5504 use move to copy constant, MEM, or pseudo register to the reload
af929c62
RK
5505 register since "move" will be able to handle an arbitrary operand,
5506 unlike add which can't, in general. Then add the registers.
32131a9c
RK
5507
5508 If there is another way to do this for a specific machine, a
5509 DEFINE_PEEPHOLE should be specified that recognizes the sequence
5510 we emit below. */
5511
af929c62
RK
5512 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
5513 || (GET_CODE (op1) == REG
5514 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
5515 tem = op0, op0 = op1, op1 = tem;
32131a9c 5516
af929c62
RK
5517 emit_insn_before (gen_move_insn (reloadreg, op0), before_insn);
5518 emit_insn_before (gen_add2_insn (reloadreg, op1), before_insn);
32131a9c
RK
5519 }
5520
5521 /* If IN is a simple operand, use gen_move_insn. */
5522 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
278f7e9e
RS
5523 {
5524 rtx x = emit_insn_before (gen_move_insn (reloadreg, in), before_insn);
3b998c11
RK
5525 if (is_asm
5526 && (recog_memoized (x) < 0
5527 || (insn_extract (x), ! constrain_operands (INSN_CODE (x), 1))))
278f7e9e
RS
5528 {
5529 delete_insn (x);
5530 return 0;
5531 }
5532 }
32131a9c
RK
5533
5534#ifdef HAVE_reload_load_address
5535 else if (HAVE_reload_load_address)
278f7e9e
RS
5536 {
5537 rtx x = emit_insn_before (gen_reload_load_address (reloadreg, in),
5538 before_insn);
3b998c11
RK
5539 if (is_asm
5540 && (recog_memoized (x) < 0
5541 || (insn_extract (x), ! constrain_operands (INSN_CODE (x), 1))))
278f7e9e
RS
5542 {
5543 delete_insn (x);
5544 return 0;
5545 }
5546 }
32131a9c
RK
5547#endif
5548
5549 /* Otherwise, just write (set REGLOADREG IN) and hope for the best. */
5550 else
278f7e9e
RS
5551 {
5552 rtx x = emit_insn_before (gen_rtx (SET, VOIDmode, reloadreg, in),
5553 before_insn);
3b998c11
RK
5554 if (is_asm
5555 && (recog_memoized (x) < 0
5556 || (insn_extract (x), ! constrain_operands (INSN_CODE (x), 1))))
278f7e9e
RS
5557 {
5558 delete_insn (x);
5559 return 0;
5560 }
5561 }
32131a9c
RK
5562
5563 /* Return the first insn emitted.
5564 We can not just return PREV_INSN (before_insn), because there may have
5565 been multiple instructions emitted. Also note that gen_move_insn may
5566 emit more than one insn itself, so we can not assume that there is one
5567 insn emitted per emit_insn_before call. */
5568
5569 return NEXT_INSN (prev_insn);
5570}
5571\f
5572/* Delete a previously made output-reload
5573 whose result we now believe is not needed.
5574 First we double-check.
5575
5576 INSN is the insn now being processed.
5577 OUTPUT_RELOAD_INSN is the insn of the output reload.
5578 J is the reload-number for this insn. */
5579
5580static void
5581delete_output_reload (insn, j, output_reload_insn)
5582 rtx insn;
5583 int j;
5584 rtx output_reload_insn;
5585{
5586 register rtx i1;
5587
5588 /* Get the raw pseudo-register referred to. */
5589
5590 rtx reg = reload_in[j];
5591 while (GET_CODE (reg) == SUBREG)
5592 reg = SUBREG_REG (reg);
5593
5594 /* If the pseudo-reg we are reloading is no longer referenced
5595 anywhere between the store into it and here,
5596 and no jumps or labels intervene, then the value can get
5597 here through the reload reg alone.
5598 Otherwise, give up--return. */
5599 for (i1 = NEXT_INSN (output_reload_insn);
5600 i1 != insn; i1 = NEXT_INSN (i1))
5601 {
5602 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
5603 return;
5604 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
5605 && reg_mentioned_p (reg, PATTERN (i1)))
5606 return;
5607 }
5608
5609 /* If this insn will store in the pseudo again,
5610 the previous store can be removed. */
5611 if (reload_out[j] == reload_in[j])
5612 delete_insn (output_reload_insn);
5613
5614 /* See if the pseudo reg has been completely replaced
5615 with reload regs. If so, delete the store insn
5616 and forget we had a stack slot for the pseudo. */
5617 else if (reg_n_deaths[REGNO (reg)] == 1
5618 && reg_basic_block[REGNO (reg)] >= 0
5619 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
5620 {
5621 rtx i2;
5622
5623 /* We know that it was used only between here
5624 and the beginning of the current basic block.
5625 (We also know that the last use before INSN was
5626 the output reload we are thinking of deleting, but never mind that.)
5627 Search that range; see if any ref remains. */
5628 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
5629 {
d445b551
RK
5630 rtx set = single_set (i2);
5631
32131a9c
RK
5632 /* Uses which just store in the pseudo don't count,
5633 since if they are the only uses, they are dead. */
d445b551 5634 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
5635 continue;
5636 if (GET_CODE (i2) == CODE_LABEL
5637 || GET_CODE (i2) == JUMP_INSN)
5638 break;
5639 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
5640 && reg_mentioned_p (reg, PATTERN (i2)))
5641 /* Some other ref remains;
5642 we can't do anything. */
5643 return;
5644 }
5645
5646 /* Delete the now-dead stores into this pseudo. */
5647 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
5648 {
d445b551
RK
5649 rtx set = single_set (i2);
5650
5651 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
5652 delete_insn (i2);
5653 if (GET_CODE (i2) == CODE_LABEL
5654 || GET_CODE (i2) == JUMP_INSN)
5655 break;
5656 }
5657
5658 /* For the debugging info,
5659 say the pseudo lives in this reload reg. */
5660 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
5661 alter_reg (REGNO (reg), -1);
5662 }
5663}
5664
5665\f
a8fdc208 5666/* Output reload-insns to reload VALUE into RELOADREG.
32131a9c
RK
5667 VALUE is a autoincrement or autodecrement RTX whose operand
5668 is a register or memory location;
5669 so reloading involves incrementing that location.
5670
5671 INC_AMOUNT is the number to increment or decrement by (always positive).
5672 This cannot be deduced from VALUE.
5673
5674 INSN is the insn before which the new insns should be emitted.
5675
5676 The return value is the first of the insns emitted. */
5677
5678static rtx
5679inc_for_reload (reloadreg, value, inc_amount, insn)
5680 rtx reloadreg;
5681 rtx value;
5682 int inc_amount;
5683 rtx insn;
5684{
5685 /* REG or MEM to be copied and incremented. */
5686 rtx incloc = XEXP (value, 0);
5687 /* Nonzero if increment after copying. */
5688 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
0009eff2
RK
5689 rtx prev = PREV_INSN (insn);
5690 rtx inc;
5691 rtx add_insn;
5692 int code;
32131a9c
RK
5693
5694 /* No hard register is equivalent to this register after
5695 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
5696 we could inc/dec that register as well (maybe even using it for
5697 the source), but I'm not sure it's worth worrying about. */
5698 if (GET_CODE (incloc) == REG)
5699 reg_last_reload_reg[REGNO (incloc)] = 0;
5700
5701 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
5702 inc_amount = - inc_amount;
5703
0009eff2
RK
5704 inc = gen_rtx (CONST_INT, VOIDmode, inc_amount);
5705
5706 /* If this is post-increment, first copy the location to the reload reg. */
5707 if (post)
5708 emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
5709
5710 /* See if we can directly increment INCLOC. Use a method similar to that
5711 in gen_input_reload. */
5712
5713 add_insn = emit_insn_before (gen_rtx (SET, VOIDmode, incloc,
5714 gen_rtx (PLUS, GET_MODE (incloc),
5715 incloc, inc)), insn);
5716
5717 code = recog_memoized (add_insn);
5718 if (code >= 0)
32131a9c 5719 {
0009eff2
RK
5720 insn_extract (add_insn);
5721 if (constrain_operands (code, 1))
32131a9c 5722 {
0009eff2
RK
5723 /* If this is a pre-increment and we have incremented the value
5724 where it lives, copy the incremented value to RELOADREG to
5725 be used as an address. */
5726
5727 if (! post)
5728 emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
5729 return NEXT_INSN (prev);
32131a9c
RK
5730 }
5731 }
0009eff2
RK
5732
5733 if (PREV_INSN (add_insn))
5734 NEXT_INSN (PREV_INSN (add_insn)) = NEXT_INSN (add_insn);
5735 if (NEXT_INSN (add_insn))
5736 PREV_INSN (NEXT_INSN (add_insn)) = PREV_INSN (add_insn);
5737
5738 /* If couldn't do the increment directly, must increment in RELOADREG.
5739 The way we do this depends on whether this is pre- or post-increment.
5740 For pre-increment, copy INCLOC to the reload register, increment it
5741 there, then save back. */
5742
5743 if (! post)
5744 {
5745 emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
5746 emit_insn_before (gen_add2_insn (reloadreg, inc), insn);
5747 emit_insn_before (gen_move_insn (incloc, reloadreg), insn);
5748 }
32131a9c
RK
5749 else
5750 {
0009eff2
RK
5751 /* Postincrement.
5752 Because this might be a jump insn or a compare, and because RELOADREG
5753 may not be available after the insn in an input reload, we must do
5754 the incrementation before the insn being reloaded for.
5755
5756 We have already copied INCLOC to RELOADREG. Increment the copy in
5757 RELOADREG, save that back, then decrement RELOADREG so it has
5758 the original value. */
5759
5760 emit_insn_before (gen_add2_insn (reloadreg, inc), insn);
5761 emit_insn_before (gen_move_insn (incloc, reloadreg), insn);
5762 emit_insn_before (gen_add2_insn (reloadreg,
5763 gen_rtx (CONST_INT, VOIDmode,
5764 -inc_amount)),
5765 insn);
32131a9c 5766 }
0009eff2
RK
5767
5768 return NEXT_INSN (prev);
32131a9c
RK
5769}
5770\f
5771/* Return 1 if we are certain that the constraint-string STRING allows
5772 the hard register REG. Return 0 if we can't be sure of this. */
5773
5774static int
5775constraint_accepts_reg_p (string, reg)
5776 char *string;
5777 rtx reg;
5778{
5779 int value = 0;
5780 int regno = true_regnum (reg);
5781 int c;
5782
5783 /* Initialize for first alternative. */
5784 value = 0;
5785 /* Check that each alternative contains `g' or `r'. */
5786 while (1)
5787 switch (c = *string++)
5788 {
5789 case 0:
5790 /* If an alternative lacks `g' or `r', we lose. */
5791 return value;
5792 case ',':
5793 /* If an alternative lacks `g' or `r', we lose. */
5794 if (value == 0)
5795 return 0;
5796 /* Initialize for next alternative. */
5797 value = 0;
5798 break;
5799 case 'g':
5800 case 'r':
5801 /* Any general reg wins for this alternative. */
5802 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
5803 value = 1;
5804 break;
5805 default:
5806 /* Any reg in specified class wins for this alternative. */
5807 {
0009eff2 5808 enum reg_class class = REG_CLASS_FROM_LETTER (c);
32131a9c 5809
0009eff2 5810 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
32131a9c
RK
5811 value = 1;
5812 }
5813 }
5814}
5815\f
d445b551
RK
5816/* Return the number of places FIND appears within X, but don't count
5817 an occurrence if some SET_DEST is FIND. */
32131a9c
RK
5818
5819static int
5820count_occurrences (x, find)
5821 register rtx x, find;
5822{
5823 register int i, j;
5824 register enum rtx_code code;
5825 register char *format_ptr;
5826 int count;
5827
5828 if (x == find)
5829 return 1;
5830 if (x == 0)
5831 return 0;
5832
5833 code = GET_CODE (x);
5834
5835 switch (code)
5836 {
5837 case REG:
5838 case QUEUED:
5839 case CONST_INT:
5840 case CONST_DOUBLE:
5841 case SYMBOL_REF:
5842 case CODE_LABEL:
5843 case PC:
5844 case CC0:
5845 return 0;
d445b551
RK
5846
5847 case SET:
5848 if (SET_DEST (x) == find)
5849 return count_occurrences (SET_SRC (x), find);
5850 break;
32131a9c
RK
5851 }
5852
5853 format_ptr = GET_RTX_FORMAT (code);
5854 count = 0;
5855
5856 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5857 {
5858 switch (*format_ptr++)
5859 {
5860 case 'e':
5861 count += count_occurrences (XEXP (x, i), find);
5862 break;
5863
5864 case 'E':
5865 if (XVEC (x, i) != NULL)
5866 {
5867 for (j = 0; j < XVECLEN (x, i); j++)
5868 count += count_occurrences (XVECEXP (x, i, j), find);
5869 }
5870 break;
5871 }
5872 }
5873 return count;
5874}
This page took 0.635929 seconds and 5 git commands to generate.