1/* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21#include "config.h"
22#include "rtl.h"
23#include "obstack.h"
24#include "insn-config.h"
25#include "insn-flags.h"
26#include "insn-codes.h"
27#include "flags.h"
28#include "expr.h"
29#include "regs.h"
30#include "hard-reg-set.h"
31#include "reload.h"
32#include "recog.h"
33#include "basic-block.h"
34#include "output.h"
35#include <stdio.h>
36
37/* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, creating additional reload needs, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
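/* A minimal illustration (hypothetical helper, not part of this pass) of how
   the result of register allocation is consulted: reg_renumber[R] is the
   hard register assigned to pseudo R, or -1 if R received none and must
   therefore live in memory or be copied into a reload reg at each use.  */
#if 0
static int
pseudo_has_hard_reg_example (int regno)
{
  return reg_renumber[regno] >= 0;
}
#endif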
69\f
70/* During reload_as_needed, element N contains a REG rtx for the hard reg
71 into which pseudo reg N has been reloaded (perhaps for a previous insn). */
72static rtx *reg_last_reload_reg;
73
74/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
75 for an output reload that stores into reg N. */
76static char *reg_has_output_reload;
77
78/* Indicates which hard regs are reload-registers for an output reload
79 in the current insn. */
80static HARD_REG_SET reg_is_output_reload;
81
82/* Element N is the constant value to which pseudo reg N is equivalent,
83 or zero if pseudo reg N is not equivalent to a constant.
84 find_reloads looks at this in order to replace pseudo reg N
85 with the constant it stands for. */
86rtx *reg_equiv_constant;
87
88/* Element N is a memory location to which pseudo reg N is equivalent,
89 prior to any register elimination (such as frame pointer to stack
90 pointer). Depending on whether or not it is a valid address, this value
91 is transferred to either reg_equiv_address or reg_equiv_mem. */
92static rtx *reg_equiv_memory_loc;
93
94/* Element N is the address of stack slot to which pseudo reg N is equivalent.
95 This is used when the address is not valid as a memory address
96 (because its displacement is too big for the machine.) */
97rtx *reg_equiv_address;
98
99/* Element N is the memory slot to which pseudo reg N is equivalent,
100 or zero if pseudo reg N is not equivalent to a memory slot. */
101rtx *reg_equiv_mem;
102
103/* Widest width in which each pseudo reg is referred to (via subreg). */
104static int *reg_max_ref_width;
105
106/* Element N is the insn that initialized reg N from its equivalent
107 constant or memory slot. */
108static rtx *reg_equiv_init;
109
110/* During reload_as_needed, element N contains the last pseudo regno
111 reloaded into the Nth reload register. This vector is in parallel
112 with spill_regs. If that pseudo reg occupied more than one register,
113 reg_reloaded_contents points to that pseudo for each spill register in
114 use; all of these must remain set for an inheritance to occur. */
115static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
116
117/* During reload_as_needed, element N contains the insn for which
118 the Nth reload register was last used. This vector is in parallel
119 with spill_regs, and its contents are significant only when
120 reg_reloaded_contents is significant. */
121static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
122
123/* Number of spill-regs so far; number of valid elements of spill_regs. */
124static int n_spills;
125
126/* In parallel with spill_regs, contains REG rtx's for those regs.
127 Holds the last rtx used for any given reg, or 0 if it has never
128 been used for spilling yet. This rtx is reused, provided it has
129 the proper mode. */
130static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
131
132/* In parallel with spill_regs, contains nonzero for a spill reg
133 that was stored after the last time it was used.
134 The precise value is the insn generated to do the store. */
135static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
136
137/* This table is the inverse mapping of spill_regs:
138 indexed by hard reg number,
139 it contains the position of that reg in spill_regs,
140 or -1 for something that is not in spill_regs. */
141static short spill_reg_order[FIRST_PSEUDO_REGISTER];
142
143/* This reg set indicates registers that may not be used for retrying global
144 allocation. The registers that may not be used include all spill registers
145 and the frame pointer (if we are using one). */
146HARD_REG_SET forbidden_regs;
147
148/* This reg set indicates registers that are not good for spill registers.
149 They will not be used to complete groups of spill registers. This includes
150 all fixed registers, registers that may be eliminated, and registers
151 explicitly used in the rtl.
152
153 (spill_reg_order prevents these registers from being used to start a
154 group.) */
155static HARD_REG_SET bad_spill_regs;
156
157/* Describes order of use of registers for reloading
158 of spilled pseudo-registers. `n_spills' is the number of
159 elements that are actually valid; new ones are added at the end. */
160static short spill_regs[FIRST_PSEUDO_REGISTER];
161
162/* Describes order of preference for putting regs into spill_regs.
163 Contains the numbers of all the hard regs, in order most preferred first.
164 This order is different for each function.
165 It is set up by order_regs_for_reload.
166 Empty elements at the end contain -1. */
167static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
168
169/* 1 for a hard register that appears explicitly in the rtl
170 (for example, function value registers, special registers
171 used by insns, structure value pointer registers). */
172static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
173
174/* Indicates if a register was counted against the need for
175 groups. 0 means it can count against max_nongroup instead. */
176static HARD_REG_SET counted_for_groups;
177
178/* Indicates if a register was counted against the need for
179 non-groups. 0 means it can become part of a new group.
180 During choose_reload_regs, 1 here means don't use this reg
181 as part of a group, even if it seems to be otherwise ok. */
182static HARD_REG_SET counted_for_nongroups;
183
184/* Nonzero if indirect addressing is supported on the machine; this means
185 that spilling (REG n) does not require reloading it into a register in
186 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
187 value indicates the level of indirect addressing supported, e.g., two
188 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
189 a hard register. */
190
191static char spill_indirect_levels;
192
193/* Nonzero if indirect addressing is supported when the innermost MEM is
194 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
195 which these are valid is the same as spill_indirect_levels, above. */
196
197char indirect_symref_ok;
198
199/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
200
201char double_reg_address_ok;
202
203/* Record the stack slot for each spilled hard register. */
204
205static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
206
207/* Width allocated so far for that stack slot. */
208
209static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
210
211/* Indexed by register class and basic block number, nonzero if there is
212 any need for a spill register of that class in that basic block.
213 The pointer is 0 if we did stupid allocation and don't know
214 the structure of basic blocks. */
215
216char *basic_block_needs[N_REG_CLASSES];
217
218/* First uid used by insns created by reload in this function.
219 Used in find_equiv_reg. */
220int reload_first_uid;
221
222/* Flag set by local-alloc or global-alloc if anything is live in
223 a call-clobbered reg across calls. */
224
225int caller_save_needed;
226
227/* Set to 1 while reload_as_needed is operating.
228 Required by some machines to handle any generated moves differently. */
229
230int reload_in_progress = 0;
231
232/* These arrays record the insn_code of insns that may be needed to
233 perform input and output reloads of special objects. They provide a
234 place to pass a scratch register. */
235
236enum insn_code reload_in_optab[NUM_MACHINE_MODES];
237enum insn_code reload_out_optab[NUM_MACHINE_MODES];
238
239/* This obstack is used for allocation of rtl during register elimination.
240 The allocated storage can be freed once find_reloads has processed the
241 insn. */
242
243struct obstack reload_obstack;
244char *reload_firstobj;
245
246#define obstack_chunk_alloc xmalloc
247#define obstack_chunk_free free
248
249extern int xmalloc ();
250extern void free ();
251
252/* List of labels that must never be deleted. */
253extern rtx forced_labels;
254\f
255/* This structure is used to record information about register eliminations.
256 Each array entry describes one possible way of eliminating a register
257 in favor of another. If there is more than one way of eliminating a
258 particular register, the most preferred should be specified first. */
259
260static struct elim_table
261{
262 int from; /* Register number to be eliminated. */
263 int to; /* Register number used as replacement. */
264 int initial_offset; /* Initial difference between values. */
265 int can_eliminate; /* Non-zero if this elimination can be done. */
266 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
267 insns made by reload. */
268 int offset; /* Current offset between the two regs. */
269 int max_offset; /* Maximum offset between the two regs. */
270 int previous_offset; /* Offset at end of previous insn. */
271 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
272 rtx from_rtx; /* REG rtx for the register to be eliminated.
273 We cannot simply compare the number since
274 we might then spuriously replace a hard
275 register corresponding to a pseudo
276 assigned to the reg to be eliminated. */
277 rtx to_rtx; /* REG rtx for the replacement. */
278} reg_eliminate[] =
279
280/* If a set of eliminable registers was specified, define the table from it.
281 Otherwise, default to the normal case of the frame pointer being
282 replaced by the stack pointer. */
283
284#ifdef ELIMINABLE_REGS
285 ELIMINABLE_REGS;
286#else
287 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
288#endif
289
290#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
291
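/* For illustration only: a typical ELIMINABLE_REGS definition in a target's
   configuration header has roughly the shape sketched below (the exact
   entries are machine-dependent and this sketch is not taken from any
   particular target).  Each pair is tried in order, most preferred first.  */
#if 0
#define ELIMINABLE_REGS					\
{{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },	\
 { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM },	\
 { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}
#endif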
292/* Record the number of pending eliminations that have an offset not equal
293 to their initial offset. If non-zero, we use a new copy of each
294 replacement result in any insns encountered. */
295static int num_not_at_initial_offset;
296
297/* Count the number of registers that we may be able to eliminate. */
298static int num_eliminable;
299
300/* For each label, we record the offset of each elimination. If we reach
301 a label by more than one path and an offset differs, we cannot do the
302 elimination. This information is indexed by the number of the label.
303 The first table is an array of flags that records whether we have yet
304 encountered a label and the second table is an array of arrays, one
305 entry in the latter array for each elimination. */
306
307static char *offsets_known_at;
308static int (*offsets_at)[NUM_ELIMINABLE_REGS];
309
310/* Number of labels in the current function. */
311
312static int num_labels;
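/* A minimal sketch (hypothetical helper, not used by the pass) of how the
   two label tables above are read.  Both are indexed by CODE_LABEL_NUMBER;
   the inner index of offsets_at selects one entry of reg_eliminate.  */
#if 0
static int
label_offset_example (rtx label, int elim_index)
{
  int labelno = CODE_LABEL_NUMBER (label);

  if (! offsets_known_at[labelno])
    return 0;			/* Label not yet reached; offset unknown.  */

  return offsets_at[labelno][elim_index];
}
#endif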
313\f
314void mark_home_live ();
315static void count_possible_groups ();
316static int possible_group_p ();
317static void scan_paradoxical_subregs ();
318static void reload_as_needed ();
319static int modes_equiv_for_class_p ();
320static void alter_reg ();
321static void delete_dead_insn ();
322static int new_spill_reg ();
323static void set_label_offsets ();
324static int eliminate_regs_in_insn ();
325static void mark_not_eliminable ();
326static int spill_hard_reg ();
327static void choose_reload_regs ();
328static void emit_reload_insns ();
329static void delete_output_reload ();
330static void forget_old_reloads_1 ();
331static void order_regs_for_reload ();
332static rtx inc_for_reload ();
333static int constraint_accepts_reg_p ();
334static int count_occurrences ();
335
336extern void remove_death ();
337extern rtx adj_offsettable_operand ();
338extern rtx form_sum ();
339\f
340void
341init_reload ()
342{
343 register int i;
344
345 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
346 Set spill_indirect_levels to the number of levels such addressing is
347 permitted, zero if it is not permitted at all. */
348
349 register rtx tem
350 = gen_rtx (MEM, Pmode,
351 gen_rtx (PLUS, Pmode,
352 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
353 gen_rtx (CONST_INT, VOIDmode, 4)));
354 spill_indirect_levels = 0;
355
356 while (memory_address_p (QImode, tem))
357 {
358 spill_indirect_levels++;
359 tem = gen_rtx (MEM, Pmode, tem);
360 }
361
362 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
363
364 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
365 indirect_symref_ok = memory_address_p (QImode, tem);
366
367 /* See if reg+reg is a valid (and offsettable) address. */
368
369 tem = gen_rtx (PLUS, Pmode,
370 gen_rtx (REG, Pmode, FRAME_POINTER_REGNUM),
371 gen_rtx (REG, Pmode, FRAME_POINTER_REGNUM));
372 /* This way, we make sure that reg+reg is an offsettable address. */
373 tem = plus_constant (tem, 4);
374
375 double_reg_address_ok = memory_address_p (QImode, tem);
376
377 /* Initialize obstack for our rtl allocation. */
378 gcc_obstack_init (&reload_obstack);
379 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
380
381#ifdef HAVE_SECONDARY_RELOADS
382
383 /* Initialize the optabs for doing special input and output reloads. */
384
385 for (i = 0; i < NUM_MACHINE_MODES; i++)
386 reload_in_optab[i] = reload_out_optab[i] = CODE_FOR_nothing;
387
388#ifdef HAVE_reload_inqi
389 if (HAVE_reload_inqi)
390 reload_in_optab[(int) QImode] = CODE_FOR_reload_inqi;
391#endif
392#ifdef HAVE_reload_inhi
393 if (HAVE_reload_inhi)
394 reload_in_optab[(int) HImode] = CODE_FOR_reload_inhi;
395#endif
396#ifdef HAVE_reload_insi
397 if (HAVE_reload_insi)
398 reload_in_optab[(int) SImode] = CODE_FOR_reload_insi;
399#endif
400#ifdef HAVE_reload_indi
401 if (HAVE_reload_indi)
402 reload_in_optab[(int) DImode] = CODE_FOR_reload_indi;
403#endif
404#ifdef HAVE_reload_inti
405 if (HAVE_reload_inti)
406 reload_in_optab[(int) TImode] = CODE_FOR_reload_inti;
407#endif
408#ifdef HAVE_reload_insf
409 if (HAVE_reload_insf)
410 reload_in_optab[(int) SFmode] = CODE_FOR_reload_insf;
411#endif
412#ifdef HAVE_reload_indf
413 if (HAVE_reload_indf)
414 reload_in_optab[(int) DFmode] = CODE_FOR_reload_indf;
415#endif
416#ifdef HAVE_reload_inxf
417 if (HAVE_reload_inxf)
418 reload_in_optab[(int) XFmode] = CODE_FOR_reload_inxf;
419#endif
420#ifdef HAVE_reload_intf
421 if (HAVE_reload_intf)
422 reload_in_optab[(int) TFmode] = CODE_FOR_reload_intf;
423#endif
424
425#ifdef HAVE_reload_outqi
426 if (HAVE_reload_outqi)
427 reload_out_optab[(int) QImode] = CODE_FOR_reload_outqi;
428#endif
429#ifdef HAVE_reload_outhi
430 if (HAVE_reload_outhi)
431 reload_out_optab[(int) HImode] = CODE_FOR_reload_outhi;
432#endif
433#ifdef HAVE_reload_outsi
434 if (HAVE_reload_outsi)
435 reload_out_optab[(int) SImode] = CODE_FOR_reload_outsi;
436#endif
437#ifdef HAVE_reload_outdi
438 if (HAVE_reload_outdi)
439 reload_out_optab[(int) DImode] = CODE_FOR_reload_outdi;
440#endif
441#ifdef HAVE_reload_outti
442 if (HAVE_reload_outti)
443 reload_out_optab[(int) TImode] = CODE_FOR_reload_outti;
444#endif
445#ifdef HAVE_reload_outsf
446 if (HAVE_reload_outsf)
447 reload_out_optab[(int) SFmode] = CODE_FOR_reload_outsf;
448#endif
449#ifdef HAVE_reload_outdf
450 if (HAVE_reload_outdf)
451 reload_out_optab[(int) DFmode] = CODE_FOR_reload_outdf;
452#endif
453#ifdef HAVE_reload_outxf
454 if (HAVE_reload_outxf)
455 reload_out_optab[(int) XFmode] = CODE_FOR_reload_outxf;
456#endif
457#ifdef HAVE_reload_outtf
458 if (HAVE_reload_outtf)
459 reload_out_optab[(int) TFmode] = CODE_FOR_reload_outtf;
460#endif
461
462#endif /* HAVE_SECONDARY_RELOADS */
463
464}
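/* Illustration only (hypothetical helper): the kind of question the value
   computed above answers.  An address containing MEM_NESTING levels of
   memory indirection is usable directly only if the machine supports at
   least that many levels of indirect addressing.  */
#if 0
static int
indirect_address_ok_example (int mem_nesting)
{
  return mem_nesting <= spill_indirect_levels;
}
#endif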
465
466/* Main entry point for the reload pass, and only entry point
467 in this file.
468
469 FIRST is the first insn of the function being compiled.
470
471 GLOBAL nonzero means we were called from global_alloc
472 and should attempt to reallocate any pseudoregs that we
473 displace from hard regs we will use for reloads.
474 If GLOBAL is zero, we do not have enough information to do that,
475 so any pseudo reg that is spilled must go to the stack.
476
477 DUMPFILE is the global-reg debugging dump file stream, or 0.
478 If it is nonzero, messages are written to it to describe
479 which registers are seized as reload regs, which pseudo regs
480 are spilled from them, and where the pseudo regs are reallocated to. */
481
482void
483reload (first, global, dumpfile)
484 rtx first;
485 int global;
486 FILE *dumpfile;
487{
488 register int class;
489 register int i;
490 register rtx insn;
491 register struct elim_table *ep;
492
493 int something_changed;
494 int something_needs_reloads;
495 int something_needs_elimination;
496 int new_basic_block_needs;
497 enum reg_class caller_save_spill_class = NO_REGS;
498 int caller_save_group_size = 1;
499
500 /* The basic block number currently being processed for INSN. */
501 int this_block;
502
503 /* Make sure even insns with volatile mem refs are recognizable. */
504 init_recog ();
505
506 /* Enable find_equiv_reg to distinguish insns made by reload. */
507 reload_first_uid = get_max_uid ();
508
509 for (i = 0; i < N_REG_CLASSES; i++)
510 basic_block_needs[i] = 0;
511
512 /* Remember which hard regs appear explicitly
513 before we merge into `regs_ever_live' the ones in which
514 pseudo regs have been allocated. */
515 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
516
517 /* We don't have a stack slot for any spill reg yet. */
518 bzero (spill_stack_slot, sizeof spill_stack_slot);
519 bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);
520
521 /* Initialize the save area information for caller-save, in case some
522 are needed. */
523 init_save_areas ();
524
525 /* Compute which hard registers are now in use
526 as homes for pseudo registers.
527 This is done here rather than (eg) in global_alloc
528 because this point is reached even if not optimizing. */
529
530 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
531 mark_home_live (i);
532
533 /* Make sure that the last insn in the chain
534 is not something that needs reloading. */
535 emit_note (0, NOTE_INSN_DELETED);
536
537 /* Find all the pseudo registers that didn't get hard regs
538 but do have known equivalent constants or memory slots.
539 These include parameters (known equivalent to parameter slots)
540 and cse'd or loop-moved constant memory addresses.
541
542 Record constant equivalents in reg_equiv_constant
543 so they will be substituted by find_reloads.
544 Record memory equivalents in reg_equiv_memory_loc so they can
545 be substituted eventually by altering the REG-rtx's. */
546
547 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
548 bzero (reg_equiv_constant, max_regno * sizeof (rtx));
549 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
550 bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
551 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
552 bzero (reg_equiv_mem, max_regno * sizeof (rtx));
553 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
554 bzero (reg_equiv_init, max_regno * sizeof (rtx));
555 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
556 bzero (reg_equiv_address, max_regno * sizeof (rtx));
557 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
558 bzero (reg_max_ref_width, max_regno * sizeof (int));
559
560 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
561 Also find all paradoxical subregs
562 and find largest such for each pseudo. */
563
564 for (insn = first; insn; insn = NEXT_INSN (insn))
565 {
566 rtx set = single_set (insn);
567
568 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
569 {
570 rtx note = find_reg_note (insn, REG_EQUIV, 0);
571 if (note
572#ifdef LEGITIMATE_PIC_OPERAND_P
573 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
574 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
575#endif
576 )
577 {
578 rtx x = XEXP (note, 0);
579 i = REGNO (SET_DEST (set));
580 if (i > LAST_VIRTUAL_REGISTER)
581 {
582 if (GET_CODE (x) == MEM)
583 reg_equiv_memory_loc[i] = x;
584 else if (CONSTANT_P (x))
585 {
586 if (LEGITIMATE_CONSTANT_P (x))
587 reg_equiv_constant[i] = x;
588 else
589 reg_equiv_memory_loc[i]
590 = force_const_mem (GET_MODE (SET_DEST (set)), x);
591 }
592 else
593 continue;
594
595 /* If this register is being made equivalent to a MEM
596 and the MEM is not SET_SRC, the equivalencing insn
597 is one with the MEM as a SET_DEST and it occurs later.
598 So don't mark this insn now. */
599 if (GET_CODE (x) != MEM
600 || rtx_equal_p (SET_SRC (set), x))
601 reg_equiv_init[i] = insn;
602 }
603 }
604 }
605
606 /* If this insn is setting a MEM from a register equivalent to it,
607 this is the equivalencing insn. */
608 else if (set && GET_CODE (SET_DEST (set)) == MEM
609 && GET_CODE (SET_SRC (set)) == REG
610 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
611 && rtx_equal_p (SET_DEST (set),
612 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
613 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
614
615 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
616 scan_paradoxical_subregs (PATTERN (insn));
617 }
618
619 /* Does this function require a frame pointer? */
620
621 frame_pointer_needed = (! flag_omit_frame_pointer
622#ifdef EXIT_IGNORE_STACK
623 /* ?? If EXIT_IGNORE_STACK is set, we will not save
624 and restore sp for alloca. So we can't eliminate
625 the frame pointer in that case. At some point,
626 we should improve this by emitting the
627 sp-adjusting insns for this case. */
628 || (current_function_calls_alloca
629 && EXIT_IGNORE_STACK)
630#endif
631 || FRAME_POINTER_REQUIRED);
632
633 num_eliminable = 0;
634
635 /* Initialize the table of registers to eliminate. The way we do this
636 depends on how the eliminable registers were defined. */
637#ifdef ELIMINABLE_REGS
638 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
639 {
640 ep->can_eliminate = ep->can_eliminate_previous
641 = (CAN_ELIMINATE (ep->from, ep->to)
642 && (ep->from != FRAME_POINTER_REGNUM || ! frame_pointer_needed));
643 }
644#else
645 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
646 = ! frame_pointer_needed;
647#endif
648
649 /* Count the number of eliminable registers and build the FROM and TO
650 REG rtx's. Note that code in gen_rtx will cause, e.g.,
651 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
652 We depend on this. */
653 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
654 {
655 num_eliminable += ep->can_eliminate;
656 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
657 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
658 }
659
660 num_labels = max_label_num () - get_first_label_num ();
661
662 /* Allocate the tables used to store offset information at labels. */
663 offsets_known_at = (char *) alloca (num_labels);
664 offsets_at
665 = (int (*)[NUM_ELIMINABLE_REGS])
666 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
667
668 offsets_known_at -= get_first_label_num ();
669 offsets_at -= get_first_label_num ();
670
671 /* Alter each pseudo-reg rtx to contain its hard reg number.
672 Assign stack slots to the pseudos that lack hard regs or equivalents.
673 Do not touch virtual registers. */
674
675 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
676 alter_reg (i, -1);
677
678 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
679 because the stack size may be a part of the offset computation for
680 register elimination. */
681 assign_stack_local (BLKmode, 0, 0);
682
683 /* If we have some registers we think can be eliminated, scan all insns to
684 see if there is an insn that sets one of these registers to something
685 other than itself plus a constant. If so, the register cannot be
686 eliminated. Doing this scan here eliminates an extra pass through the
687 main reload loop in the most common case where register elimination
688 cannot be done. */
689 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
690 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
691 || GET_CODE (insn) == CALL_INSN)
692 note_stores (PATTERN (insn), mark_not_eliminable);
693
694#ifndef REGISTER_CONSTRAINTS
695 /* If all the pseudo regs have hard regs,
696 except for those that are never referenced,
697 we know that no reloads are needed. */
698 /* But that is not true if there are register constraints, since
699 in that case some pseudos might be in the wrong kind of hard reg. */
700
701 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
702 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
703 break;
704
705 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
706 return;
707#endif
708
709 /* Compute the order of preference for hard registers to spill.
710 Store them by decreasing preference in potential_reload_regs. */
711
712 order_regs_for_reload ();
713
714 /* So far, no hard regs have been spilled. */
715 n_spills = 0;
716 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
717 spill_reg_order[i] = -1;
718
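  /* Illustrative consistency check (example values only): spill_reg_order is
     the inverse mapping of spill_regs.  If, say, spill_regs[0] == 6 and
     spill_regs[1] == 3 with n_spills == 2, then spill_reg_order[6] == 0,
     spill_reg_order[3] == 1, and every other element is -1.  */
#if 0
  for (i = 0; i < n_spills; i++)
    if (spill_reg_order[spill_regs[i]] != i)
      abort ();
#endif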
719 /* On most machines, we can't use any register explicitly used in the
720 rtl as a spill register. But on some, we have to. Those will have
721 taken care to keep the life of hard regs as short as possible. */
722
723#ifdef SMALL_REGISTER_CLASSES
724 CLEAR_HARD_REG_SET (forbidden_regs);
725#else
726 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
727#endif
728
729 /* Spill any hard regs that we know we can't eliminate. */
730 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
731 if (! ep->can_eliminate)
732 {
733 spill_hard_reg (ep->from, global, dumpfile, 1);
734 regs_ever_live[ep->from] = 1;
735 }
736
737 if (global)
738 for (i = 0; i < N_REG_CLASSES; i++)
739 {
740 basic_block_needs[i] = (char *)alloca (n_basic_blocks);
741 bzero (basic_block_needs[i], n_basic_blocks);
742 }
743
744 /* This loop scans the entire function each go-round
745 and repeats until one repetition spills no additional hard regs. */
746
747 /* This flag is set when a pseudo reg is spilled,
748 to require another pass. Note that getting an additional reload
749 reg does not necessarily imply any pseudo reg was spilled;
750 sometimes we find a reload reg that no pseudo reg was allocated in. */
751 something_changed = 1;
752 /* This flag is set if there are any insns that require reloading. */
753 something_needs_reloads = 0;
754 /* This flag is set if there are any insns that require register
755 eliminations. */
756 something_needs_elimination = 0;
757 while (something_changed)
758 {
759 rtx after_call = 0;
760
761 /* For each class, number of reload regs needed in that class.
762 This is the maximum over all insns of the needs in that class
763 of the individual insn. */
764 int max_needs[N_REG_CLASSES];
765 /* For each class, size of group of consecutive regs
766 that is needed for the reloads of this class. */
767 int group_size[N_REG_CLASSES];
768 /* For each class, max number of consecutive groups needed.
769 (Each group contains group_size[CLASS] consecutive registers.) */
770 int max_groups[N_REG_CLASSES];
771 /* For each class, max number needed of regs that don't belong
772 to any of the groups. */
773 int max_nongroups[N_REG_CLASSES];
774 /* For each class, the machine mode which requires consecutive
775 groups of regs of that class.
776 If two different modes ever require groups of one class,
777 they must be the same size and equally restrictive for that class,
778 otherwise we can't handle the complexity. */
779 enum machine_mode group_mode[N_REG_CLASSES];
780 rtx x;
781
782 something_changed = 0;
783 bzero (max_needs, sizeof max_needs);
784 bzero (max_groups, sizeof max_groups);
785 bzero (max_nongroups, sizeof max_nongroups);
786 bzero (group_size, sizeof group_size);
787 for (i = 0; i < N_REG_CLASSES; i++)
788 group_mode[i] = VOIDmode;
789
790 /* Keep track of which basic blocks are needing the reloads. */
791 this_block = 0;
792
793 /* Remember whether any element of basic_block_needs
794 changes from 0 to 1 in this pass. */
795 new_basic_block_needs = 0;
796
797 /* Reset all offsets on eliminable registers to their initial values. */
798#ifdef ELIMINABLE_REGS
799 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
800 {
801 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
802 ep->previous_offset = ep->offset
803 = ep->max_offset = ep->initial_offset;
804 }
805#else
806#ifdef INITIAL_FRAME_POINTER_OFFSET
807 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
808#else
809 if (!FRAME_POINTER_REQUIRED)
810 abort ();
811 reg_eliminate[0].initial_offset = 0;
812#endif
813 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
814 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
815#endif
816
817 num_not_at_initial_offset = 0;
818
819 bzero (&offsets_known_at[get_first_label_num ()], num_labels);
820
821 /* Set a known offset for each forced label to be at the initial offset
822 of each elimination. We do this because we assume that all
823 computed jumps occur from a location where each elimination is
824 at its initial offset. */
825
826 for (x = forced_labels; x; x = XEXP (x, 1))
827 if (XEXP (x, 0))
828 set_label_offsets (XEXP (x, 0), 0, 1);
829
830 /* For each pseudo register that has an equivalent location defined,
831 try to eliminate any eliminable registers (such as the frame pointer)
832 assuming initial offsets for the replacement register, which
833 is the normal case.
834
835 If the resulting location is directly addressable, substitute
836 the MEM we just got directly for the old REG.
837
838 If it is not addressable but is a constant or the sum of a hard reg
839 and constant, it is probably not addressable because the constant is
840 out of range. In that case, record the address; we will generate
841 hairy code to compute the address in a register each time it is
842 needed.
843
844 If the location is not addressable, but does not have one of the
845 above forms, assign a stack slot. We have to do this to avoid the
846 potential of producing lots of reloads if, e.g., a location involves
847 a pseudo that didn't get a hard register and has an equivalent memory
848 location that also involves a pseudo that didn't get a hard register.
849
850 Perhaps at some point we will improve reload_when_needed handling
851 so this problem goes away. But that's very hairy. */
852
853 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
854 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
855 {
856 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, 0);
857
858 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
859 XEXP (x, 0)))
860 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
861 else if (CONSTANT_P (XEXP (x, 0))
862 || (GET_CODE (XEXP (x, 0)) == PLUS
863 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
864 && (REGNO (XEXP (XEXP (x, 0), 0))
865 < FIRST_PSEUDO_REGISTER)
866 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
867 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
868 else
869 {
870 /* Make a new stack slot. Then indicate that something
871 changed so we go back and recompute offsets for
872 eliminable registers because the allocation of memory
873 below might change some offset. reg_equiv_{mem,address}
874 will be set up for this pseudo on the next pass around
875 the loop. */
876 reg_equiv_memory_loc[i] = 0;
877 reg_equiv_init[i] = 0;
878 alter_reg (i, -1);
879 something_changed = 1;
880 }
881 }
882
883 /* If we allocated another pseudo to the stack, redo elimination
884 bookkeeping. */
885 if (something_changed)
886 continue;
887
888 /* If caller-saves needs a group, initialize the group to include
889 the size and mode required for caller-saves. */
890
891 if (caller_save_group_size > 1)
892 {
893 group_mode[(int) caller_save_spill_class] = Pmode;
894 group_size[(int) caller_save_spill_class] = caller_save_group_size;
895 }
896
897 /* Compute the most additional registers needed by any instruction.
898 Collect information separately for each class of regs. */
899
900 for (insn = first; insn; insn = NEXT_INSN (insn))
901 {
902 if (global && this_block + 1 < n_basic_blocks
903 && insn == basic_block_head[this_block+1])
904 ++this_block;
905
906 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
907 might include REG_LABEL), we need to see what effects this
908 has on the known offsets at labels. */
909
910 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
911 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
912 && REG_NOTES (insn) != 0))
913 set_label_offsets (insn, insn, 0);
914
915 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
916 {
917 /* Nonzero means don't use a reload reg that overlaps
918 the place where a function value can be returned. */
919 rtx avoid_return_reg = 0;
920
921 rtx old_body = PATTERN (insn);
922 int old_code = INSN_CODE (insn);
923 rtx old_notes = REG_NOTES (insn);
924 int did_elimination = 0;
925
926 /* Initially, count RELOAD_OTHER reloads.
927 Later, merge in the other kinds. */
928 int insn_needs[N_REG_CLASSES];
929 int insn_groups[N_REG_CLASSES];
930 int insn_total_groups = 0;
931
932 /* Count RELOAD_FOR_INPUT_RELOAD_ADDRESS reloads. */
933 int insn_needs_for_inputs[N_REG_CLASSES];
934 int insn_groups_for_inputs[N_REG_CLASSES];
935 int insn_total_groups_for_inputs = 0;
936
937 /* Count RELOAD_FOR_OUTPUT_RELOAD_ADDRESS reloads. */
938 int insn_needs_for_outputs[N_REG_CLASSES];
939 int insn_groups_for_outputs[N_REG_CLASSES];
940 int insn_total_groups_for_outputs = 0;
941
942 /* Count RELOAD_FOR_OPERAND_ADDRESS reloads. */
943 int insn_needs_for_operands[N_REG_CLASSES];
944 int insn_groups_for_operands[N_REG_CLASSES];
945 int insn_total_groups_for_operands = 0;
946
947#if 0 /* This wouldn't work nowadays, since optimize_bit_field
948 looks for non-strict memory addresses. */
949 /* Optimization: a bit-field instruction whose field
950 happens to be a byte or halfword in memory
951 can be changed to a move instruction. */
952
953 if (GET_CODE (PATTERN (insn)) == SET)
954 {
955 rtx dest = SET_DEST (PATTERN (insn));
956 rtx src = SET_SRC (PATTERN (insn));
957
958 if (GET_CODE (dest) == ZERO_EXTRACT
959 || GET_CODE (dest) == SIGN_EXTRACT)
960 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
961 if (GET_CODE (src) == ZERO_EXTRACT
962 || GET_CODE (src) == SIGN_EXTRACT)
963 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
964 }
965#endif
966
967 /* If needed, eliminate any eliminable registers. */
968 if (num_eliminable)
969 did_elimination = eliminate_regs_in_insn (insn, 0);
970
971#ifdef SMALL_REGISTER_CLASSES
972 /* Set avoid_return_reg if this is an insn
973 that might use the value of a function call. */
974 if (GET_CODE (insn) == CALL_INSN)
975 {
976 if (GET_CODE (PATTERN (insn)) == SET)
977 after_call = SET_DEST (PATTERN (insn));
978 else if (GET_CODE (PATTERN (insn)) == PARALLEL
979 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
980 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
981 else
982 after_call = 0;
983 }
984 else if (after_call != 0
985 && !(GET_CODE (PATTERN (insn)) == SET
986 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
987 {
988 if (reg_mentioned_p (after_call, PATTERN (insn)))
989 avoid_return_reg = after_call;
990 after_call = 0;
991 }
992#endif /* SMALL_REGISTER_CLASSES */
993
994 /* Analyze the instruction. */
995 find_reloads (insn, 0, spill_indirect_levels, global,
996 spill_reg_order);
997
998 /* Remember for later shortcuts which insns had any reloads or
999 register eliminations.
1000
1001 One might think that it would be worthwhile to mark insns
1002 that need register replacements but not reloads, but this is
1003 not safe because find_reloads may do some manipulation of
1004 the insn (such as swapping commutative operands), which would
1005 be lost when we restore the old pattern after register
1006 replacement. So the actions of find_reloads must be redone in
1007 subsequent passes or in reload_as_needed.
1008
1009 However, it is safe to mark insns that need reloads
1010 but not register replacement. */
1011
1012 PUT_MODE (insn, (did_elimination ? QImode
1013 : n_reloads ? HImode
1014 : VOIDmode));
1015
1016 /* Discard any register replacements done. */
1017 if (did_elimination)
1018 {
1019 obstack_free (&reload_obstack, reload_firstobj);
1020 PATTERN (insn) = old_body;
1021 INSN_CODE (insn) = old_code;
1022 REG_NOTES (insn) = old_notes;
1023 something_needs_elimination = 1;
1024 }
1025
1026 /* If this insn has no reloads, we need not do anything except
1027 in the case of a CALL_INSN when we have caller-saves and
1028 caller-save needs reloads. */
1029
1030 if (n_reloads == 0
1031 && ! (GET_CODE (insn) == CALL_INSN
1032 && caller_save_spill_class != NO_REGS))
1033 continue;
1034
1035 something_needs_reloads = 1;
1036
1037 for (i = 0; i < N_REG_CLASSES; i++)
1038 {
1039 insn_needs[i] = 0, insn_groups[i] = 0;
1040 insn_needs_for_inputs[i] = 0, insn_groups_for_inputs[i] = 0;
1041 insn_needs_for_outputs[i] = 0, insn_groups_for_outputs[i] = 0;
1042 insn_needs_for_operands[i] = 0, insn_groups_for_operands[i] = 0;
1043 }
1044
1045 /* Count each reload once in every class
1046 containing the reload's own class. */
1047
1048 for (i = 0; i < n_reloads; i++)
1049 {
1050 register enum reg_class *p;
1051 int size;
1052 enum machine_mode mode;
1053 int *this_groups;
1054 int *this_needs;
1055 int *this_total_groups;
1056
1057 /* Don't count the dummy reloads, for which one of the
1058 regs mentioned in the insn can be used for reloading.
1059 Don't count optional reloads.
1060 Don't count reloads that got combined with others. */
1061 if (reload_reg_rtx[i] != 0
1062 || reload_optional[i] != 0
1063 || (reload_out[i] == 0 && reload_in[i] == 0
1064 && ! reload_secondary_p[i]))
1065 continue;
1066
1067 /* Decide which time-of-use to count this reload for. */
1068 switch (reload_when_needed[i])
1069 {
1070 case RELOAD_OTHER:
1071 case RELOAD_FOR_OUTPUT:
1072 case RELOAD_FOR_INPUT:
1073 this_needs = insn_needs;
1074 this_groups = insn_groups;
1075 this_total_groups = &insn_total_groups;
1076 break;
1077
1078 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
1079 this_needs = insn_needs_for_inputs;
1080 this_groups = insn_groups_for_inputs;
1081 this_total_groups = &insn_total_groups_for_inputs;
1082 break;
1083
1084 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
1085 this_needs = insn_needs_for_outputs;
1086 this_groups = insn_groups_for_outputs;
1087 this_total_groups = &insn_total_groups_for_outputs;
1088 break;
1089
1090 case RELOAD_FOR_OPERAND_ADDRESS:
1091 this_needs = insn_needs_for_operands;
1092 this_groups = insn_groups_for_operands;
1093 this_total_groups = &insn_total_groups_for_operands;
1094 break;
1095 }
1096
1097 mode = reload_inmode[i];
1098 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1099 mode = reload_outmode[i];
1100 size = CLASS_MAX_NREGS (reload_reg_class[i], mode);
1101 if (size > 1)
1102 {
1103 enum machine_mode other_mode, allocate_mode;
1104
1105 /* Count number of groups needed separately from
1106 number of individual regs needed. */
1107 this_groups[(int) reload_reg_class[i]]++;
1108 p = reg_class_superclasses[(int) reload_reg_class[i]];
1109 while (*p != LIM_REG_CLASSES)
1110 this_groups[(int) *p++]++;
1111 (*this_total_groups)++;
1112
1113 /* Record size and mode of a group of this class. */
1114 /* If more than one size group is needed,
1115 make all groups the largest needed size. */
1116 if (group_size[(int) reload_reg_class[i]] < size)
1117 {
1118 other_mode = group_mode[(int) reload_reg_class[i]];
1119 allocate_mode = mode;
1120
1121 group_size[(int) reload_reg_class[i]] = size;
1122 group_mode[(int) reload_reg_class[i]] = mode;
1123 }
1124 else
1125 {
1126 other_mode = mode;
1127 allocate_mode = group_mode[(int) reload_reg_class[i]];
1128 }
1129
1130 /* Crash if two dissimilar machine modes both need
1131 groups of consecutive regs of the same class. */
1132
1133 if (other_mode != VOIDmode
1134 && other_mode != allocate_mode
1135 && ! modes_equiv_for_class_p (allocate_mode,
1136 other_mode,
1137 reload_reg_class[i]))
1138 abort ();
1139 }
1140 else if (size == 1)
1141 {
1142 this_needs[(int) reload_reg_class[i]] += 1;
1143 p = reg_class_superclasses[(int) reload_reg_class[i]];
1144 while (*p != LIM_REG_CLASSES)
1145 this_needs[(int) *p++] += 1;
1146 }
1147 else
1148 abort ();
1149 }
1150
1151 /* All reloads have been counted for this insn;
1152 now merge the various times of use.
1153 This sets insn_needs, etc., to the maximum total number
1154 of registers needed at any point in this insn. */
1155
1156 for (i = 0; i < N_REG_CLASSES; i++)
1157 {
1158 int this_max;
1159 this_max = insn_needs_for_inputs[i];
1160 if (insn_needs_for_outputs[i] > this_max)
1161 this_max = insn_needs_for_outputs[i];
1162 if (insn_needs_for_operands[i] > this_max)
1163 this_max = insn_needs_for_operands[i];
1164 insn_needs[i] += this_max;
1165 this_max = insn_groups_for_inputs[i];
1166 if (insn_groups_for_outputs[i] > this_max)
1167 this_max = insn_groups_for_outputs[i];
1168 if (insn_groups_for_operands[i] > this_max)
1169 this_max = insn_groups_for_operands[i];
1170 insn_groups[i] += this_max;
1171 }
1172
1173 insn_total_groups += MAX (insn_total_groups_for_inputs,
1174 MAX (insn_total_groups_for_outputs,
1175 insn_total_groups_for_operands));
1176
1177 /* If this is a CALL_INSN and caller-saves will need
1178 a spill register, act as if the spill register is
1179 needed for this insn. However, the spill register
1180 can be used by any reload of this insn, so we only
1181 need do something if no need for that class has
1182 been recorded.
1183
1184 The assumption that every CALL_INSN will trigger a
1185 caller-save is highly conservative; however, the number
1186 of cases where caller-saves will need a spill register but
1187 a block containing a CALL_INSN won't need a spill register
1188 of that class should be quite rare.
1189
1190 If a group is needed, the size and mode of the group will
1191 have been set up at the beginning of this loop. */
1192
1193 if (GET_CODE (insn) == CALL_INSN
1194 && caller_save_spill_class != NO_REGS)
1195 {
1196 int *caller_save_needs
1197 = (caller_save_group_size > 1 ? insn_groups : insn_needs);
1198
1199 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1200 {
1201 register enum reg_class *p
1202 = reg_class_superclasses[(int) caller_save_spill_class];
1203
1204 caller_save_needs[(int) caller_save_spill_class]++;
1205
1206 while (*p != LIM_REG_CLASSES)
1207 caller_save_needs[(int) *p++] += 1;
1208 }
1209
1210 if (caller_save_group_size > 1)
1211 insn_total_groups = MAX (insn_total_groups, 1);
1212 }
1213
1214 /* Update the basic block needs. */
1215
1216 for (i = 0; i < N_REG_CLASSES; i++)
1217 if (global && (insn_needs[i] || insn_groups[i])
1218 && ! basic_block_needs[i][this_block])
1219 {
1220 new_basic_block_needs = 1;
1221 basic_block_needs[i][this_block] = 1;
1222 }
1223
1224#ifdef SMALL_REGISTER_CLASSES
1225 /* If this insn stores the value of a function call,
1226 and that value is in a register that has been spilled,
1227 and if the insn needs a reload in a class
1228 that might use that register as the reload register,
1229 then add an extra need in that class.
1230 This makes sure we have a register available that does
1231 not overlap the return value. */
1232 if (avoid_return_reg)
1233 {
1234 int regno = REGNO (avoid_return_reg);
1235 int nregs
1236 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1237 int r;
1238 int inc_groups = 0;
1239 for (r = regno; r < regno + nregs; r++)
1240 if (spill_reg_order[r] >= 0)
1241 for (i = 0; i < N_REG_CLASSES; i++)
1242 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1243 {
1244 if (insn_needs[i] > 0)
1245 insn_needs[i]++;
1246 if (insn_groups[i] > 0
1247 && nregs > 1)
1248 inc_groups = 1;
1249 }
1250 if (inc_groups)
1251 insn_groups[i]++;
1252 }
1253#endif /* SMALL_REGISTER_CLASSES */
1254
1255 /* For each class, collect maximum need of any insn. */
1256
1257 for (i = 0; i < N_REG_CLASSES; i++)
1258 {
1259 if (max_needs[i] < insn_needs[i])
1260 max_needs[i] = insn_needs[i];
1261 if (max_groups[i] < insn_groups[i])
1262 max_groups[i] = insn_groups[i];
1263 if (insn_total_groups > 0)
1264 if (max_nongroups[i] < insn_needs[i])
1265 max_nongroups[i] = insn_needs[i];
1266 }
1267 }
1268 /* Note that there is a continue statement above. */
1269 }
1270
1271 /* If we have caller-saves, set up the save areas and see if caller-save
1272 will need a spill register. */
1273
1274 if (caller_save_needed
1275 && ! setup_save_areas (&something_changed)
1276 && caller_save_spill_class == NO_REGS)
1277 {
1278 /* The class we will need depends on whether the machine
1279 supports the sum of two registers for an address; see
1280 find_address_reloads for details. */
1281
1282 caller_save_spill_class
1283 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1284 caller_save_group_size
1285 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1286 something_changed = 1;
1287 }
1288
1289 /* Now deduct from the needs for the registers already
1290 available (already spilled). */
1291
1292 CLEAR_HARD_REG_SET (counted_for_groups);
1293 CLEAR_HARD_REG_SET (counted_for_nongroups);
1294
1295 /* First find all regs alone in their class
1296 and count them (if desired) for non-groups.
1297 We would be screwed if a group took the only reg in a class
1298 for which a non-group reload is needed.
1299 (Note there is still a bug; if a class has 2 regs,
1300 both could be stolen by groups and we would lose the same way.
1301 With luck, no machine will need a nongroup in a 2-reg class.) */
1302
1303 for (i = 0; i < n_spills; i++)
1304 {
1305 register enum reg_class *p;
1306 class = (int) REGNO_REG_CLASS (spill_regs[i]);
1307
1308 if (reg_class_size[class] == 1 && max_nongroups[class] > 0)
1309 {
1310 max_needs[class]--;
1311 p = reg_class_superclasses[class];
1312 while (*p != LIM_REG_CLASSES)
1313 max_needs[(int) *p++]--;
1314
1315 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1316 max_nongroups[class]--;
1317 p = reg_class_superclasses[class];
1318 while (*p != LIM_REG_CLASSES)
1319 {
1320 if (max_nongroups[(int) *p] > 0)
1321 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1322 max_nongroups[(int) *p++]--;
1323 }
1324 }
1325 }
1326
1327 /* Now find all consecutive groups of spilled registers
1328 and mark each group off against the need for such groups.
1329 But don't count them against ordinary need, yet. */
1330
1331 count_possible_groups (group_size, group_mode, max_groups);
1332
1333 /* Now count all spill regs against the individual need.
1334 This includes those counted above for groups,
1335 but not those previously counted for nongroups.
1336
1337 Those that weren't counted_for_groups can also count against
1338 the not-in-group need. */
1339
1340 for (i = 0; i < n_spills; i++)
1341 {
1342 register enum reg_class *p;
1343 class = (int) REGNO_REG_CLASS (spill_regs[i]);
1344
1345 /* Those counted at the beginning shouldn't be counted twice. */
1346 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
1347 {
1348 max_needs[class]--;
1349 p = reg_class_superclasses[class];
1350 while (*p != LIM_REG_CLASSES)
1351 max_needs[(int) *p++]--;
1352
1353 if (! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i]))
1354 {
1355 if (max_nongroups[class] > 0)
1356 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1357 max_nongroups[class]--;
1358 p = reg_class_superclasses[class];
1359 while (*p != LIM_REG_CLASSES)
1360 {
1361 if (max_nongroups[(int) *p] > 0)
1362 SET_HARD_REG_BIT (counted_for_nongroups,
1363 spill_regs[i]);
1364 max_nongroups[(int) *p++]--;
1365 }
1366 }
1367 }
1368 }
1369
1370 /* Look for the case where we have discovered that we can't replace
1371 register A with register B and that means that we will now be
1372 trying to replace register A with register C. This means we can
1373 no longer replace register C with register B and we need to disable
1374 such an elimination, if it exists. This occurs often with A == ap,
1375 B == sp, and C == fp. */
1376
1377 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1378 {
1379 struct elim_table *op;
1380 register int new_to = -1;
1381
1382 if (! ep->can_eliminate && ep->can_eliminate_previous)
1383 {
1384 /* Find the current elimination for ep->from, if there is a
1385 new one. */
1386 for (op = reg_eliminate;
1387 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1388 if (op->from == ep->from && op->can_eliminate)
1389 {
1390 new_to = op->to;
1391 break;
1392 }
1393
1394 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1395 disable it. */
1396 for (op = reg_eliminate;
1397 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1398 if (op->from == new_to && op->to == ep->to)
1399 op->can_eliminate = 0;
1400 }
1401 }
1402
1403 /* See if any registers that we thought we could eliminate the previous
1404 time are no longer eliminable. If so, something has changed and we
1405 must spill the register. Also, recompute the number of eliminable
1406 registers and see if the frame pointer is needed; it is if there is
1407 no elimination of the frame pointer that we can perform. */
1408
1409 frame_pointer_needed = 1;
1410 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1411 {
1412 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM)
1413 frame_pointer_needed = 0;
1414
1415 if (! ep->can_eliminate && ep->can_eliminate_previous)
1416 {
1417 ep->can_eliminate_previous = 0;
1418 spill_hard_reg (ep->from, global, dumpfile, 1);
1419 regs_ever_live[ep->from] = 1;
1420 something_changed = 1;
1421 num_eliminable--;
1422 }
1423 }
1424
1425 /* If all needs are met, we win. */
1426
1427 for (i = 0; i < N_REG_CLASSES; i++)
1428 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1429 break;
1430 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1431 break;
1432
1433 /* Not all needs are met; must spill more hard regs. */
1434
1435 /* If any element of basic_block_needs changed from 0 to 1,
1436 re-spill all the regs already spilled. This may spill
1437 additional pseudos that didn't spill before. */
1438
1439 if (new_basic_block_needs)
1440 for (i = 0; i < n_spills; i++)
1441 something_changed
1442 |= spill_hard_reg (spill_regs[i], global, dumpfile, 0);
1443
1444 /* Now find more reload regs to satisfy the remaining need.
1445 Do it by ascending class number, since otherwise a reg
1446 might be spilled for a big class and might fail to count
1447 for a smaller class even though it belongs to that class.
1448
1449 Count spilled regs in `spills', and add entries to
1450 `spill_regs' and `spill_reg_order'.
1451
1452 ??? Note there is a problem here.
1453 When there is a need for a group in a high-numbered class,
1454 and also need for non-group regs that come from a lower class,
1455 the non-group regs are chosen first. If there aren't many regs,
1456 they might leave no room for a group.
1457
1458 This was happening on the 386. To fix it, we added the code
1459 that calls possible_group_p, so that the lower class won't
1460 break up the last possible group.
1461
1462 Really fixing the problem would require changes above
1463 in counting the regs already spilled, and in choose_reload_regs.
1464 It might be hard to avoid introducing bugs there. */
1465
1466 for (class = 0; class < N_REG_CLASSES; class++)
1467 {
1468 /* First get the groups of registers.
1469 If we got single registers first, we might fragment
1470 possible groups. */
1471 while (max_groups[class] > 0)
1472 {
1473 /* If any single spilled regs happen to form groups,
1474 count them now. Maybe we don't really need
1475 to spill another group. */
1476 count_possible_groups (group_size, group_mode, max_groups);
1477
1478 /* Groups of size 2 (the only groups used on most machines)
1479 are treated specially. */
1480 if (group_size[class] == 2)
1481 {
1482 /* First, look for a register that will complete a group. */
1483 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1484 {
1485 int j = potential_reload_regs[i];
1486 int other;
1487 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1488 &&
1489 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1490 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1491 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1492 && HARD_REGNO_MODE_OK (other, group_mode[class])
1493 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1494 other)
1495 /* We don't want one part of another group.
1496 We could get "two groups" that overlap! */
1497 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1498 ||
1499 (j < FIRST_PSEUDO_REGISTER - 1
1500 && (other = j + 1, spill_reg_order[other] >= 0)
1501 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1502 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1503 && HARD_REGNO_MODE_OK (j, group_mode[class])
1504 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1505 other)
1506 && ! TEST_HARD_REG_BIT (counted_for_groups,
1507 other))))
1508 {
1509 register enum reg_class *p;
1510
1511 /* We have found one that will complete a group,
1512 so count off one group as provided. */
1513 max_groups[class]--;
1514 p = reg_class_superclasses[class];
1515 while (*p != LIM_REG_CLASSES)
1516 max_groups[(int) *p++]--;
1517
1518 /* Indicate both these regs are part of a group. */
1519 SET_HARD_REG_BIT (counted_for_groups, j);
1520 SET_HARD_REG_BIT (counted_for_groups, other);
1521 break;
1522 }
1523 }
1524 /* We can't complete a group, so start one. */
1525 if (i == FIRST_PSEUDO_REGISTER)
1526 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1527 {
1528 int j = potential_reload_regs[i];
1529 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1530 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1531 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1532 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1533 && HARD_REGNO_MODE_OK (j, group_mode[class])
1534 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1535 j + 1))
1536 break;
1537 }
1538
1539 /* I should be the index in potential_reload_regs
1540 of the new reload reg we have found. */
1541
1542 something_changed
1543 |= new_spill_reg (i, class, max_needs, 0,
1544 global, dumpfile);
1545 }
1546 else
1547 {
1548 /* For groups of more than 2 registers,
1549 look for a sufficient sequence of unspilled registers,
1550 and spill them all at once. */
1551 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1552 {
1553 int j = potential_reload_regs[i];
1554 int k;
1555 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1556 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1557 {
1558 /* Check each reg in the sequence. */
1559 for (k = 0; k < group_size[class]; k++)
1560 if (! (spill_reg_order[j + k] < 0
1561 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1562 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1563 break;
1564 /* We got a full sequence, so spill them all. */
1565 if (k == group_size[class])
1566 {
1567 register enum reg_class *p;
1568 for (k = 0; k < group_size[class]; k++)
1569 {
1570 int idx;
1571 SET_HARD_REG_BIT (counted_for_groups, j + k);
1572 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1573 if (potential_reload_regs[idx] == j + k)
1574 break;
1575 something_changed
1576 |= new_spill_reg (idx, class, max_needs, 0,
1577 global, dumpfile);
1578 }
1579
1580 /* We have found one that will complete a group,
1581 so count off one group as provided. */
1582 max_groups[class]--;
1583 p = reg_class_superclasses[class];
1584 while (*p != LIM_REG_CLASSES)
1585 max_groups[(int) *p++]--;
1586
1587 break;
1588 }
1589 }
1590 }
1591 }
1592 }
1593
1594 /* Now similarly satisfy all need for single registers. */
1595
1596 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1597 {
1598 /* Consider the potential reload regs that aren't
1599 yet in use as reload regs, in order of preference.
1600 Find the most preferred one that's in this class. */
1601
1602 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1603 if (potential_reload_regs[i] >= 0
1604 && TEST_HARD_REG_BIT (reg_class_contents[class],
1605 potential_reload_regs[i])
1606 /* If this reg will not be available for groups,
1607 pick one that does not foreclose possible groups.
1608 This is a kludge, and not very general,
1609 but it should be sufficient to make the 386 work,
1610 and the problem should not occur on machines with
1611 more registers. */
1612 && (max_nongroups[class] == 0
1613 || possible_group_p (potential_reload_regs[i], max_groups)))
1614 break;
1615
1616 /* I should be the index in potential_reload_regs
1617 of the new reload reg we have found. */
1618
1619 something_changed
1620 |= new_spill_reg (i, class, max_needs, max_nongroups,
1621 global, dumpfile);
1622 }
1623 }
1624 }
1625
1626 /* If global-alloc was run, notify it of any register eliminations we have
1627 done. */
1628 if (global)
1629 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1630 if (ep->can_eliminate)
1631 mark_elimination (ep->from, ep->to);
1632
1633 /* From now on, we need to emit any moves without making new pseudos. */
1634 reload_in_progress = 1;
1635
1636 /* Insert code to save and restore call-clobbered hard regs
 1637 around calls. Tell what mode to use so that we will process
1638 those insns in reload_as_needed if we have to. */
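 /* (The mode is presumably used as a tag on the insns involved:
 reload_as_needed, below, looks at GET_MODE of each insn, doing
 elimination processing when it is QImode and skipping find_reloads
 entirely when it is VOIDmode.) */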
1639
1640 if (caller_save_needed)
1641 save_call_clobbered_regs (num_eliminable ? QImode
1642 : caller_save_spill_class != NO_REGS ? HImode
1643 : VOIDmode);
1644
1645 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1646 If that insn didn't set the register (i.e., it copied the register to
1647 memory), just delete that insn instead of the equivalencing insn plus
1648 anything now dead. If we call delete_dead_insn on that insn, we may
 1649 delete the insn that actually sets the register if the register dies
 1650 there, and that is incorrect. */
1651
1652 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1653 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1654 && GET_CODE (reg_equiv_init[i]) != NOTE)
1655 {
1656 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1657 delete_dead_insn (reg_equiv_init[i]);
1658 else
1659 {
1660 PUT_CODE (reg_equiv_init[i], NOTE);
1661 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1662 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1663 }
1664 }
1665
1666 /* Use the reload registers where necessary
1667 by generating move instructions to move the must-be-register
1668 values into or out of the reload registers. */
1669
1670 if (something_needs_reloads || something_needs_elimination
1671 || (caller_save_needed && num_eliminable)
1672 || caller_save_spill_class != NO_REGS)
1673 reload_as_needed (first, global);
1674
1675 reload_in_progress = 0;
1676
1677 /* Now eliminate all pseudo regs by modifying them into
1678 their equivalent memory references.
1679 The REG-rtx's for the pseudos are modified in place,
1680 so all insns that used to refer to them now refer to memory.
1681
1682 For a reg that has a reg_equiv_address, all those insns
1683 were changed by reloading so that no insns refer to it any longer;
1684 but the DECL_RTL of a variable decl may refer to it,
1685 and if so this causes the debugging info to mention the variable. */
1686
1687 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1688 {
1689 rtx addr = 0;
1690 if (reg_equiv_mem[i])
1691 addr = XEXP (reg_equiv_mem[i], 0);
1692 if (reg_equiv_address[i])
1693 addr = reg_equiv_address[i];
1694 if (addr)
1695 {
1696 if (reg_renumber[i] < 0)
1697 {
1698 rtx reg = regno_reg_rtx[i];
1699 XEXP (reg, 0) = addr;
1700 REG_USERVAR_P (reg) = 0;
1701 PUT_CODE (reg, MEM);
1702 }
1703 else if (reg_equiv_mem[i])
1704 XEXP (reg_equiv_mem[i], 0) = addr;
1705 }
1706 }
1707
1708#ifdef PRESERVE_DEATH_INFO_REGNO_P
1709 /* Make a pass over all the insns and remove death notes for things that
1710 are no longer registers or no longer die in the insn (e.g., an input
1711 and output pseudo being tied). */
1712
1713 for (insn = first; insn; insn = NEXT_INSN (insn))
1714 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1715 {
1716 rtx note, next;
1717
1718 for (note = REG_NOTES (insn); note; note = next)
1719 {
1720 next = XEXP (note, 1);
1721 if (REG_NOTE_KIND (note) == REG_DEAD
1722 && (GET_CODE (XEXP (note, 0)) != REG
1723 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1724 remove_note (insn, note);
1725 }
1726 }
1727#endif
1728
1729 /* Indicate that we no longer have known memory locations or constants. */
1730 reg_equiv_constant = 0;
1731 reg_equiv_memory_loc = 0;
1732}
1733\f
1734/* Nonzero if, after spilling reg REGNO for non-groups,
1735 it will still be possible to find a group if we still need one. */
1736
1737static int
1738possible_group_p (regno, max_groups)
1739 int regno;
1740 int *max_groups;
1741{
1742 int i;
1743 int class = (int) NO_REGS;
1744
1745 for (i = 0; i < (int) N_REG_CLASSES; i++)
1746 if (max_groups[i] > 0)
1747 {
1748 class = i;
1749 break;
1750 }
1751
1752 if (class == (int) NO_REGS)
1753 return 1;
1754
1755 /* Consider each pair of consecutive registers. */
1756 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
1757 {
1758 /* Ignore pairs that include reg REGNO. */
1759 if (i == regno || i + 1 == regno)
1760 continue;
1761
1762 /* Ignore pairs that are outside the class that needs the group.
1763 ??? Here we fail to handle the case where two different classes
1764 independently need groups. But this never happens with our
1765 current machine descriptions. */
1766 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
1767 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
1768 continue;
1769
1770 /* A pair of consecutive regs we can still spill does the trick. */
1771 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
1772 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
1773 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
1774 return 1;
1775
1776 /* A pair of one already spilled and one we can spill does it
1777 provided the one already spilled is not otherwise reserved. */
1778 if (spill_reg_order[i] < 0
1779 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
1780 && spill_reg_order[i + 1] >= 0
1781 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
1782 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
1783 return 1;
1784 if (spill_reg_order[i + 1] < 0
1785 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
1786 && spill_reg_order[i] >= 0
1787 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
1788 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
1789 return 1;
1790 }
1791
1792 return 0;
1793}
1794\f
1795/* Count any groups that can be formed from the registers recently spilled.
1796 This is done class by class, in order of ascending class number. */
1797
1798static void
1799count_possible_groups (group_size, group_mode, max_groups)
1800 int *group_size, *max_groups;
1801 enum machine_mode *group_mode;
1802{
1803 int i;
1804 /* Now find all consecutive groups of spilled registers
1805 and mark each group off against the need for such groups.
1806 But don't count them against ordinary need, yet. */
1807
1808 for (i = 0; i < N_REG_CLASSES; i++)
1809 if (group_size[i] > 1)
1810 {
1811 char regmask[FIRST_PSEUDO_REGISTER];
1812 int j;
1813
1814 bzero (regmask, sizeof regmask);
1815 /* Make a mask of all the regs that are spill regs in class I. */
1816 for (j = 0; j < n_spills; j++)
1817 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
1818 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
1819 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1820 spill_regs[j]))
1821 regmask[spill_regs[j]] = 1;
1822 /* Find each consecutive group of them. */
1823 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
1824 if (regmask[j] && j + group_size[i] <= FIRST_PSEUDO_REGISTER
1825 /* Next line in case group-mode for this class
1826 demands an even-odd pair. */
1827 && HARD_REGNO_MODE_OK (j, group_mode[i]))
1828 {
1829 int k;
1830 for (k = 1; k < group_size[i]; k++)
1831 if (! regmask[j + k])
1832 break;
1833 if (k == group_size[i])
1834 {
1835 /* We found a group. Mark it off against this class's
1836 need for groups, and against each superclass too. */
1837 register enum reg_class *p;
1838 max_groups[i]--;
1839 p = reg_class_superclasses[i];
1840 while (*p != LIM_REG_CLASSES)
1841 max_groups[(int) *p++]--;
 1842 /* Don't count these registers again. */
1843 for (k = 0; k < group_size[i]; k++)
1844 SET_HARD_REG_BIT (counted_for_groups, j + k);
1845 }
1846 j += k;
1847 }
1848 }
1849
1850}
1851\f
1852/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
1853 another mode that needs to be reloaded for the same register class CLASS.
1854 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
1855 ALLOCATE_MODE will never be smaller than OTHER_MODE.
1856
1857 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
1858 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
1859 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
1860 causes unnecessary failures on machines requiring alignment of register
1861 groups when the two modes are different sizes, because the larger mode has
1862 more strict alignment rules than the smaller mode. */
1863
1864static int
1865modes_equiv_for_class_p (allocate_mode, other_mode, class)
1866 enum machine_mode allocate_mode, other_mode;
1867 enum reg_class class;
1868{
1869 register int regno;
1870 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1871 {
1872 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
1873 && HARD_REGNO_MODE_OK (regno, allocate_mode)
1874 && ! HARD_REGNO_MODE_OK (regno, other_mode))
1875 return 0;
1876 }
1877 return 1;
1878}
1879
1880/* Add a new register to the tables of available spill-registers
1881 (as well as spilling all pseudos allocated to the register).
1882 I is the index of this register in potential_reload_regs.
1883 CLASS is the regclass whose need is being satisfied.
1884 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
1885 so that this register can count off against them.
1886 MAX_NONGROUPS is 0 if this register is part of a group.
1887 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
1888
1889static int
1890new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
1891 int i;
1892 int class;
1893 int *max_needs;
1894 int *max_nongroups;
1895 int global;
1896 FILE *dumpfile;
1897{
1898 register enum reg_class *p;
1899 int val;
1900 int regno = potential_reload_regs[i];
1901
1902 if (i >= FIRST_PSEUDO_REGISTER)
1903 abort (); /* Caller failed to find any register. */
1904
1905 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
1906 fatal ("fixed or forbidden register was spilled.\n\
1907This may be due to a compiler bug or to impossible asm statements.");
1908
1909 /* Make reg REGNO an additional reload reg. */
1910
1911 potential_reload_regs[i] = -1;
1912 spill_regs[n_spills] = regno;
1913 spill_reg_order[regno] = n_spills;
1914 if (dumpfile)
1915 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
1916
1917 /* Clear off the needs we just satisfied. */
1918
1919 max_needs[class]--;
1920 p = reg_class_superclasses[class];
1921 while (*p != LIM_REG_CLASSES)
1922 max_needs[(int) *p++]--;
1923
1924 if (max_nongroups && max_nongroups[class] > 0)
1925 {
1926 SET_HARD_REG_BIT (counted_for_nongroups, regno);
1927 max_nongroups[class]--;
1928 p = reg_class_superclasses[class];
1929 while (*p != LIM_REG_CLASSES)
1930 max_nongroups[(int) *p++]--;
1931 }
1932
1933 /* Spill every pseudo reg that was allocated to this reg
1934 or to something that overlaps this reg. */
1935
1936 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
1937
1938 /* If there are some registers still to eliminate and this register
1939 wasn't ever used before, additional stack space may have to be
1940 allocated to store this register. Thus, we may have changed the offset
1941 between the stack and frame pointers, so mark that something has changed.
1942 (If new pseudos were spilled, thus requiring more space, VAL would have
1943 been set non-zero by the call to spill_hard_reg above since additional
 1944 reloads may be needed in that case.)
1945
1946 One might think that we need only set VAL to 1 if this is a call-used
1947 register. However, the set of registers that must be saved by the
1948 prologue is not identical to the call-used set. For example, the
1949 register used by the call insn for the return PC is a call-used register,
1950 but must be saved by the prologue. */
1951 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
1952 val = 1;
1953
1954 regs_ever_live[spill_regs[n_spills]] = 1;
1955 n_spills++;
1956
1957 return val;
1958}
1959\f
 1960/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
1961 data that is dead in INSN. */
1962
1963static void
1964delete_dead_insn (insn)
1965 rtx insn;
1966{
1967 rtx prev = prev_real_insn (insn);
1968 rtx prev_dest;
1969
1970 /* If the previous insn sets a register that dies in our insn, delete it
1971 too. */
1972 if (prev && GET_CODE (PATTERN (prev)) == SET
1973 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
1974 && reg_mentioned_p (prev_dest, PATTERN (insn))
1975 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
1976 delete_dead_insn (prev);
1977
1978 PUT_CODE (insn, NOTE);
1979 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1980 NOTE_SOURCE_FILE (insn) = 0;
1981}
1982
1983/* Modify the home of pseudo-reg I.
1984 The new home is present in reg_renumber[I].
1985
1986 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
1987 or it may be -1, meaning there is none or it is not relevant.
1988 This is used so that all pseudos spilled from a given hard reg
1989 can share one stack slot. */
1990
1991static void
1992alter_reg (i, from_reg)
1993 register int i;
1994 int from_reg;
1995{
1996 /* When outputting an inline function, this can happen
1997 for a reg that isn't actually used. */
1998 if (regno_reg_rtx[i] == 0)
1999 return;
2000
2001 /* If the reg got changed to a MEM at rtl-generation time,
2002 ignore it. */
2003 if (GET_CODE (regno_reg_rtx[i]) != REG)
2004 return;
2005
2006 /* Modify the reg-rtx to contain the new hard reg
2007 number or else to contain its pseudo reg number. */
2008 REGNO (regno_reg_rtx[i])
2009 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2010
2011 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2012 allocate a stack slot for it. */
2013
2014 if (reg_renumber[i] < 0
2015 && reg_n_refs[i] > 0
2016 && reg_equiv_constant[i] == 0
2017 && reg_equiv_memory_loc[i] == 0)
2018 {
2019 register rtx x;
2020 int inherent_size = PSEUDO_REGNO_BYTES (i);
2021 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2022 int adjust = 0;
2023
2024 /* Each pseudo reg has an inherent size which comes from its own mode,
2025 and a total size which provides room for paradoxical subregs
2026 which refer to the pseudo reg in wider modes.
2027
2028 We can use a slot already allocated if it provides both
2029 enough inherent space and enough total space.
2030 Otherwise, we allocate a new slot, making sure that it has no less
 2031 inherent space, and no less total space, than the previous slot. */
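 /* For example (sizes hypothetical): a SImode pseudo that is also
 referenced through a DImode paradoxical SUBREG might have an
 inherent size of 4 bytes but a reg_max_ref_width, and hence a
 total_size, of 8; its slot must then provide all 8 bytes. */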
2032 if (from_reg == -1)
2033 {
2034 /* No known place to spill from => no slot to reuse. */
2035 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2036#if BYTES_BIG_ENDIAN
2037 /* Cancel the big-endian correction done in assign_stack_local.
2038 Get the address of the beginning of the slot.
2039 This is so we can do a big-endian correction unconditionally
2040 below. */
2041 adjust = inherent_size - total_size;
2042#endif
2043 }
2044 /* Reuse a stack slot if possible. */
2045 else if (spill_stack_slot[from_reg] != 0
2046 && spill_stack_slot_width[from_reg] >= total_size
2047 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2048 >= inherent_size))
2049 x = spill_stack_slot[from_reg];
2050 /* Allocate a bigger slot. */
2051 else
2052 {
2053 /* Compute maximum size needed, both for inherent size
2054 and for total size. */
2055 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2056 if (spill_stack_slot[from_reg])
2057 {
2058 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2059 > inherent_size)
2060 mode = GET_MODE (spill_stack_slot[from_reg]);
2061 if (spill_stack_slot_width[from_reg] > total_size)
2062 total_size = spill_stack_slot_width[from_reg];
2063 }
2064 /* Make a slot with that size. */
2065 x = assign_stack_local (mode, total_size, -1);
2066#if BYTES_BIG_ENDIAN
2067 /* Cancel the big-endian correction done in assign_stack_local.
2068 Get the address of the beginning of the slot.
2069 This is so we can do a big-endian correction unconditionally
2070 below. */
2071 adjust = GET_MODE_SIZE (mode) - total_size;
2072#endif
2073 spill_stack_slot[from_reg] = x;
2074 spill_stack_slot_width[from_reg] = total_size;
2075 }
2076
2077#if BYTES_BIG_ENDIAN
2078 /* On a big endian machine, the "address" of the slot
2079 is the address of the low part that fits its inherent mode. */
2080 if (inherent_size < total_size)
2081 adjust += (total_size - inherent_size);
2082#endif /* BYTES_BIG_ENDIAN */
2083
2084 /* If we have any adjustment to make, or if the stack slot is the
2085 wrong mode, make a new stack slot. */
2086 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2087 {
2088 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2089 plus_constant (XEXP (x, 0), adjust));
2090 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2091 }
2092
2093 /* Save the stack slot for later. */
2094 reg_equiv_memory_loc[i] = x;
2095 }
2096}
2097
2098/* Mark the slots in regs_ever_live for the hard regs
2099 used by pseudo-reg number REGNO. */
2100
2101void
2102mark_home_live (regno)
2103 int regno;
2104{
2105 register int i, lim;
2106 i = reg_renumber[regno];
2107 if (i < 0)
2108 return;
2109 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2110 while (i < lim)
2111 regs_ever_live[i++] = 1;
2112}
2113\f
2114/* This function handles the tracking of elimination offsets around branches.
2115
2116 X is a piece of RTL being scanned.
2117
2118 INSN is the insn that it came from, if any.
2119
2120 INITIAL_P is non-zero if we are to set the offset to be the initial
2121 offset and zero if we are setting the offset of the label to be the
2122 current offset. */
2123
2124static void
2125set_label_offsets (x, insn, initial_p)
2126 rtx x;
2127 rtx insn;
2128 int initial_p;
2129{
2130 enum rtx_code code = GET_CODE (x);
2131 rtx tem;
2132 int i;
2133 struct elim_table *p;
2134
2135 switch (code)
2136 {
2137 case LABEL_REF:
2138 x = XEXP (x, 0);
2139
2140 /* ... fall through ... */
2141
2142 case CODE_LABEL:
2143 /* If we know nothing about this label, set the desired offsets. Note
2144 that this sets the offset at a label to be the offset before a label
2145 if we don't know anything about the label. This is not correct for
2146 the label after a BARRIER, but is the best guess we can make. If
2147 we guessed wrong, we will suppress an elimination that might have
2148 been possible had we been able to guess correctly. */
2149
2150 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2151 {
2152 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2153 offsets_at[CODE_LABEL_NUMBER (x)][i]
2154 = (initial_p ? reg_eliminate[i].initial_offset
2155 : reg_eliminate[i].offset);
2156 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2157 }
2158
2159 /* Otherwise, if this is the definition of a label and it is
 2160 preceded by a BARRIER, set our offsets to the known offset of
2161 that label. */
2162
2163 else if (x == insn
2164 && (tem = prev_nonnote_insn (insn)) != 0
2165 && GET_CODE (tem) == BARRIER)
2166 {
2167 num_not_at_initial_offset = 0;
2168 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2169 {
2170 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2171 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2172 if (reg_eliminate[i].offset != reg_eliminate[i].initial_offset)
2173 num_not_at_initial_offset++;
2174 }
2175 }
2176
2177 else
2178 /* If neither of the above cases is true, compare each offset
2179 with those previously recorded and suppress any eliminations
2180 where the offsets disagree. */
 2181
2182 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2183 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2184 != (initial_p ? reg_eliminate[i].initial_offset
2185 : reg_eliminate[i].offset))
2186 reg_eliminate[i].can_eliminate = 0;
2187
2188 return;
2189
2190 case JUMP_INSN:
2191 set_label_offsets (PATTERN (insn), insn, initial_p);
2192
2193 /* ... fall through ... */
2194
2195 case INSN:
2196 case CALL_INSN:
2197 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2198 and hence must have all eliminations at their initial offsets. */
2199 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2200 if (REG_NOTE_KIND (tem) == REG_LABEL)
2201 set_label_offsets (XEXP (tem, 0), insn, 1);
2202 return;
2203
2204 case ADDR_VEC:
2205 case ADDR_DIFF_VEC:
2206 /* Each of the labels in the address vector must be at their initial
 2207 offsets. We want the first field for ADDR_VEC and the second
2208 field for ADDR_DIFF_VEC. */
2209
2210 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2211 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2212 insn, initial_p);
2213 return;
2214
2215 case SET:
2216 /* We only care about setting PC. If the source is not RETURN,
2217 IF_THEN_ELSE, or a label, disable any eliminations not at
2218 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2219 isn't one of those possibilities. For branches to a label,
2220 call ourselves recursively.
2221
2222 Note that this can disable elimination unnecessarily when we have
2223 a non-local goto since it will look like a non-constant jump to
2224 someplace in the current function. This isn't a significant
2225 problem since such jumps will normally be when all elimination
2226 pairs are back to their initial offsets. */
2227
2228 if (SET_DEST (x) != pc_rtx)
2229 return;
2230
2231 switch (GET_CODE (SET_SRC (x)))
2232 {
2233 case PC:
2234 case RETURN:
2235 return;
2236
2237 case LABEL_REF:
2238 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2239 return;
2240
2241 case IF_THEN_ELSE:
2242 tem = XEXP (SET_SRC (x), 1);
2243 if (GET_CODE (tem) == LABEL_REF)
2244 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2245 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2246 break;
2247
2248 tem = XEXP (SET_SRC (x), 2);
2249 if (GET_CODE (tem) == LABEL_REF)
2250 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2251 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2252 break;
2253 return;
2254 }
2255
2256 /* If we reach here, all eliminations must be at their initial
2257 offset because we are doing a jump to a variable address. */
2258 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2259 if (p->offset != p->initial_offset)
2260 p->can_eliminate = 0;
2261 }
2262}
2263\f
 2264/* Used for communication between the next two functions to properly share
2265 the vector for an ASM_OPERANDS. */
2266
2267static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2268
 2269/* Scan X and replace any eliminable registers (such as fp) with a
2270 replacement (such as sp), plus an offset.
2271
2272 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2273 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2274 MEM, we are allowed to replace a sum of a register and the constant zero
2275 with the register, which we cannot do outside a MEM. In addition, we need
2276 to record the fact that a register is referenced outside a MEM.
2277
2278 If INSN is nonzero, it is the insn containing X. If we replace a REG
2279 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
 2280 CLOBBER of the pseudo after INSN so find_equiv_regs will know
2281 that the REG is being modified.
2282
2283 If we see a modification to a register we know about, take the
2284 appropriate action (see case SET, below).
2285
 2286 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2287 replacements done assuming all offsets are at their initial values. If
2288 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2289 encounter, return the actual location so that find_reloads will do
2290 the proper thing. */
2291
2292rtx
2293eliminate_regs (x, mem_mode, insn)
2294 rtx x;
2295 enum machine_mode mem_mode;
2296 rtx insn;
2297{
2298 enum rtx_code code = GET_CODE (x);
2299 struct elim_table *ep;
2300 int regno;
2301 rtx new;
2302 int i, j;
2303 char *fmt;
2304 int copied = 0;
2305
2306 switch (code)
2307 {
2308 case CONST_INT:
2309 case CONST_DOUBLE:
2310 case CONST:
2311 case SYMBOL_REF:
2312 case CODE_LABEL:
2313 case PC:
2314 case CC0:
2315 case ASM_INPUT:
2316 case ADDR_VEC:
2317 case ADDR_DIFF_VEC:
2318 case RETURN:
2319 return x;
2320
2321 case REG:
2322 regno = REGNO (x);
2323
2324 /* First handle the case where we encounter a bare register that
2325 is eliminable. Replace it with a PLUS. */
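 /* For instance, assuming an fp-to-sp elimination whose running
 offset is 16, (reg fp) becomes (plus (reg sp) (const_int 16)) here. */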
2326 if (regno < FIRST_PSEUDO_REGISTER)
2327 {
2328 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2329 ep++)
2330 if (ep->from_rtx == x && ep->can_eliminate)
2331 {
2332 if (! mem_mode)
2333 ep->ref_outside_mem = 1;
2334 return plus_constant (ep->to_rtx, ep->previous_offset);
2335 }
2336
2337 }
2338 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2339 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2340 {
2341 /* In this case, find_reloads would attempt to either use an
2342 incorrect address (if something is not at its initial offset)
 2343 or substitute a replaced address into an insn (which loses
2344 if the offset is changed by some later action). So we simply
2345 return the replaced stack slot (assuming it is changed by
2346 elimination) and ignore the fact that this is actually a
2347 reference to the pseudo. Ensure we make a copy of the
2348 address in case it is shared. */
2349 new = eliminate_regs (reg_equiv_memory_loc[regno], mem_mode, 0);
2350 if (new != reg_equiv_memory_loc[regno])
2351 return copy_rtx (new);
2352 }
2353 return x;
2354
2355 case PLUS:
2356 /* If this is the sum of an eliminable register and a constant, rework
2357 the sum. */
2358 if (GET_CODE (XEXP (x, 0)) == REG
2359 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2360 && CONSTANT_P (XEXP (x, 1)))
2361 {
2362 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2363 ep++)
2364 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2365 {
2366 if (! mem_mode)
2367 ep->ref_outside_mem = 1;
2368
2369 /* The only time we want to replace a PLUS with a REG (this
2370 occurs when the constant operand of the PLUS is the negative
2371 of the offset) is when we are inside a MEM. We won't want
2372 to do so at other times because that would change the
2373 structure of the insn in a way that reload can't handle.
2374 We special-case the commonest situation in
2375 eliminate_regs_in_insn, so just replace a PLUS with a
2376 PLUS here, unless inside a MEM. */
2377 if (mem_mode && GET_CODE (XEXP (x, 1)) == CONST_INT
2378 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2379 return ep->to_rtx;
2380 else
2381 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2382 plus_constant (XEXP (x, 1),
2383 ep->previous_offset));
2384 }
2385
2386 /* If the register is not eliminable, we are done since the other
2387 operand is a constant. */
2388 return x;
2389 }
2390
2391 /* If this is part of an address, we want to bring any constant to the
2392 outermost PLUS. We will do this by doing register replacement in
2393 our operands and seeing if a constant shows up in one of them.
2394
2395 We assume here this is part of an address (or a "load address" insn)
2396 since an eliminable register is not likely to appear in any other
2397 context.
2398
2399 If we have (plus (eliminable) (reg)), we want to produce
 2400 (plus (plus (replacement) (reg)) (const)). If this was part of a
2401 normal add insn, (plus (replacement) (reg)) will be pushed as a
2402 reload. This is the desired action. */
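 /* Concretely (hypothetical pseudo number and offset, fp eliminated
 to sp with a previous offset of 16):
 (plus (reg fp) (reg 117)) turns into
 (plus (plus (reg sp) (reg 117)) (const_int 16)),
 leaving the constant in the outermost PLUS. */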
2403
2404 {
2405 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, 0);
2406 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, 0);
2407
2408 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2409 {
2410 /* If one side is a PLUS and the other side is a pseudo that
 2411 didn't get a hard register but has a reg_equiv_constant,
2412 we must replace the constant here since it may no longer
2413 be in the position of any operand. */
2414 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2415 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2416 && reg_renumber[REGNO (new1)] < 0
2417 && reg_equiv_constant != 0
2418 && reg_equiv_constant[REGNO (new1)] != 0)
2419 new1 = reg_equiv_constant[REGNO (new1)];
2420 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2421 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2422 && reg_renumber[REGNO (new0)] < 0
2423 && reg_equiv_constant[REGNO (new0)] != 0)
2424 new0 = reg_equiv_constant[REGNO (new0)];
2425
2426 new = form_sum (new0, new1);
2427
2428 /* As above, if we are not inside a MEM we do not want to
2429 turn a PLUS into something else. We might try to do so here
2430 for an addition of 0 if we aren't optimizing. */
2431 if (! mem_mode && GET_CODE (new) != PLUS)
2432 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2433 else
2434 return new;
2435 }
2436 }
2437 return x;
2438
2439 case EXPR_LIST:
2440 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2441 if (XEXP (x, 0))
2442 {
2443 new = eliminate_regs (XEXP (x, 0), mem_mode, 0);
2444 if (new != XEXP (x, 0))
2445 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2446 }
2447
2448 /* ... fall through ... */
2449
2450 case INSN_LIST:
2451 /* Now do eliminations in the rest of the chain. If this was
2452 an EXPR_LIST, this might result in allocating more memory than is
2453 strictly needed, but it simplifies the code. */
2454 if (XEXP (x, 1))
2455 {
2456 new = eliminate_regs (XEXP (x, 1), mem_mode, 0);
2457 if (new != XEXP (x, 1))
2458 return gen_rtx (INSN_LIST, GET_MODE (x), XEXP (x, 0), new);
2459 }
2460 return x;
2461
2462 case CALL:
2463 case COMPARE:
2464 case MINUS:
2465 case MULT:
2466 case DIV: case UDIV:
2467 case MOD: case UMOD:
2468 case AND: case IOR: case XOR:
2469 case LSHIFT: case ASHIFT: case ROTATE:
2470 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2471 case NE: case EQ:
2472 case GE: case GT: case GEU: case GTU:
2473 case LE: case LT: case LEU: case LTU:
2474 {
2475 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, 0);
2476 rtx new1 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, 0) : 0;
2477
2478 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2479 return gen_rtx (code, GET_MODE (x), new0, new1);
2480 }
2481 return x;
2482
2483 case PRE_INC:
2484 case POST_INC:
2485 case PRE_DEC:
2486 case POST_DEC:
2487 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2488 if (ep->to_rtx == XEXP (x, 0))
2489 {
2490 if (code == PRE_DEC || code == POST_DEC)
2491 ep->offset += GET_MODE_SIZE (mem_mode);
2492 else
2493 ep->offset -= GET_MODE_SIZE (mem_mode);
2494 }
2495
2496 /* Fall through to generic unary operation case. */
2497 case USE:
2498 case STRICT_LOW_PART:
2499 case NEG: case NOT:
2500 case SIGN_EXTEND: case ZERO_EXTEND:
2501 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2502 case FLOAT: case FIX:
2503 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2504 case ABS:
2505 case SQRT:
2506 case FFS:
2507 new = eliminate_regs (XEXP (x, 0), mem_mode, 0);
2508 if (new != XEXP (x, 0))
2509 return gen_rtx (code, GET_MODE (x), new);
2510 return x;
2511
2512 case SUBREG:
2513 /* Similar to above processing, but preserve SUBREG_WORD.
2514 Convert (subreg (mem)) to (mem) if not paradoxical.
2515 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2516 pseudo didn't get a hard reg, we must replace this with the
2517 eliminated version of the memory location because push_reloads
2518 may do the replacement in certain circumstances. */
2519 if (GET_CODE (SUBREG_REG (x)) == REG
2520 && (GET_MODE_SIZE (GET_MODE (x))
2521 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2522 && reg_equiv_memory_loc != 0
2523 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2524 {
2525 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2526 mem_mode, 0);
2527
2528 /* If we didn't change anything, we must retain the pseudo. */
2529 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2530 new = XEXP (x, 0);
2531 else
2532 /* Otherwise, ensure NEW isn't shared in case we have to reload
2533 it. */
2534 new = copy_rtx (new);
2535 }
2536 else
2537 new = eliminate_regs (SUBREG_REG (x), mem_mode, 0);
2538
2539 if (new != XEXP (x, 0))
2540 {
2541 if (GET_CODE (new) == MEM
2542 && (GET_MODE_SIZE (GET_MODE (x))
2543 <= GET_MODE_SIZE (GET_MODE (new))))
2544 {
2545 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2546 enum machine_mode mode = GET_MODE (x);
2547
2548#if BYTES_BIG_ENDIAN
2549 offset += (MIN (UNITS_PER_WORD,
2550 GET_MODE_SIZE (GET_MODE (new)))
2551 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2552#endif
2553
2554 PUT_MODE (new, mode);
2555 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2556 return new;
2557 }
2558 else
2559 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2560 }
2561
2562 return x;
2563
2564 case CLOBBER:
2565 /* If clobbering a register that is the replacement register for an
 2566 elimination we still think can be performed, note that it cannot
2567 be performed. Otherwise, we need not be concerned about it. */
2568 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2569 if (ep->to_rtx == XEXP (x, 0))
2570 ep->can_eliminate = 0;
2571
2572 return x;
2573
2574 case ASM_OPERANDS:
2575 {
2576 rtx *temp_vec;
2577 /* Properly handle sharing input and constraint vectors. */
2578 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2579 {
2580 /* When we come to a new vector not seen before,
2581 scan all its elements; keep the old vector if none
2582 of them changes; otherwise, make a copy. */
2583 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2584 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2585 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2586 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
2587 mem_mode, 0);
2588
2589 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2590 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2591 break;
2592
2593 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2594 new_asm_operands_vec = old_asm_operands_vec;
2595 else
2596 new_asm_operands_vec
2597 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2598 }
2599
2600 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2601 if (new_asm_operands_vec == old_asm_operands_vec)
2602 return x;
2603
2604 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2605 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2606 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2607 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2608 ASM_OPERANDS_SOURCE_FILE (x),
2609 ASM_OPERANDS_SOURCE_LINE (x));
2610 new->volatil = x->volatil;
2611 return new;
2612 }
2613
2614 case SET:
2615 /* Check for setting a register that we know about. */
2616 if (GET_CODE (SET_DEST (x)) == REG)
2617 {
2618 /* See if this is setting the replacement register for an
 2619 elimination.
2620
2621 If DEST is the frame pointer, we do nothing because we assume that
2622 all assignments to the frame pointer are for non-local gotos and
2623 are being done at a time when they are valid and do not disturb
2624 anything else. Some machines want to eliminate a fake argument
2625 pointer with either the frame or stack pointer. Assignments to
2626 the frame pointer must not prevent this elimination. */
2627
2628 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2629 ep++)
2630 if (ep->to_rtx == SET_DEST (x)
2631 && SET_DEST (x) != frame_pointer_rtx)
2632 {
 2633 /* If it is being incremented, adjust the offset. Otherwise,
2634 this elimination can't be done. */
2635 rtx src = SET_SRC (x);
2636
2637 if (GET_CODE (src) == PLUS
2638 && XEXP (src, 0) == SET_DEST (x)
2639 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2640 ep->offset -= INTVAL (XEXP (src, 1));
2641 else
2642 ep->can_eliminate = 0;
2643 }
2644
 2645 /* Now check to see if we are assigning to a register that can be
2646 eliminated. If so, it must be as part of a PARALLEL, since we
2647 will not have been called if this is a single SET. So indicate
2648 that we can no longer eliminate this reg. */
2649 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2650 ep++)
2651 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
2652 ep->can_eliminate = 0;
2653 }
2654
2655 /* Now avoid the loop below in this common case. */
2656 {
2657 rtx new0 = eliminate_regs (SET_DEST (x), 0, 0);
2658 rtx new1 = eliminate_regs (SET_SRC (x), 0, 0);
2659
2660 /* If SET_DEST changed from a REG to a MEM and INSN is non-zero,
2661 write a CLOBBER insn. */
2662 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
2663 && insn != 0)
2664 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
2665
2666 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
2667 return gen_rtx (SET, VOIDmode, new0, new1);
2668 }
2669
2670 return x;
2671
2672 case MEM:
2673 /* Our only special processing is to pass the mode of the MEM to our
2674 recursive call and copy the flags. While we are here, handle this
2675 case more efficiently. */
2676 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), 0);
2677 if (new != XEXP (x, 0))
2678 {
2679 new = gen_rtx (MEM, GET_MODE (x), new);
2680 new->volatil = x->volatil;
2681 new->unchanging = x->unchanging;
2682 new->in_struct = x->in_struct;
2683 return new;
2684 }
2685 else
2686 return x;
2687 }
2688
2689 /* Process each of our operands recursively. If any have changed, make a
2690 copy of the rtx. */
2691 fmt = GET_RTX_FORMAT (code);
2692 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2693 {
2694 if (*fmt == 'e')
2695 {
2696 new = eliminate_regs (XEXP (x, i), mem_mode, 0);
2697 if (new != XEXP (x, i) && ! copied)
2698 {
2699 rtx new_x = rtx_alloc (code);
2700 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
2701 + (sizeof (new_x->fld[0])
2702 * GET_RTX_LENGTH (code))));
2703 x = new_x;
2704 copied = 1;
2705 }
2706 XEXP (x, i) = new;
2707 }
2708 else if (*fmt == 'E')
2709 {
2710 int copied_vec = 0;
2711 for (j = 0; j < XVECLEN (x, i); j++)
2712 {
2713 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
2714 if (new != XVECEXP (x, i, j) && ! copied_vec)
2715 {
2716 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2717 &XVECEXP (x, i, 0));
2718 if (! copied)
2719 {
2720 rtx new_x = rtx_alloc (code);
2721 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
2722 + (sizeof (new_x->fld[0])
2723 * GET_RTX_LENGTH (code))));
2724 x = new_x;
2725 copied = 1;
2726 }
2727 XVEC (x, i) = new_v;
2728 copied_vec = 1;
2729 }
2730 XVECEXP (x, i, j) = new;
2731 }
2732 }
2733 }
2734
2735 return x;
2736}
2737\f
2738/* Scan INSN and eliminate all eliminable registers in it.
2739
2740 If REPLACE is nonzero, do the replacement destructively. Also
 2741 delete the insn as dead if it is setting an eliminable register.
2742
2743 If REPLACE is zero, do all our allocations in reload_obstack.
2744
2745 If no eliminations were done and this insn doesn't require any elimination
2746 processing (these are not identical conditions: it might be updating sp,
2747 but not referencing fp; this needs to be seen during reload_as_needed so
2748 that the offset between fp and sp can be taken into consideration), zero
2749 is returned. Otherwise, 1 is returned. */
2750
2751static int
2752eliminate_regs_in_insn (insn, replace)
2753 rtx insn;
2754 int replace;
2755{
2756 rtx old_body = PATTERN (insn);
2757 rtx new_body;
2758 int val = 0;
2759 struct elim_table *ep;
2760
2761 if (! replace)
2762 push_obstacks (&reload_obstack, &reload_obstack);
2763
2764 if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
2765 && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
2766 {
2767 /* Check for setting an eliminable register. */
2768 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2769 if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
2770 {
2771 /* In this case this insn isn't serving a useful purpose. We
2772 will delete it in reload_as_needed once we know that this
2773 elimination is, in fact, being done.
2774
 2775 If REPLACE isn't set, we can't delete this insn, but needn't
2776 process it since it won't be used unless something changes. */
2777 if (replace)
2778 delete_dead_insn (insn);
2779 val = 1;
2780 goto done;
2781 }
2782
2783 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
2784 in the insn is the negative of the offset in FROM. Substitute
2785 (set (reg) (reg to)) for the insn and change its code.
2786
 2787 We have to do this here, rather than in eliminate_regs, so that we can
2788 change the insn code. */
2789
2790 if (GET_CODE (SET_SRC (old_body)) == PLUS
2791 && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
2792 && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
2793 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2794 ep++)
2795 if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
2796 && ep->can_eliminate
2797 && ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
2798 {
2799 PATTERN (insn) = gen_rtx (SET, VOIDmode,
2800 SET_DEST (old_body), ep->to_rtx);
2801 INSN_CODE (insn) = -1;
2802 val = 1;
2803 goto done;
2804 }
2805 }
2806
2807 old_asm_operands_vec = 0;
2808
2809 /* Replace the body of this insn with a substituted form. If we changed
2810 something, return non-zero. If this is the final call for this
2811 insn (REPLACE is non-zero), do the elimination in REG_NOTES as well.
2812
2813 If we are replacing a body that was a (set X (plus Y Z)), try to
2814 re-recognize the insn. We do this in case we had a simple addition
2815 but now can do this as a load-address. This saves an insn in this
2816 common case. */
2817
2818 new_body = eliminate_regs (old_body, 0, replace ? insn : 0);
2819 if (new_body != old_body)
2820 {
2821 if (GET_CODE (old_body) != SET || GET_CODE (SET_SRC (old_body)) != PLUS
2822 || ! validate_change (insn, &PATTERN (insn), new_body, 0))
2823 PATTERN (insn) = new_body;
2824
2825 if (replace && REG_NOTES (insn))
2826 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, 0);
2827 val = 1;
2828 }
 2829
2830 /* Loop through all elimination pairs. See if any have changed and
2831 recalculate the number not at initial offset.
2832
2833 Compute the maximum offset (minimum offset if the stack does not
2834 grow downward) for each elimination pair.
2835
 2836 We also detect cases where register elimination cannot be done,
2837 namely, if a register would be both changed and referenced outside a MEM
2838 in the resulting insn since such an insn is often undefined and, even if
2839 not, we cannot know what meaning will be given to it. Note that it is
2840 valid to have a register used in an address in an insn that changes it
2841 (presumably with a pre- or post-increment or decrement).
2842
2843 If anything changes, return nonzero. */
2844
2845 num_not_at_initial_offset = 0;
2846 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2847 {
2848 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
2849 ep->can_eliminate = 0;
2850
2851 ep->ref_outside_mem = 0;
2852
2853 if (ep->previous_offset != ep->offset)
2854 val = 1;
2855
2856 ep->previous_offset = ep->offset;
2857 if (ep->can_eliminate && ep->offset != ep->initial_offset)
2858 num_not_at_initial_offset++;
2859
2860#ifdef STACK_GROWS_DOWNWARD
2861 ep->max_offset = MAX (ep->max_offset, ep->offset);
2862#else
2863 ep->max_offset = MIN (ep->max_offset, ep->offset);
2864#endif
2865 }
2866
2867 done:
2868 if (! replace)
2869 pop_obstacks ();
2870
2871 return val;
2872}
2873
2874/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
2875 replacement we currently believe is valid, mark it as not eliminable if X
2876 modifies DEST in any way other than by adding a constant integer to it.
2877
2878 If DEST is the frame pointer, we do nothing because we assume that
2879 all assignments to the frame pointer are nonlocal gotos and are being done
2880 at a time when they are valid and do not disturb anything else.
2881 Some machines want to eliminate a fake argument pointer with either the
2882 frame or stack pointer. Assignments to the frame pointer must not prevent
2883 this elimination.
2884
2885 Called via note_stores from reload before starting its passes to scan
2886 the insns of the function. */
2887
2888static void
2889mark_not_eliminable (dest, x)
2890 rtx dest;
2891 rtx x;
2892{
2893 register int i;
2894
2895 /* A SUBREG of a hard register here is just changing its mode. We should
2896 not see a SUBREG of an eliminable hard register, but check just in
2897 case. */
2898 if (GET_CODE (dest) == SUBREG)
2899 dest = SUBREG_REG (dest);
2900
2901 if (dest == frame_pointer_rtx)
2902 return;
2903
2904 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2905 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
2906 && (GET_CODE (x) != SET
2907 || GET_CODE (SET_SRC (x)) != PLUS
2908 || XEXP (SET_SRC (x), 0) != dest
2909 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
2910 {
2911 reg_eliminate[i].can_eliminate_previous
2912 = reg_eliminate[i].can_eliminate = 0;
2913 num_eliminable--;
2914 }
2915}
2916\f
2917/* Kick all pseudos out of hard register REGNO.
2918 If GLOBAL is nonzero, try to find someplace else to put them.
2919 If DUMPFILE is nonzero, log actions taken on that file.
2920
2921 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
 2922 because we found we can't eliminate some register. In that case, no pseudos
2923 are allowed to be in the register, even if they are only in a block that
2924 doesn't require spill registers, unlike the case when we are spilling this
2925 hard reg to produce another spill register.
2926
2927 Return nonzero if any pseudos needed to be kicked out. */
2928
2929static int
2930spill_hard_reg (regno, global, dumpfile, cant_eliminate)
2931 register int regno;
2932 int global;
2933 FILE *dumpfile;
2934 int cant_eliminate;
2935{
2936 int something_changed = 0;
2937 register int i;
2938
2939 SET_HARD_REG_BIT (forbidden_regs, regno);
2940
2941 /* Spill every pseudo reg that was allocated to this reg
2942 or to something that overlaps this reg. */
2943
2944 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2945 if (reg_renumber[i] >= 0
2946 && reg_renumber[i] <= regno
 2947 && (reg_renumber[i]
2948 + HARD_REGNO_NREGS (reg_renumber[i],
2949 PSEUDO_REGNO_MODE (i))
2950 > regno))
2951 {
2952 enum reg_class class = REGNO_REG_CLASS (regno);
2953
2954 /* If this register belongs solely to a basic block which needed no
2955 spilling of any class that this register is contained in,
2956 leave it be, unless we are spilling this register because
2957 it was a hard register that can't be eliminated. */
2958
2959 if (! cant_eliminate
2960 && basic_block_needs[0]
2961 && reg_basic_block[i] >= 0
2962 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
2963 {
2964 enum reg_class *p;
2965
2966 for (p = reg_class_superclasses[(int) class];
2967 *p != LIM_REG_CLASSES; p++)
2968 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
2969 break;
 2970
2971 if (*p == LIM_REG_CLASSES)
2972 continue;
2973 }
2974
2975 /* Mark it as no longer having a hard register home. */
2976 reg_renumber[i] = -1;
2977 /* We will need to scan everything again. */
2978 something_changed = 1;
2979 if (global)
2980 retry_global_alloc (i, forbidden_regs);
2981
2982 alter_reg (i, regno);
2983 if (dumpfile)
2984 {
2985 if (reg_renumber[i] == -1)
2986 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
2987 else
2988 fprintf (dumpfile, " Register %d now in %d.\n\n",
2989 i, reg_renumber[i]);
2990 }
2991 }
2992
2993 return something_changed;
2994}
2995\f
2996/* Find all paradoxical subregs within X and update reg_max_ref_width. */
2997
2998static void
2999scan_paradoxical_subregs (x)
3000 register rtx x;
3001{
3002 register int i;
3003 register char *fmt;
3004 register enum rtx_code code = GET_CODE (x);
3005
3006 switch (code)
3007 {
3008 case CONST_INT:
3009 case CONST:
3010 case SYMBOL_REF:
3011 case LABEL_REF:
3012 case CONST_DOUBLE:
3013 case CC0:
3014 case PC:
3015 case REG:
3016 case USE:
3017 case CLOBBER:
3018 return;
3019
3020 case SUBREG:
3021 if (GET_CODE (SUBREG_REG (x)) == REG
3022 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3023 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3024 = GET_MODE_SIZE (GET_MODE (x));
3025 return;
3026 }
3027
3028 fmt = GET_RTX_FORMAT (code);
3029 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3030 {
3031 if (fmt[i] == 'e')
3032 scan_paradoxical_subregs (XEXP (x, i));
3033 else if (fmt[i] == 'E')
3034 {
3035 register int j;
3036 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3037 scan_paradoxical_subregs (XVECEXP (x, i, j));
3038 }
3039 }
3040}
3041\f
3042struct hard_reg_n_uses { int regno; int uses; };
3043
3044static int
3045hard_reg_use_compare (p1, p2)
3046 struct hard_reg_n_uses *p1, *p2;
3047{
3048 int tem = p1->uses - p2->uses;
3049 if (tem != 0) return tem;
3050 /* If regs are equally good, sort by regno,
3051 so that the results of qsort leave nothing to chance. */
3052 return p1->regno - p2->regno;
3053}
3054
3055/* Choose the order to consider regs for use as reload registers
3056 based on how much trouble would be caused by spilling one.
3057 Store them in order of decreasing preference in potential_reload_regs. */
3058
3059static void
3060order_regs_for_reload ()
3061{
3062 register int i;
3063 register int o = 0;
3064 int large = 0;
3065
3066 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3067
3068 CLEAR_HARD_REG_SET (bad_spill_regs);
3069
3070 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3071 potential_reload_regs[i] = -1;
3072
3073 /* Count number of uses of each hard reg by pseudo regs allocated to it
3074 and then order them by decreasing use. */
3075
3076 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3077 {
3078 hard_reg_n_uses[i].uses = 0;
3079 hard_reg_n_uses[i].regno = i;
3080 }
3081
3082 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3083 {
3084 int regno = reg_renumber[i];
3085 if (regno >= 0)
3086 {
3087 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3088 while (regno < lim)
3089 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3090 }
3091 large += reg_n_refs[i];
3092 }
3093
3094 /* Now fixed registers (which cannot safely be used for reloading)
3095 get a very high use count so they will be considered least desirable.
3096 Registers used explicitly in the rtl code are almost as bad. */
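 /* (Since `large' is the sum of reg_n_refs over all pseudos, no hard
 reg can accumulate more than `large' uses above, so the penalties
 of 2 * large + 2 and large + 1 should sort these registers after
 every normally-used one.) */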
3097
3098 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3099 {
3100 if (fixed_regs[i])
3101 {
3102 hard_reg_n_uses[i].uses += 2 * large + 2;
3103 SET_HARD_REG_BIT (bad_spill_regs, i);
3104 }
3105 else if (regs_explicitly_used[i])
3106 {
3107 hard_reg_n_uses[i].uses += large + 1;
3108 /* ??? We are doing this here because of the potential that
3109 bad code may be generated if a register explicitly used in
3110 an insn was used as a spill register for that insn. But
 3111 not using these as spill registers may lose on some machines.
3112 We'll have to see how this works out. */
3113 SET_HARD_REG_BIT (bad_spill_regs, i);
3114 }
3115 }
3116 hard_reg_n_uses[FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3117 SET_HARD_REG_BIT (bad_spill_regs, FRAME_POINTER_REGNUM);
3118
3119#ifdef ELIMINABLE_REGS
3120 /* If registers other than the frame pointer are eliminable, mark them as
3121 poor choices. */
3122 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3123 {
3124 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3125 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3126 }
3127#endif
3128
3129 /* Prefer registers not so far used, for use in temporary loading.
3130 Among them, if REG_ALLOC_ORDER is defined, use that order.
3131 Otherwise, prefer registers not preserved by calls. */
3132
3133#ifdef REG_ALLOC_ORDER
3134 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3135 {
3136 int regno = reg_alloc_order[i];
3137
3138 if (hard_reg_n_uses[regno].uses == 0)
3139 potential_reload_regs[o++] = regno;
3140 }
3141#else
3142 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3143 {
3144 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3145 potential_reload_regs[o++] = i;
3146 }
3147 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3148 {
3149 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3150 potential_reload_regs[o++] = i;
3151 }
3152#endif
3153
3154 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3155 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3156
3157 /* Now add the regs that are already used,
3158 preferring those used less often. The fixed and otherwise forbidden
3159 registers will be at the end of this list. */
3160
3161 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3162 if (hard_reg_n_uses[i].uses != 0)
3163 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3164}
3165\f
3166/* Reload pseudo-registers into hard regs around each insn as needed.
3167 Additional register load insns are output before the insn that needs it
3168 and perhaps store insns after insns that modify the reloaded pseudo reg.
3169
3170 reg_last_reload_reg and reg_reloaded_contents keep track of
3171 which pseudo-registers are already available in reload registers.
3172 We update these for the reloads that we perform,
3173 as the insns are scanned. */
3174
3175static void
3176reload_as_needed (first, live_known)
3177 rtx first;
3178 int live_known;
3179{
3180 register rtx insn;
3181 register int i;
3182 int this_block = 0;
3183 rtx x;
3184 rtx after_call = 0;
3185
3186 bzero (spill_reg_rtx, sizeof spill_reg_rtx);
3187 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3188 bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
3189 reg_has_output_reload = (char *) alloca (max_regno);
3190 for (i = 0; i < n_spills; i++)
3191 {
3192 reg_reloaded_contents[i] = -1;
3193 reg_reloaded_insn[i] = 0;
3194 }
3195
3196 /* Reset all offsets on eliminable registers to their initial values. */
3197#ifdef ELIMINABLE_REGS
3198 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3199 {
3200 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3201 reg_eliminate[i].initial_offset)
3202 reg_eliminate[i].previous_offset
3203 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3204 }
3205#else
3206 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3207 reg_eliminate[0].previous_offset
3208 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3209#endif
3210
3211 num_not_at_initial_offset = 0;
3212
3213 for (insn = first; insn;)
3214 {
3215 register rtx next = NEXT_INSN (insn);
3216
3217 /* Notice when we move to a new basic block. */
3218 if (live_known && basic_block_needs && this_block + 1 < n_basic_blocks
3219 && insn == basic_block_head[this_block+1])
3220 ++this_block;
3221
3222 /* If we pass a label, copy the offsets from the label information
3223 into the current offsets of each elimination. */
3224 if (GET_CODE (insn) == CODE_LABEL)
3225 {
3226 num_not_at_initial_offset = 0;
3227 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3228 {
3229 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3230 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3231 if (reg_eliminate[i].offset != reg_eliminate[i].initial_offset)
3232 num_not_at_initial_offset++;
3233 }
3234 }
3235
3236 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3237 {
3238 rtx avoid_return_reg = 0;
3239
3240#ifdef SMALL_REGISTER_CLASSES
3241 /* Set avoid_return_reg if this is an insn
3242 that might use the value of a function call. */
3243 if (GET_CODE (insn) == CALL_INSN)
3244 {
3245 if (GET_CODE (PATTERN (insn)) == SET)
3246 after_call = SET_DEST (PATTERN (insn));
3247 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3248 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3249 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3250 else
3251 after_call = 0;
3252 }
3253 else if (after_call != 0
3254 && !(GET_CODE (PATTERN (insn)) == SET
3255 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3256 {
3257 if (reg_mentioned_p (after_call, PATTERN (insn)))
3258 avoid_return_reg = after_call;
3259 after_call = 0;
3260 }
3261#endif /* SMALL_REGISTER_CLASSES */
3262
3263 /* If we need to do register elimination processing, do so.
3264 This might delete the insn, in which case we are done. */
3265 if (num_eliminable && GET_MODE (insn) == QImode)
3266 {
3267 eliminate_regs_in_insn (insn, 1);
3268 if (GET_CODE (insn) == NOTE)
3269 {
3270 insn = next;
3271 continue;
3272 }
3273 }
3274
3275 if (GET_MODE (insn) == VOIDmode)
3276 n_reloads = 0;
3277 /* First find the pseudo regs that must be reloaded for this insn.
3278 This info is returned in the tables reload_... (see reload.h).
3279 Also modify the body of INSN by substituting RELOAD
3280 rtx's for those pseudo regs. */
3281 else
3282 {
3283 bzero (reg_has_output_reload, max_regno);
3284 CLEAR_HARD_REG_SET (reg_is_output_reload);
3285
3286 find_reloads (insn, 1, spill_indirect_levels, live_known,
3287 spill_reg_order);
3288 }
3289
3290 if (n_reloads > 0)
3291 {
3292 int class;
3293
3294 /* If this block has not had spilling done for a
a8fdc208 3295 particular class, deactivate any optional reloads
32131a9c
RK
3296 of that class lest they try to use a spill-reg which isn't
3297 available here. If we have any non-optionals that need a
3298 spill reg, abort. */
3299
3300 for (class = 0; class < N_REG_CLASSES; class++)
3301 if (basic_block_needs[class] != 0
3302 && basic_block_needs[class][this_block] == 0)
3303 for (i = 0; i < n_reloads; i++)
3304 if (class == (int) reload_reg_class[i])
3305 {
3306 if (reload_optional[i])
3307 reload_in[i] = reload_out[i] = reload_reg_rtx[i] = 0;
3308 else if (reload_reg_rtx[i] == 0)
3309 abort ();
3310 }
3311
3312 /* Now compute which reload regs to reload them into. Perhaps
3313 reusing reload regs from previous insns, or else output
3314 load insns to reload them. Maybe output store insns too.
3315 Record the choices of reload reg in reload_reg_rtx. */
3316 choose_reload_regs (insn, avoid_return_reg);
3317
3318 /* Generate the insns to reload operands into or out of
3319 their reload regs. */
3320 emit_reload_insns (insn);
3321
3322 /* Substitute the chosen reload regs from reload_reg_rtx
3323 into the insn's body (or perhaps into the bodies of other
3324 load and store insn that we just made for reloading
3325 and that we moved the structure into). */
3326 subst_reloads ();
3327 }
3328 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3329 is no longer validly lying around to save a future reload.
3330 Note that this does not detect pseudos that were reloaded
3331 for this insn in order to be stored in
3332 (obeying register constraints). That is correct; such reload
3333 registers ARE still valid. */
3334 note_stores (PATTERN (insn), forget_old_reloads_1);
3335
3336 /* There may have been CLOBBER insns placed after INSN. So scan
3337 between INSN and NEXT and use them to forget old reloads. */
3338 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3339 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3340 note_stores (PATTERN (x), forget_old_reloads_1);
3341
3342#ifdef AUTO_INC_DEC
3343 /* Likewise for regs altered by auto-increment in this insn.
3344 But note that the reg-notes are not changed by reloading:
3345 they still contain the pseudo-regs, not the spill regs. */
3346 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3347 if (REG_NOTE_KIND (x) == REG_INC)
3348 {
3349 /* See if this pseudo reg was reloaded in this insn.
3350 If so, its last-reload info is still valid
3351 because it is based on this insn's reload. */
3352 for (i = 0; i < n_reloads; i++)
3353 if (reload_out[i] == XEXP (x, 0))
3354 break;
3355
3356 if (i != n_reloads)
3357 forget_old_reloads_1 (XEXP (x, 0));
3358 }
3359#endif
3360 }
3361 /* A reload reg's contents are unknown after a label. */
3362 if (GET_CODE (insn) == CODE_LABEL)
3363 for (i = 0; i < n_spills; i++)
3364 {
3365 reg_reloaded_contents[i] = -1;
3366 reg_reloaded_insn[i] = 0;
3367 }
3368
3369 /* Don't assume a reload reg is still good after a call insn
3370 if it is a call-used reg. */
3371 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == CALL_INSN)
3372 for (i = 0; i < n_spills; i++)
3373 if (call_used_regs[spill_regs[i]])
3374 {
3375 reg_reloaded_contents[i] = -1;
3376 reg_reloaded_insn[i] = 0;
3377 }
3378
3379 /* In case registers overlap, allow certain insns to invalidate
3380 particular hard registers. */
3381
3382#ifdef INSN_CLOBBERS_REGNO_P
3383 for (i = 0 ; i < n_spills ; i++)
3384 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3385 {
3386 reg_reloaded_contents[i] = -1;
3387 reg_reloaded_insn[i] = 0;
3388 }
3389#endif
3390
3391 insn = next;
3392
3393#ifdef USE_C_ALLOCA
3394 alloca (0);
3395#endif
3396 }
3397}
3398
3399/* Discard all record of any value reloaded from X,
3400 or reloaded in X from someplace else;
3401 unless X is an output reload reg of the current insn.
3402
3403 X may be a hard reg (the reload reg)
3404 or it may be a pseudo reg that was reloaded from. */
3405
3406static void
3407forget_old_reloads_1 (x)
3408 rtx x;
3409{
3410 register int regno;
3411 int nr;
3412
3413 if (GET_CODE (x) != REG)
3414 return;
3415
3416 regno = REGNO (x);
3417
3418 if (regno >= FIRST_PSEUDO_REGISTER)
3419 nr = 1;
3420 else
3421 {
3422 int i;
3423 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3424 /* Storing into a spilled-reg invalidates its contents.
3425 This can happen if a block-local pseudo is allocated to that reg
3426 and it wasn't spilled because this block's total need is 0.
3427 Then some insn might have an optional reload and use this reg. */
3428 for (i = 0; i < nr; i++)
3429 if (spill_reg_order[regno + i] >= 0
3430 /* But don't do this if the reg actually serves as an output
3431 reload reg in the current instruction. */
3432 && (n_reloads == 0
3433 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3434 {
3435 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3436 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3437 }
3438 }
3439
3440 /* Since value of X has changed,
3441 forget any value previously copied from it. */
3442
3443 while (nr-- > 0)
3444 /* But don't forget a copy if this is the output reload
3445 that establishes the copy's validity. */
3446 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3447 reg_last_reload_reg[regno + nr] = 0;
3448}
3449\f
3450/* For each reload, the mode of the reload register. */
3451static enum machine_mode reload_mode[MAX_RELOADS];
3452
3453/* For each reload, the largest number of registers it will require. */
3454static int reload_nregs[MAX_RELOADS];
3455
3456/* Comparison function for qsort to decide which of two reloads
3457 should be handled first. *P1 and *P2 are the reload numbers. */
3458
3459static int
3460reload_reg_class_lower (p1, p2)
3461 short *p1, *p2;
3462{
3463 register int r1 = *p1, r2 = *p2;
3464 register int t;
3465
3466 /* Consider required reloads before optional ones. */
3467 t = reload_optional[r1] - reload_optional[r2];
3468 if (t != 0)
3469 return t;
3470
3471 /* Count all solitary classes before non-solitary ones. */
3472 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3473 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3474 if (t != 0)
3475 return t;
3476
3477 /* Aside from solitaires, consider all multi-reg groups first. */
3478 t = reload_nregs[r2] - reload_nregs[r1];
3479 if (t != 0)
3480 return t;
3481
3482 /* Consider reloads in order of increasing reg-class number. */
3483 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3484 if (t != 0)
3485 return t;
3486
3487 /* If reloads are equally urgent, sort by reload number,
3488 so that the results of qsort leave nothing to chance. */
3489 return r1 - r2;
3490}
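/* A purely illustrative example: given a required reload in a
   single-register class, a required two-register group reload, another
   required single-register reload, and an optional reload, the comparison
   above orders them exactly that way: solitary classes first, then bigger
   groups, then lower class numbers, and optional reloads after all the
   required ones.  */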
3491\f
3492/* The following HARD_REG_SETs indicate when each hard register is
3493 used for a reload of various parts of the current insn. */
3494
3495/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
3496static HARD_REG_SET reload_reg_used;
3497/* If reg is in use for a RELOAD_FOR_INPUT_RELOAD_ADDRESS reload. */
3498static HARD_REG_SET reload_reg_used_in_input_addr;
3499/* If reg is in use for a RELOAD_FOR_OUTPUT_RELOAD_ADDRESS reload. */
3500static HARD_REG_SET reload_reg_used_in_output_addr;
3501/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
3502static HARD_REG_SET reload_reg_used_in_op_addr;
3503/* If reg is in use for a RELOAD_FOR_INPUT reload. */
3504static HARD_REG_SET reload_reg_used_in_input;
3505/* If reg is in use for a RELOAD_FOR_OUTPUT reload. */
3506static HARD_REG_SET reload_reg_used_in_output;
3507
3508/* If reg is in use as a reload reg for any sort of reload. */
3509static HARD_REG_SET reload_reg_used_at_all;
3510
3511/* Mark reg REGNO as in use for a reload of the sort spec'd by WHEN_NEEDED.
3512 MODE is used to indicate how many consecutive regs are actually used. */
3513
3514static void
3515mark_reload_reg_in_use (regno, when_needed, mode)
3516 int regno;
3517 enum reload_when_needed when_needed;
3518 enum machine_mode mode;
3519{
3520 int nregs = HARD_REGNO_NREGS (regno, mode);
3521 int i;
3522
3523 for (i = regno; i < nregs + regno; i++)
3524 {
3525 switch (when_needed)
3526 {
3527 case RELOAD_OTHER:
3528 SET_HARD_REG_BIT (reload_reg_used, i);
3529 break;
3530
3531 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3532 SET_HARD_REG_BIT (reload_reg_used_in_input_addr, i);
3533 break;
3534
3535 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3536 SET_HARD_REG_BIT (reload_reg_used_in_output_addr, i);
3537 break;
3538
3539 case RELOAD_FOR_OPERAND_ADDRESS:
3540 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
3541 break;
3542
3543 case RELOAD_FOR_INPUT:
3544 SET_HARD_REG_BIT (reload_reg_used_in_input, i);
3545 break;
3546
3547 case RELOAD_FOR_OUTPUT:
3548 SET_HARD_REG_BIT (reload_reg_used_in_output, i);
3549 break;
3550 }
3551
3552 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
3553 }
3554}
3555
3556/* 1 if reg REGNO is free as a reload reg for a reload of the sort
3557 specified by WHEN_NEEDED. */
3558
3559static int
3560reload_reg_free_p (regno, when_needed)
3561 int regno;
3562 enum reload_when_needed when_needed;
3563{
3564 /* In use for a RELOAD_OTHER means it's not available for anything. */
3565 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
3566 return 0;
3567 switch (when_needed)
3568 {
3569 case RELOAD_OTHER:
3570 /* In use for anything means not available for a RELOAD_OTHER. */
3571 return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);
3572
3573 /* The other kinds of use can sometimes share a register. */
3574 case RELOAD_FOR_INPUT:
3575 return (! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno)
3576 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3577 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno));
3578 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3579 return (! TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno)
3580 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno));
3581 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3582 return (! TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno)
3583 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3584 case RELOAD_FOR_OPERAND_ADDRESS:
3585 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3586 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno)
3587 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3588 case RELOAD_FOR_OUTPUT:
3589 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3590 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno)
3591 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3592 }
3593 abort ();
3594}
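/* Roughly speaking, the conflicts tested above mirror the order in which
   the reload insns are finally emitted (see emit_reload_insns below):
   input address reloads, ordinary input reloads, operand address reloads,
   the insn itself, output address reloads, and output reloads.  Two
   reloads can share a register only when their uses do not overlap in
   that sequence.  */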
3595
3596/* Return 1 if the value in reload reg REGNO, as used by a reload
3597 needed for the part of the insn specified by WHEN_NEEDED,
3598 is not in use for a reload in any prior part of the insn.
3599
3600 We can assume that the reload reg was already tested for availability
3601 at the time it is needed, and we should not check this again,
3602 in case the reg has already been marked in use. */
3603
3604static int
3605reload_reg_free_before_p (regno, when_needed)
3606 int regno;
3607 enum reload_when_needed when_needed;
3608{
3609 switch (when_needed)
3610 {
3611 case RELOAD_OTHER:
3612 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
3613 its use starts from the beginning, so nothing can use it earlier. */
3614 return 1;
3615
3616 /* If this use is for part of the insn,
3617 check the reg is not in use for any prior part. */
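    /* The cases below fall through on purpose: a reload used late in the
       insn must also be checked against every kind of reload used before
       it, while the input reloads, which come first, have nothing earlier
       to conflict with.  */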
3618 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3619 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
3620 return 0;
3621 case RELOAD_FOR_OUTPUT:
3622 if (TEST_HARD_REG_BIT (reload_reg_used_in_input, regno))
3623 return 0;
3624 case RELOAD_FOR_OPERAND_ADDRESS:
3625 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno))
3626 return 0;
3627 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3628 case RELOAD_FOR_INPUT:
3629 return 1;
3630 }
3631 abort ();
3632}
3633
3634/* Return 1 if the value in reload reg REGNO, as used by a reload
3635 needed for the part of the insn specified by WHEN_NEEDED,
3636 is still available in REGNO at the end of the insn.
3637
3638 We can assume that the reload reg was already tested for availability
3639 at the time it is needed, and we should not check this again,
3640 in case the reg has already been marked in use. */
3641
3642static int
3643reload_reg_reaches_end_p (regno, when_needed)
3644 int regno;
3645 enum reload_when_needed when_needed;
3646{
3647 switch (when_needed)
3648 {
3649 case RELOAD_OTHER:
3650 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
3651 its value must reach the end. */
3652 return 1;
3653
3654 /* If this use is for part of the insn,
3655 its value reaches if no subsequent part uses the same register. */
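    /* Again the cases fall through deliberately: an input reload must
       survive the operand address, output and output address reloads; an
       operand address reload need only survive the output address
       reloads; output and output address reloads trivially reach the
       end.  */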
3656 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3657 case RELOAD_FOR_INPUT:
3658 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3659 || TEST_HARD_REG_BIT (reload_reg_used_in_output, regno))
3660 return 0;
3661 case RELOAD_FOR_OPERAND_ADDRESS:
3662 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno))
3663 return 0;
3664 case RELOAD_FOR_OUTPUT:
3665 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3666 return 1;
3667 }
3668 abort ();
3669}
3670\f
3671/* Vector of reload-numbers showing the order in which the reloads should
3672 be processed. */
3673short reload_order[MAX_RELOADS];
3674
3675/* Indexed by reload number, 1 if incoming value
3676 inherited from previous insns. */
3677char reload_inherited[MAX_RELOADS];
3678
3679/* For an inherited reload, this is the insn the reload was inherited from,
3680 if we know it. Otherwise, this is 0. */
3681rtx reload_inheritance_insn[MAX_RELOADS];
3682
3683/* If non-zero, this is a place to get the value of the reload,
3684 rather than using reload_in. */
3685rtx reload_override_in[MAX_RELOADS];
3686
3687/* For each reload, the index in spill_regs of the spill register used,
3688 or -1 if we did not need one of the spill registers for this reload. */
3689int reload_spill_index[MAX_RELOADS];
3690
3691/* Index of last register assigned as a spill register. We allocate in
3692	 a round-robin fashion.  */
3693
3694 static int last_spill_reg = 0;
3695
3696/* Find a spill register to use as a reload register for reload R.
3697 LAST_RELOAD is non-zero if this is the last reload for the insn being
3698 processed.
3699
3700 Set reload_reg_rtx[R] to the register allocated.
3701
3702 If NOERROR is nonzero, we return 1 if successful,
3703 or 0 if we couldn't find a spill reg and we didn't change anything. */
3704
3705static int
3706allocate_reload_reg (r, insn, last_reload, noerror)
3707 int r;
3708 rtx insn;
3709 int last_reload;
3710 int noerror;
3711{
3712 int i;
3713 int pass;
3714 int count;
3715 rtx new;
3716 int regno;
3717
3718 /* If we put this reload ahead, thinking it is a group,
3719 then insist on finding a group. Otherwise we can grab a
3720	 reg that some other reload needs.
3721 (That can happen when we have a 68000 DATA_OR_FP_REG
3722 which is a group of data regs or one fp reg.)
3723 We need not be so restrictive if there are no more reloads
3724 for this insn.
3725
3726 ??? Really it would be nicer to have smarter handling
3727 for that kind of reg class, where a problem like this is normal.
3728 Perhaps those classes should be avoided for reloading
3729 by use of more alternatives. */
3730
3731 int force_group = reload_nregs[r] > 1 && ! last_reload;
3732
3733 /* If we want a single register and haven't yet found one,
3734 take any reg in the right class and not in use.
3735 If we want a consecutive group, here is where we look for it.
3736
3737 We use two passes so we can first look for reload regs to
3738 reuse, which are already in use for other reloads in this insn,
3739 and only then use additional registers.
3740 I think that maximizing reuse is needed to make sure we don't
3741 run out of reload regs. Suppose we have three reloads, and
3742 reloads A and B can share regs. These need two regs.
3743 Suppose A and B are given different regs.
3744 That leaves none for C. */
3745 for (pass = 0; pass < 2; pass++)
3746 {
3747 /* I is the index in spill_regs.
3748 We advance it round-robin between insns to use all spill regs
3749 equally, so that inherited reloads have a chance
3750 of leapfrogging each other. */
3751
3752 for (count = 0, i = last_spill_reg; count < n_spills; count++)
3753 {
3754 int class = (int) reload_reg_class[r];
3755
3756 i = (i + 1) % n_spills;
3757
3758 if (reload_reg_free_p (spill_regs[i], reload_when_needed[r])
3759 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
3760 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
3761 /* Look first for regs to share, then for unshared. */
3762 && (pass || TEST_HARD_REG_BIT (reload_reg_used_at_all,
3763 spill_regs[i])))
3764 {
3765 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
3766 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
3767 (on 68000) got us two FP regs. If NR is 1,
3768 we would reject both of them. */
3769 if (force_group)
3770 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
3771 /* If we need only one reg, we have already won. */
3772 if (nr == 1)
3773 {
3774 /* But reject a single reg if we demand a group. */
3775 if (force_group)
3776 continue;
3777 break;
3778 }
3779 /* Otherwise check that as many consecutive regs as we need
3780 are available here.
3781 Also, don't use for a group registers that are
3782 needed for nongroups. */
3783 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
3784 while (nr > 1)
3785 {
3786 regno = spill_regs[i] + nr - 1;
3787 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
3788 && spill_reg_order[regno] >= 0
3789 && reload_reg_free_p (regno, reload_when_needed[r])
3790 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
3791 regno)))
3792 break;
3793 nr--;
3794 }
3795 if (nr == 1)
3796 break;
3797 }
3798 }
3799
3800 /* If we found something on pass 1, omit pass 2. */
3801 if (count < n_spills)
3802 break;
3803 }
3804
3805 /* We should have found a spill register by now. */
3806 if (count == n_spills)
3807 {
3808 if (noerror)
3809 return 0;
3810 abort ();
3811 }
3812
3813 last_spill_reg = i;
3814
3815 /* Mark as in use for this insn the reload regs we use for this. */
3816 mark_reload_reg_in_use (spill_regs[i], reload_when_needed[r],
3817 reload_mode[r]);
3818
3819 new = spill_reg_rtx[i];
3820
3821 if (new == 0 || GET_MODE (new) != reload_mode[r])
3822 spill_reg_rtx[i] = new = gen_rtx (REG, reload_mode[r], spill_regs[i]);
3823
3824 reload_reg_rtx[r] = new;
3825 reload_spill_index[r] = i;
3826 regno = true_regnum (new);
3827
3828 /* Detect when the reload reg can't hold the reload mode.
3829 This used to be one `if', but Sequent compiler can't handle that. */
3830 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
3831 {
3832 enum machine_mode test_mode = VOIDmode;
3833 if (reload_in[r])
3834 test_mode = GET_MODE (reload_in[r]);
3835 /* If reload_in[r] has VOIDmode, it means we will load it
3836 in whatever mode the reload reg has: to wit, reload_mode[r].
3837 We have already tested that for validity. */
3838 /* Aside from that, we need to test that the expressions
3839 to reload from or into have modes which are valid for this
3840 reload register. Otherwise the reload insns would be invalid. */
3841 if (! (reload_in[r] != 0 && test_mode != VOIDmode
3842 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
3843 if (! (reload_out[r] != 0
3844 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
3845 /* The reg is OK. */
3846 return 1;
3847 }
3848
3849 /* The reg is not OK. */
3850 if (noerror)
3851 return 0;
3852
3853 if (asm_noperands (PATTERN (insn)) < 0)
3854 /* It's the compiler's fault. */
3855 abort ();
3856
3857 /* It's the user's fault; the operand's mode and constraint
3858 don't match. Disable this reload so we don't crash in final. */
3859 error_for_asm (insn,
3860 "`asm' operand constraint incompatible with operand size");
3861 reload_in[r] = 0;
3862 reload_out[r] = 0;
3863 reload_reg_rtx[r] = 0;
3864 reload_optional[r] = 1;
3865 reload_secondary_p[r] = 1;
3866
3867 return 1;
3868}
3869\f
3870/* Assign hard reg targets for the pseudo-registers we must reload
3871 into hard regs for this insn.
3872 Also output the instructions to copy them in and out of the hard regs.
3873
3874 For machines with register classes, we are responsible for
3875 finding a reload reg in the proper class. */
3876
3877static void
3878choose_reload_regs (insn, avoid_return_reg)
3879 rtx insn;
3880 /* This argument is currently ignored. */
3881 rtx avoid_return_reg;
3882{
3883 register int i, j;
3884 int max_group_size = 1;
3885 enum reg_class group_class = NO_REGS;
3886 int inheritance;
3887
3888 rtx save_reload_reg_rtx[MAX_RELOADS];
3889 char save_reload_inherited[MAX_RELOADS];
3890 rtx save_reload_inheritance_insn[MAX_RELOADS];
3891 rtx save_reload_override_in[MAX_RELOADS];
3892 int save_reload_spill_index[MAX_RELOADS];
3893 HARD_REG_SET save_reload_reg_used;
3894 HARD_REG_SET save_reload_reg_used_in_input_addr;
3895 HARD_REG_SET save_reload_reg_used_in_output_addr;
3896 HARD_REG_SET save_reload_reg_used_in_op_addr;
3897 HARD_REG_SET save_reload_reg_used_in_input;
3898 HARD_REG_SET save_reload_reg_used_in_output;
3899 HARD_REG_SET save_reload_reg_used_at_all;
3900
3901 bzero (reload_inherited, MAX_RELOADS);
3902 bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
3903 bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));
3904
3905 CLEAR_HARD_REG_SET (reload_reg_used);
3906 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
3907 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr);
3908 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr);
3909 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
3910 CLEAR_HARD_REG_SET (reload_reg_used_in_output);
3911 CLEAR_HARD_REG_SET (reload_reg_used_in_input);
3912
3913 /* Distinguish output-only and input-only reloads
3914 because they can overlap with other things. */
3915 for (j = 0; j < n_reloads; j++)
3916 if (reload_when_needed[j] == RELOAD_OTHER
3917 && ! reload_needed_for_multiple[j])
3918 {
3919 if (reload_in[j] == 0)
3920 {
3921 /* But earlyclobber operands must stay as RELOAD_OTHER. */
3922 for (i = 0; i < n_earlyclobbers; i++)
3923 if (rtx_equal_p (reload_out[j], reload_earlyclobbers[i]))
3924 break;
3925 if (i == n_earlyclobbers)
3926 reload_when_needed[j] = RELOAD_FOR_OUTPUT;
3927 }
3928 if (reload_out[j] == 0)
3929 reload_when_needed[j] = RELOAD_FOR_INPUT;
3930
3931 if (reload_secondary_reload[j] >= 0
3932 && ! reload_needed_for_multiple[reload_secondary_reload[j]])
3933 reload_when_needed[reload_secondary_reload[j]]
3934 = reload_when_needed[j];
3935 }
3936
3937#ifdef SMALL_REGISTER_CLASSES
3938 /* Don't bother with avoiding the return reg
3939 if we have no mandatory reload that could use it. */
3940 if (avoid_return_reg)
3941 {
3942 int do_avoid = 0;
3943 int regno = REGNO (avoid_return_reg);
3944 int nregs
3945 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
3946 int r;
3947
3948 for (r = regno; r < regno + nregs; r++)
3949 if (spill_reg_order[r] >= 0)
3950 for (j = 0; j < n_reloads; j++)
3951 if (!reload_optional[j] && reload_reg_rtx[j] == 0
3952 && (reload_in[j] != 0 || reload_out[j] != 0
3953 || reload_secondary_p[j])
3954 &&
3955 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
3956 do_avoid = 1;
3957 if (!do_avoid)
3958 avoid_return_reg = 0;
3959 }
3960#endif /* SMALL_REGISTER_CLASSES */
3961
3962#if 0 /* Not needed, now that we can always retry without inheritance. */
3963 /* See if we have more mandatory reloads than spill regs.
3964 If so, then we cannot risk optimizations that could prevent
3965	 reloads from sharing one spill register.
3966
3967 Since we will try finding a better register than reload_reg_rtx
3968 unless it is equal to reload_in or reload_out, count such reloads. */
3969
3970 {
3971 int tem = 0;
3972#ifdef SMALL_REGISTER_CLASSES
3973 int tem = (avoid_return_reg != 0);
3974#endif
3975 for (j = 0; j < n_reloads; j++)
3976 if (! reload_optional[j]
3977 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
3978 && (reload_reg_rtx[j] == 0
3979 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
3980 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
3981 tem++;
3982 if (tem > n_spills)
3983 must_reuse = 1;
3984 }
3985#endif
3986
3987#ifdef SMALL_REGISTER_CLASSES
3988 /* Don't use the subroutine call return reg for a reload
3989 if we are supposed to avoid it. */
3990 if (avoid_return_reg)
3991 {
3992 int regno = REGNO (avoid_return_reg);
3993 int nregs
3994 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
3995 int r;
3996
3997 for (r = regno; r < regno + nregs; r++)
3998 if (spill_reg_order[r] >= 0)
3999 SET_HARD_REG_BIT (reload_reg_used, r);
4000 }
4001#endif /* SMALL_REGISTER_CLASSES */
4002
4003 /* In order to be certain of getting the registers we need,
4004 we must sort the reloads into order of increasing register class.
4005 Then our grabbing of reload registers will parallel the process
4006	 that provided the reload registers.
4007
4008 Also note whether any of the reloads wants a consecutive group of regs.
4009 If so, record the maximum size of the group desired and what
4010 register class contains all the groups needed by this insn. */
4011
4012 for (j = 0; j < n_reloads; j++)
4013 {
4014 reload_order[j] = j;
4015 reload_spill_index[j] = -1;
4016
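      /* The reload register's mode is the inner mode of the SUBREG for a
	 strict-low-part output reload; otherwise it is whichever of the
	 input and output modes is wider, a VOIDmode input counting as
	 narrower than anything.  */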
4017 reload_mode[j]
4018 = (reload_strict_low[j] && reload_out[j]
4019 ? GET_MODE (SUBREG_REG (reload_out[j]))
4020 : (reload_inmode[j] == VOIDmode
4021 || (GET_MODE_SIZE (reload_outmode[j])
4022 > GET_MODE_SIZE (reload_inmode[j])))
4023 ? reload_outmode[j] : reload_inmode[j]);
4024
4025 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4026
4027 if (reload_nregs[j] > 1)
4028 {
4029 max_group_size = MAX (reload_nregs[j], max_group_size);
4030 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4031 }
4032
4033 /* If we have already decided to use a certain register,
4034 don't use it in another way. */
4035 if (reload_reg_rtx[j])
4036 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]),
4037 reload_when_needed[j], reload_mode[j]);
4038 }
4039
4040 if (n_reloads > 1)
4041 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
4042
4043 bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
4044 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4045 bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
4046 sizeof reload_inheritance_insn);
4047 bcopy (reload_override_in, save_reload_override_in,
4048 sizeof reload_override_in);
4049 bcopy (reload_spill_index, save_reload_spill_index,
4050 sizeof reload_spill_index);
4051 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4052 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
4053 COPY_HARD_REG_SET (save_reload_reg_used_in_output,
4054 reload_reg_used_in_output);
4055 COPY_HARD_REG_SET (save_reload_reg_used_in_input,
4056 reload_reg_used_in_input);
4057 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr,
4058 reload_reg_used_in_input_addr);
4059 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr,
4060 reload_reg_used_in_output_addr);
4061 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4062 reload_reg_used_in_op_addr);
4063
4064 /* Try first with inheritance, then turning it off. */
4065
4066 for (inheritance = 1; inheritance >= 0; inheritance--)
4067 {
4068 /* Process the reloads in order of preference just found.
4069 Beyond this point, subregs can be found in reload_reg_rtx.
4070
4071 This used to look for an existing reloaded home for all
4072 of the reloads, and only then perform any new reloads.
4073 But that could lose if the reloads were done out of reg-class order
4074 because a later reload with a looser constraint might have an old
4075 home in a register needed by an earlier reload with a tighter constraint.
4076
4077 To solve this, we make two passes over the reloads, in the order
4078 described above. In the first pass we try to inherit a reload
4079 from a previous insn. If there is a later reload that needs a
4080 class that is a proper subset of the class being processed, we must
4081 also allocate a spill register during the first pass.
4082
4083 Then make a second pass over the reloads to allocate any reloads
4084 that haven't been given registers yet. */
4085
4086 for (j = 0; j < n_reloads; j++)
4087 {
4088 register int r = reload_order[j];
4089
4090 /* Ignore reloads that got marked inoperative. */
4091 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4092 continue;
4093
4094	  /* If find_reloads chose to use reload_in or reload_out as a reload
4095	     register, we don't need to choose one.  Otherwise, try even if it found
4096 one since we might save an insn if we find the value lying around. */
4097 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
4098 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
4099 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
4100 continue;
4101
4102#if 0 /* No longer needed for correct operation.
4103 It might give better code, or might not; worth an experiment? */
4104 /* If this is an optional reload, we can't inherit from earlier insns
4105 until we are sure that any non-optional reloads have been allocated.
4106 The following code takes advantage of the fact that optional reloads
4107 are at the end of reload_order. */
4108 if (reload_optional[r] != 0)
4109 for (i = 0; i < j; i++)
4110 if ((reload_out[reload_order[i]] != 0
4111 || reload_in[reload_order[i]] != 0
4112 || reload_secondary_p[reload_order[i]])
4113 && ! reload_optional[reload_order[i]]
4114 && reload_reg_rtx[reload_order[i]] == 0)
4115 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
4116#endif
4117
4118 /* First see if this pseudo is already available as reloaded
4119 for a previous insn. We cannot try to inherit for reloads
4120 that are smaller than the maximum number of registers needed
4121 for groups unless the register we would allocate cannot be used
4122 for the groups.
4123
4124 We could check here to see if this is a secondary reload for
4125 an object that is already in a register of the desired class.
4126 This would avoid the need for the secondary reload register.
4127 But this is complex because we can't easily determine what
4128 objects might want to be loaded via this reload. So let a register
4129 be allocated here. In `emit_reload_insns' we suppress one of the
4130 loads in the case described above. */
4131
4132 if (inheritance)
4133 {
4134 register int regno = -1;
4135
4136 if (reload_in[r] == 0)
4137 ;
4138 else if (GET_CODE (reload_in[r]) == REG)
4139 regno = REGNO (reload_in[r]);
4140 else if (GET_CODE (reload_in_reg[r]) == REG)
4141 regno = REGNO (reload_in_reg[r]);
4142#if 0
4143 /* This won't work, since REGNO can be a pseudo reg number.
4144 Also, it takes much more hair to keep track of all the things
4145 that can invalidate an inherited reload of part of a pseudoreg. */
4146 else if (GET_CODE (reload_in[r]) == SUBREG
4147 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
4148 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
4149#endif
4150
4151 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
4152 {
4153 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
4154
4155 if (reg_reloaded_contents[i] == regno
4156 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4157 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4158 spill_regs[i])
4159 && (reload_nregs[r] == max_group_size
4160 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
4161 spill_regs[i]))
4162 && reload_reg_free_p (spill_regs[i], reload_when_needed[r])
4163 && reload_reg_free_before_p (spill_regs[i],
4164 reload_when_needed[r]))
4165 {
4166 /* If a group is needed, verify that all the subsequent
4167 registers still have their values intact. */
4168 int nr
4169 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4170 int k;
4171
4172 for (k = 1; k < nr; k++)
4173 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
4174 != regno)
4175 break;
4176
4177 if (k == nr)
4178 {
4179 /* Mark the register as in use for this part of
4180 the insn. */
4181 mark_reload_reg_in_use (spill_regs[i],
4182 reload_when_needed[r],
4183 reload_mode[r]);
4184 reload_reg_rtx[r] = reg_last_reload_reg[regno];
4185 reload_inherited[r] = 1;
4186 reload_inheritance_insn[r] = reg_reloaded_insn[i];
4187 reload_spill_index[r] = i;
4188 }
4189 }
4190 }
4191 }
4192
4193 /* Here's another way to see if the value is already lying around. */
4194 if (inheritance
4195 && reload_in[r] != 0
4196 && ! reload_inherited[r]
4197 && reload_out[r] == 0
4198 && (CONSTANT_P (reload_in[r])
4199 || GET_CODE (reload_in[r]) == PLUS
4200 || GET_CODE (reload_in[r]) == REG
4201 || GET_CODE (reload_in[r]) == MEM)
4202 && (reload_nregs[r] == max_group_size
4203 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
4204 {
4205 register rtx equiv
4206 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
4207 -1, 0, 0, reload_mode[r]);
4208 int regno;
4209
4210 if (equiv != 0)
4211 {
4212 if (GET_CODE (equiv) == REG)
4213 regno = REGNO (equiv);
4214 else if (GET_CODE (equiv) == SUBREG)
4215 {
4216 regno = REGNO (SUBREG_REG (equiv));
4217 if (regno < FIRST_PSEUDO_REGISTER)
4218 regno += SUBREG_WORD (equiv);
4219 }
4220 else
4221 abort ();
4222 }
4223
4224 /* If we found a spill reg, reject it unless it is free
4225 and of the desired class. */
4226 if (equiv != 0
4227 && ((spill_reg_order[regno] >= 0
4228 && ! reload_reg_free_before_p (regno,
4229 reload_when_needed[r]))
4230 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4231 regno)))
4232 equiv = 0;
4233
4234 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
4235 equiv = 0;
4236
4237 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4238 equiv = 0;
4239
4240 /* We found a register that contains the value we need.
4241 If this register is the same as an `earlyclobber' operand
4242 of the current insn, just mark it as a place to reload from
4243 since we can't use it as the reload register itself. */
4244
4245 if (equiv != 0)
4246 for (i = 0; i < n_earlyclobbers; i++)
4247 if (reg_overlap_mentioned_p (equiv, reload_earlyclobbers[i]))
4248 {
4249 reload_override_in[r] = equiv;
4250 equiv = 0;
4251 break;
4252 }
4253
4254 /* JRV: If the equiv register we have found is explicitly
4255 clobbered in the current insn, mark but don't use, as above. */
4256
4257 if (equiv != 0 && regno_clobbered_p (regno, insn))
4258 {
4259 reload_override_in[r] = equiv;
4260 equiv = 0;
4261 }
4262
4263 /* If we found an equivalent reg, say no code need be generated
4264 to load it, and use it as our reload reg. */
4265 if (equiv != 0 && regno != FRAME_POINTER_REGNUM)
4266 {
4267 reload_reg_rtx[r] = equiv;
4268 reload_inherited[r] = 1;
4269 /* If it is a spill reg,
4270 mark the spill reg as in use for this insn. */
4271 i = spill_reg_order[regno];
4272 if (i >= 0)
4273 mark_reload_reg_in_use (regno, reload_when_needed[r],
4274 reload_mode[r]);
4275 }
4276 }
4277
4278 /* If we found a register to use already, or if this is an optional
4279 reload, we are done. */
4280 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
4281 continue;
4282
4283#if 0 /* No longer needed for correct operation. Might or might not
4284 give better code on the average. Want to experiment? */
4285
4286 /* See if there is a later reload that has a class different from our
4287 class that intersects our class or that requires less register
4288 than our reload. If so, we must allocate a register to this
4289 reload now, since that reload might inherit a previous reload
4290 and take the only available register in our class. Don't do this
4291 for optional reloads since they will force all previous reloads
4292 to be allocated. Also don't do this for reloads that have been
4293 turned off. */
4294
4295 for (i = j + 1; i < n_reloads; i++)
4296 {
4297 int s = reload_order[i];
4298
4299 if ((reload_in[s] == 0 && reload_out[s] == 0 &&
4300 ! reload_secondary_p[s])
4301 || reload_optional[s])
4302 continue;
4303
4304 if ((reload_reg_class[s] != reload_reg_class[r]
4305 && reg_classes_intersect_p (reload_reg_class[r],
4306 reload_reg_class[s]))
4307 || reload_nregs[s] < reload_nregs[r])
4308 break;
4309 }
4310
4311 if (i == n_reloads)
4312 continue;
4313
4314 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
4315#endif
4316 }
4317
4318 /* Now allocate reload registers for anything non-optional that
4319 didn't get one yet. */
4320 for (j = 0; j < n_reloads; j++)
4321 {
4322 register int r = reload_order[j];
4323
4324 /* Ignore reloads that got marked inoperative. */
4325 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4326 continue;
4327
4328 /* Skip reloads that already have a register allocated or are
4329 optional. */
4330 if (reload_reg_rtx[r] != 0 || reload_optional[r])
4331 continue;
4332
4333 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
4334 break;
4335 }
4336
4337 /* If that loop got all the way, we have won. */
4338 if (j == n_reloads)
4339 break;
4340
4341 fail:
4342 /* Loop around and try without any inheritance. */
4343 /* First undo everything done by the failed attempt
4344 to allocate with inheritance. */
4345 bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
4346 bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
4347 bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
4348 sizeof reload_inheritance_insn);
4349 bcopy (save_reload_override_in, reload_override_in,
4350 sizeof reload_override_in);
4351 bcopy (save_reload_spill_index, reload_spill_index,
4352 sizeof reload_spill_index);
4353 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
4354 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
4355 COPY_HARD_REG_SET (reload_reg_used_in_input,
4356 save_reload_reg_used_in_input);
4357 COPY_HARD_REG_SET (reload_reg_used_in_output,
4358 save_reload_reg_used_in_output);
4359 COPY_HARD_REG_SET (reload_reg_used_in_input_addr,
4360 save_reload_reg_used_in_input_addr);
4361 COPY_HARD_REG_SET (reload_reg_used_in_output_addr,
4362 save_reload_reg_used_in_output_addr);
4363 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
4364 save_reload_reg_used_in_op_addr);
4365 }
4366
4367 /* If we thought we could inherit a reload, because it seemed that
4368 nothing else wanted the same reload register earlier in the insn,
4369 verify that assumption, now that all reloads have been assigned. */
4370
4371 for (j = 0; j < n_reloads; j++)
4372 {
4373 register int r = reload_order[j];
4374
4375 if (reload_inherited[r] && reload_reg_rtx[r] != 0
4376 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
4377 reload_when_needed[r]))
4378 reload_inherited[r] = 0;
4379
4380 /* If we found a better place to reload from,
4381 validate it in the same fashion, if it is a reload reg. */
4382 if (reload_override_in[r]
4383 && (GET_CODE (reload_override_in[r]) == REG
4384 || GET_CODE (reload_override_in[r]) == SUBREG))
4385 {
4386 int regno = true_regnum (reload_override_in[r]);
4387 if (spill_reg_order[regno] >= 0
4388 && ! reload_reg_free_before_p (regno, reload_when_needed[r]))
4389 reload_override_in[r] = 0;
4390 }
4391 }
4392
4393 /* Now that reload_override_in is known valid,
4394 actually override reload_in. */
4395 for (j = 0; j < n_reloads; j++)
4396 if (reload_override_in[j])
4397 reload_in[j] = reload_override_in[j];
4398
4399 /* If this reload won't be done because it has been cancelled or is
4400 optional and not inherited, clear reload_reg_rtx so other
4401 routines (such as subst_reloads) don't get confused. */
4402 for (j = 0; j < n_reloads; j++)
4403 if ((reload_optional[j] && ! reload_inherited[j])
4404 || (reload_in[j] == 0 && reload_out[j] == 0
4405 && ! reload_secondary_p[j]))
4406 reload_reg_rtx[j] = 0;
4407
4408 /* Record which pseudos and which spill regs have output reloads. */
4409 for (j = 0; j < n_reloads; j++)
4410 {
4411 register int r = reload_order[j];
4412
4413 i = reload_spill_index[r];
4414
4415 /* I is nonneg if this reload used one of the spill regs.
4416 If reload_reg_rtx[r] is 0, this is an optional reload
4417 that we opted to ignore. */
4418 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
4419 && reload_reg_rtx[r] != 0)
4420 {
4421 register int nregno = REGNO (reload_out[r]);
4422 int nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
4423
4424 while (--nr >= 0)
4425 {
4426 reg_has_output_reload[nregno + nr] = 1;
4427 if (i >= 0)
4428 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
4429 }
4430
4431 if (reload_when_needed[r] != RELOAD_OTHER
4432 && reload_when_needed[r] != RELOAD_FOR_OUTPUT)
4433 abort ();
4434 }
4435 }
4436}
4437\f
4438/* Output insns to reload values in and out of the chosen reload regs. */
4439
4440static void
4441emit_reload_insns (insn)
4442 rtx insn;
4443{
4444 register int j;
4445 rtx following_insn = NEXT_INSN (insn);
4446  rtx before_insn = insn;
4447 rtx first_output_reload_insn = NEXT_INSN (insn);
4448 rtx first_other_reload_insn = insn;
4449 rtx first_operand_address_reload_insn = insn;
4450 int special;
4451 /* Values to be put in spill_reg_store are put here first. */
4452 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
4453
4454  /* If this is a CALL_INSN preceded by USE insns, any reload insns
4455 must go in front of the first USE insn, not in front of INSN. */
4456
4457 if (GET_CODE (insn) == CALL_INSN && GET_CODE (PREV_INSN (insn)) == INSN
4458 && GET_CODE (PATTERN (PREV_INSN (insn))) == USE)
4459 while (GET_CODE (PREV_INSN (before_insn)) == INSN
4460 && GET_CODE (PATTERN (PREV_INSN (before_insn))) == USE)
4461 first_other_reload_insn = first_operand_address_reload_insn
4462 = before_insn = PREV_INSN (before_insn);
4463
4464 /* Now output the instructions to copy the data into and out of the
4465 reload registers. Do these in the order that the reloads were reported,
4466 since reloads of base and index registers precede reloads of operands
4467 and the operands may need the base and index registers reloaded. */
4468
4469 for (j = 0; j < n_reloads; j++)
4470 {
4471 register rtx old;
4472 rtx oldequiv_reg = 0;
4473 rtx this_reload_insn = 0;
4474 rtx store_insn = 0;
4475
4476 old = reload_in[j];
4477 if (old != 0 && ! reload_inherited[j]
4478 && ! rtx_equal_p (reload_reg_rtx[j], old)
4479 && reload_reg_rtx[j] != 0)
4480 {
4481 register rtx reloadreg = reload_reg_rtx[j];
4482 rtx oldequiv = 0;
4483 enum machine_mode mode;
4484 rtx where;
4485	  rtx reload_insn;
4486
4487 /* Determine the mode to reload in.
4488 This is very tricky because we have three to choose from.
4489 There is the mode the insn operand wants (reload_inmode[J]).
4490 There is the mode of the reload register RELOADREG.
4491 There is the intrinsic mode of the operand, which we could find
4492 by stripping some SUBREGs.
4493 It turns out that RELOADREG's mode is irrelevant:
4494 we can change that arbitrarily.
4495
4496 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
4497 then the reload reg may not support QImode moves, so use SImode.
4498 If foo is in memory due to spilling a pseudo reg, this is safe,
4499 because the QImode value is in the least significant part of a
4500 slot big enough for a SImode. If foo is some other sort of
4501 memory reference, then it is impossible to reload this case,
4502 so previous passes had better make sure this never happens.
4503
4504 Then consider a one-word union which has SImode and one of its
4505 members is a float, being fetched as (SUBREG:SF union:SI).
4506 We must fetch that as SFmode because we could be loading into
4507 a float-only register. In this case OLD's mode is correct.
4508
4509 Consider an immediate integer: it has VOIDmode. Here we need
4510 to get a mode from something else.
4511
4512 In some cases, there is a fourth mode, the operand's
4513 containing mode. If the insn specifies a containing mode for
4514 this operand, it overrides all others.
4515
4516 I am not sure whether the algorithm here is always right,
4517 but it does the right things in those cases. */
4518
4519 mode = GET_MODE (old);
4520 if (mode == VOIDmode)
4521 mode = reload_inmode[j];
4522 if (reload_strict_low[j])
4523 mode = GET_MODE (SUBREG_REG (reload_in[j]));
4524
4525#ifdef SECONDARY_INPUT_RELOAD_CLASS
4526 /* If we need a secondary register for this operation, see if
4527 the value is already in a register in that class. Don't
4528 do this if the secondary register will be used as a scratch
4529 register. */
4530
4531 if (reload_secondary_reload[j] >= 0
4532 && reload_secondary_icode[j] == CODE_FOR_nothing)
4533 oldequiv
4534 = find_equiv_reg (old, insn,
4535 reload_reg_class[reload_secondary_reload[j]],
4536 -1, 0, 0, mode);
4537#endif
4538
4539 /* If reloading from memory, see if there is a register
4540 that already holds the same value. If so, reload from there.
4541 We can pass 0 as the reload_reg_p argument because
4542 any other reload has either already been emitted,
4543 in which case find_equiv_reg will see the reload-insn,
4544 or has yet to be emitted, in which case it doesn't matter
4545 because we will use this equiv reg right away. */
4546
4547 if (oldequiv == 0
4548 && (GET_CODE (old) == MEM
4549 || (GET_CODE (old) == REG
4550 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4551 && reg_renumber[REGNO (old)] < 0)))
4552 oldequiv = find_equiv_reg (old, insn, GENERAL_REGS,
4553 -1, 0, 0, mode);
4554
4555 if (oldequiv)
4556 {
4557 int regno = true_regnum (oldequiv);
4558
4559 /* If OLDEQUIV is a spill register, don't use it for this
4560 if any other reload needs it at an earlier stage of this insn
4561	     or at this stage.  */
4562 if (spill_reg_order[regno] >= 0
4563 && (! reload_reg_free_p (regno, reload_when_needed[j])
4564 || ! reload_reg_free_before_p (regno,
4565 reload_when_needed[j])))
4566 oldequiv = 0;
4567
4568 /* If OLDEQUIV is not a spill register,
4569 don't use it if any other reload wants it. */
4570 if (spill_reg_order[regno] < 0)
4571 {
4572 int k;
4573 for (k = 0; k < n_reloads; k++)
4574 if (reload_reg_rtx[k] != 0 && k != j
4575 && reg_overlap_mentioned_p (reload_reg_rtx[k], oldequiv))
4576 {
4577 oldequiv = 0;
4578 break;
4579 }
4580 }
4581 }
4582
4583 if (oldequiv == 0)
4584 oldequiv = old;
4585 else if (GET_CODE (oldequiv) == REG)
4586 oldequiv_reg = oldequiv;
4587 else if (GET_CODE (oldequiv) == SUBREG)
4588 oldequiv_reg = SUBREG_REG (oldequiv);
4589
4590 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
4591 then load RELOADREG from OLDEQUIV. */
4592
4593 if (GET_MODE (reloadreg) != mode)
4594 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
4595 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
4596 oldequiv = SUBREG_REG (oldequiv);
4597 if (GET_MODE (oldequiv) != VOIDmode
4598 && mode != GET_MODE (oldequiv))
4599 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
4600
4601 /* Decide where to put reload insn for this reload. */
4602 switch (reload_when_needed[j])
4603 {
4604 case RELOAD_FOR_INPUT:
4605 case RELOAD_OTHER:
4606 where = first_operand_address_reload_insn;
4607 break;
4608 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
4609 where = first_other_reload_insn;
4610 break;
4611 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
4612 where = first_output_reload_insn;
4613 break;
4614 case RELOAD_FOR_OPERAND_ADDRESS:
4615	      where = before_insn;
4616 }
4617
4618 special = 0;
4619
4620 /* Auto-increment addresses must be reloaded in a special way. */
4621 if (GET_CODE (oldequiv) == POST_INC
4622 || GET_CODE (oldequiv) == POST_DEC
4623 || GET_CODE (oldequiv) == PRE_INC
4624 || GET_CODE (oldequiv) == PRE_DEC)
4625 {
4626	      /* We are not going to bother supporting the case where an
4627 incremented register can't be copied directly from
4628 OLDEQUIV since this seems highly unlikely. */
4629 if (reload_secondary_reload[j] >= 0)
4630 abort ();
4631 /* Prevent normal processing of this reload. */
4632 special = 1;
4633 /* Output a special code sequence for this case. */
4634 this_reload_insn
4635 = inc_for_reload (reloadreg, oldequiv, reload_inc[j], where);
4636 }
4637
4638 /* If we are reloading a pseudo-register that was set by the previous
4639 insn, see if we can get rid of that pseudo-register entirely
4640 by redirecting the previous insn into our reload register. */
4641
4642 else if (optimize && GET_CODE (old) == REG
4643 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4644 && dead_or_set_p (insn, old)
4645 /* This is unsafe if some other reload
4646 uses the same reg first. */
4647 && (reload_when_needed[j] == RELOAD_OTHER
4648 || reload_when_needed[j] == RELOAD_FOR_INPUT
4649 || reload_when_needed[j] == RELOAD_FOR_INPUT_RELOAD_ADDRESS))
4650 {
4651 rtx temp = PREV_INSN (insn);
4652 while (temp && GET_CODE (temp) == NOTE)
4653 temp = PREV_INSN (temp);
4654 if (temp
4655 && GET_CODE (temp) == INSN
4656 && GET_CODE (PATTERN (temp)) == SET
4657 && SET_DEST (PATTERN (temp)) == old
4658 /* Make sure we can access insn_operand_constraint. */
4659 && asm_noperands (PATTERN (temp)) < 0
4660 /* This is unsafe if prev insn rejects our reload reg. */
4661 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
4662 reloadreg)
4663 /* This is unsafe if operand occurs more than once in current
4664 insn. Perhaps some occurrences aren't reloaded. */
4665 && count_occurrences (PATTERN (insn), old) == 1
4666 /* Don't risk splitting a matching pair of operands. */
4667 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
4668 {
4669 /* Store into the reload register instead of the pseudo. */
4670 SET_DEST (PATTERN (temp)) = reloadreg;
4671 /* If these are the only uses of the pseudo reg,
4672 pretend for GDB it lives in the reload reg we used. */
4673 if (reg_n_deaths[REGNO (old)] == 1
4674 && reg_n_sets[REGNO (old)] == 1)
4675 {
4676 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
4677 alter_reg (REGNO (old), -1);
4678 }
4679 special = 1;
4680 }
4681 }
4682
4683 /* We can't do that, so output an insn to load RELOADREG.
4684 Keep them in the following order:
4685 all reloads for input reload addresses,
4686 all reloads for ordinary input operands,
4687 all reloads for addresses of non-reloaded operands,
4688 the insn being reloaded,
4689 all reloads for addresses of output reloads,
4690 the output reloads. */
4691 if (! special)
4692 {
4693#ifdef SECONDARY_INPUT_RELOAD_CLASS
4694 rtx second_reload_reg = 0;
4695 enum insn_code icode;
4696
4697 /* If we have a secondary reload, pick up the secondary register
4698 and icode, if any. If OLDEQUIV and OLD are different or
4699 if this is an in-out reload, recompute whether or not we
4700 still need a secondary register and what the icode should
4701 be. If we still need a secondary register and the class or
4702 icode is different, go back to reloading from OLD if using
4703 OLDEQUIV means that we got the wrong type of register. We
4704 cannot have different class or icode due to an in-out reload
4705 because we don't make such reloads when both the input and
4706 output need secondary reload registers. */
4707
4708 if (reload_secondary_reload[j] >= 0)
4709 {
4710 int secondary_reload = reload_secondary_reload[j];
4711 second_reload_reg = reload_reg_rtx[secondary_reload];
4712 icode = reload_secondary_icode[j];
4713
4714 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
4715 || (reload_in[j] != 0 && reload_out[j] != 0))
4716 {
4717 enum reg_class new_class
4718 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
4719 mode, oldequiv);
4720
4721 if (new_class == NO_REGS)
4722 second_reload_reg = 0;
4723 else
4724 {
4725 enum insn_code new_icode;
4726 enum machine_mode new_mode;
4727
4728 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
4729 REGNO (second_reload_reg)))
4730 oldequiv = old;
4731 else
4732 {
4733 new_icode = reload_in_optab[(int) mode];
4734 if (new_icode != CODE_FOR_nothing
4735 && ((insn_operand_predicate[(int) new_icode][0]
4736			  && ! ((*insn_operand_predicate[(int) new_icode][0])
4737				(reloadreg, mode)))
4738 || (insn_operand_predicate[(int) new_icode][1]
4739 && ! ((*insn_operand_predicate[(int) new_icode][1])
4740 (oldequiv, mode)))))
4741 new_icode = CODE_FOR_nothing;
4742
4743 if (new_icode == CODE_FOR_nothing)
4744 new_mode = mode;
4745 else
4746 new_mode = insn_operand_mode[new_icode][2];
4747
4748 if (GET_MODE (second_reload_reg) != new_mode)
4749 {
4750 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
4751 new_mode))
4752 oldequiv = old;
4753 else
4754 second_reload_reg
4755			      = gen_rtx (REG, new_mode,
4756 REGNO (second_reload_reg));
4757 }
4758 }
4759 }
4760 }
4761
4762 /* If we still need a secondary reload register, check
4763 to see if it is being used as a scratch or intermediate
4764 register and generate code appropriately. */
4765
4766 if (second_reload_reg)
4767 {
4768 if (icode != CODE_FOR_nothing)
4769 {
4770 reload_insn = emit_insn_before (GEN_FCN (icode)
4771 (reloadreg, oldequiv,
4772 second_reload_reg),
4773 where);
4774 if (this_reload_insn == 0)
4775 this_reload_insn = reload_insn;
4776 special = 1;
4777 }
4778 else
4779 {
4780 /* See if we need a scratch register to load the
4781 intermediate register (a tertiary reload). */
4782 enum insn_code tertiary_icode
4783 = reload_secondary_icode[secondary_reload];
4784
4785 if (tertiary_icode != CODE_FOR_nothing)
4786 {
4787 rtx third_reload_reg
4788 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
4789
4790 reload_insn
4791 = emit_insn_before ((GEN_FCN (tertiary_icode)
4792 (second_reload_reg,
4793 oldequiv,
4794 third_reload_reg)),
4795 where);
4796 if (this_reload_insn == 0)
4797 this_reload_insn = reload_insn;
4798 }
4799 else
4800 {
4801 reload_insn
4802 = gen_input_reload (second_reload_reg,
4803 oldequiv, where);
4804 if (this_reload_insn == 0)
4805 this_reload_insn = reload_insn;
4806 oldequiv = second_reload_reg;
4807 }
4808 }
4809 }
4810 }
4811#endif
4812
4813 if (! special)
4814 {
4815 reload_insn = gen_input_reload (reloadreg,
4816 oldequiv, where);
4817 if (this_reload_insn == 0)
4818 this_reload_insn = reload_insn;
4819 }
4820
4821#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
4822 /* We may have to make a REG_DEAD note for the secondary reload
4823 register in the insns we just made. Find the last insn that
4824 mentioned the register. */
4825 if (! special && second_reload_reg
4826 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
4827 {
4828 rtx prev;
4829
4830 for (prev = where;
4831 prev != PREV_INSN (this_reload_insn);
4832 prev = PREV_INSN (prev))
 4833	    if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
4834 && reg_overlap_mentioned_p (second_reload_reg,
4835 PATTERN (prev)))
4836 {
4837 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
4838 second_reload_reg,
4839 REG_NOTES (prev));
4840 break;
4841 }
4842 }
4843#endif
4844 }
4845
4846 /* Update where to put other reload insns. */
4847 if (this_reload_insn)
4848 switch (reload_when_needed[j])
4849 {
4850 case RELOAD_FOR_INPUT:
4851 case RELOAD_OTHER:
4852 if (first_other_reload_insn == first_operand_address_reload_insn)
4853 first_other_reload_insn = this_reload_insn;
4854 break;
4855 case RELOAD_FOR_OPERAND_ADDRESS:
a8efe40d 4856 if (first_operand_address_reload_insn == before_insn)
32131a9c 4857 first_operand_address_reload_insn = this_reload_insn;
a8efe40d 4858 if (first_other_reload_insn == before_insn)
4859 first_other_reload_insn = this_reload_insn;
4860 }
4861
4862 /* reload_inc[j] was formerly processed here. */
4863 }
4864
4865 /* Add a note saying the input reload reg
4866 dies in this insn, if anyone cares. */
4867#ifdef PRESERVE_DEATH_INFO_REGNO_P
4868 if (old != 0
4869 && reload_reg_rtx[j] != old
4870 && reload_reg_rtx[j] != 0
4871 && reload_out[j] == 0
4872 && ! reload_inherited[j]
4873 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
4874 {
4875 register rtx reloadreg = reload_reg_rtx[j];
4876
a8fdc208 4877#if 0
4878 /* We can't abort here because we need to support this for sched.c.
4879 It's not terrible to miss a REG_DEAD note, but we should try
4880 to figure out how to do this correctly. */
4881 /* The code below is incorrect for address-only reloads. */
4882 if (reload_when_needed[j] != RELOAD_OTHER
4883 && reload_when_needed[j] != RELOAD_FOR_INPUT)
4884 abort ();
4885#endif
4886
4887 /* Add a death note to this insn, for an input reload. */
4888
4889 if ((reload_when_needed[j] == RELOAD_OTHER
4890 || reload_when_needed[j] == RELOAD_FOR_INPUT)
4891 && ! dead_or_set_p (insn, reloadreg))
4892 REG_NOTES (insn)
4893 = gen_rtx (EXPR_LIST, REG_DEAD,
4894 reloadreg, REG_NOTES (insn));
4895 }
4896
4897 /* When we inherit a reload, the last marked death of the reload reg
4898 may no longer really be a death. */
4899 if (reload_reg_rtx[j] != 0
4900 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
4901 && reload_inherited[j])
4902 {
4903 /* Handle inheriting an output reload.
4904 Remove the death note from the output reload insn. */
4905 if (reload_spill_index[j] >= 0
4906 && GET_CODE (reload_in[j]) == REG
4907 && spill_reg_store[reload_spill_index[j]] != 0
4908 && find_regno_note (spill_reg_store[reload_spill_index[j]],
4909 REG_DEAD, REGNO (reload_reg_rtx[j])))
4910 remove_death (REGNO (reload_reg_rtx[j]),
4911 spill_reg_store[reload_spill_index[j]]);
4912 /* Likewise for input reloads that were inherited. */
4913 else if (reload_spill_index[j] >= 0
4914 && GET_CODE (reload_in[j]) == REG
4915 && spill_reg_store[reload_spill_index[j]] == 0
4916 && reload_inheritance_insn[j] != 0
a8fdc208 4917 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
4918 REGNO (reload_reg_rtx[j])))
4919 remove_death (REGNO (reload_reg_rtx[j]),
4920 reload_inheritance_insn[j]);
4921 else
4922 {
4923 rtx prev;
4924
4925 /* We got this register from find_equiv_reg.
4926 Search back for its last death note and get rid of it.
4927 But don't search back too far.
4928 Don't go past a place where this reg is set,
4929 since a death note before that remains valid. */
4930 for (prev = PREV_INSN (insn);
4931 prev && GET_CODE (prev) != CODE_LABEL;
4932 prev = PREV_INSN (prev))
4933 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
4934 && dead_or_set_p (prev, reload_reg_rtx[j]))
4935 {
4936 if (find_regno_note (prev, REG_DEAD,
4937 REGNO (reload_reg_rtx[j])))
4938 remove_death (REGNO (reload_reg_rtx[j]), prev);
4939 break;
4940 }
4941 }
4942 }
4943
4944 /* We might have used find_equiv_reg above to choose an alternate
4945 place from which to reload. If so, and it died, we need to remove
4946 that death and move it to one of the insns we just made. */
4947
4948 if (oldequiv_reg != 0
4949 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
4950 {
4951 rtx prev, prev1;
4952
4953 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
4954 prev = PREV_INSN (prev))
4955 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
4956 && dead_or_set_p (prev, oldequiv_reg))
4957 {
4958 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
4959 {
4960 for (prev1 = this_reload_insn;
4961 prev1; prev1 = PREV_INSN (prev1))
 4962		  if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
4963 && reg_overlap_mentioned_p (oldequiv_reg,
4964 PATTERN (prev1)))
4965 {
4966 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
4967 oldequiv_reg,
4968 REG_NOTES (prev1));
4969 break;
4970 }
4971 remove_death (REGNO (oldequiv_reg), prev);
4972 }
4973 break;
4974 }
4975 }
4976#endif
4977
4978 /* If we are reloading a register that was recently stored in with an
4979 output-reload, see if we can prove there was
4980 actually no need to store the old value in it. */
4981
4982 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
4983 /* This is unsafe if some other reload uses the same reg first. */
4984 && (reload_when_needed[j] == RELOAD_OTHER
4985 || reload_when_needed[j] == RELOAD_FOR_INPUT
4986 || reload_when_needed[j] == RELOAD_FOR_INPUT_RELOAD_ADDRESS)
4987 && GET_CODE (reload_in[j]) == REG
4988#if 0
4989 /* There doesn't seem to be any reason to restrict this to pseudos
4990 and doing so loses in the case where we are copying from a
4991 register of the wrong class. */
4992 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
4993#endif
4994 && spill_reg_store[reload_spill_index[j]] != 0
4995 && dead_or_set_p (insn, reload_in[j])
4996 /* This is unsafe if operand occurs more than once in current
4997 insn. Perhaps some occurrences weren't reloaded. */
4998 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
4999 delete_output_reload (insn, j,
5000 spill_reg_store[reload_spill_index[j]]);
5001
5002 /* Input-reloading is done. Now do output-reloading,
5003 storing the value from the reload-register after the main insn
5004 if reload_out[j] is nonzero.
5005
5006 ??? At some point we need to support handling output reloads of
5007 JUMP_INSNs or insns that set cc0. */
5008 old = reload_out[j];
5009 if (old != 0
5010 && reload_reg_rtx[j] != old
5011 && reload_reg_rtx[j] != 0)
5012 {
5013 register rtx reloadreg = reload_reg_rtx[j];
5014 register rtx second_reloadreg = 0;
5015 rtx prev_insn = PREV_INSN (first_output_reload_insn);
5016 rtx note, p;
5017 enum machine_mode mode;
5018 int special = 0;
5019
5020 /* An output operand that dies right away does need a reload,
5021 but need not be copied from it. Show the new location in the
5022 REG_UNUSED note. */
5023 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
5024 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
5025 {
5026 XEXP (note, 0) = reload_reg_rtx[j];
5027 continue;
5028 }
5029 else if (GET_CODE (old) == SCRATCH)
5030 /* If we aren't optimizing, there won't be a REG_UNUSED note,
5031 but we don't want to make an output reload. */
5032 continue;
5033
5034#if 0
5035 /* Strip off of OLD any size-increasing SUBREGs such as
5036 (SUBREG:SI foo:QI 0). */
5037
5038 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
5039 && (GET_MODE_SIZE (GET_MODE (old))
5040 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
5041 old = SUBREG_REG (old);
5042#endif
5043
 5044      /* If this is a JUMP_INSN, we can't support output reloads yet.  */
5045 if (GET_CODE (insn) == JUMP_INSN)
5046 abort ();
5047
5048 /* Determine the mode to reload in.
5049 See comments above (for input reloading). */
5050
5051 mode = GET_MODE (old);
5052 if (mode == VOIDmode)
5053 abort (); /* Should never happen for an output. */
5054
5055 /* A strict-low-part output operand needs to be reloaded
5056 in the mode of the entire value. */
5057 if (reload_strict_low[j])
5058 {
5059 mode = GET_MODE (SUBREG_REG (reload_out[j]));
5060 /* Encapsulate OLD into that mode. */
5061 /* If OLD is a subreg, then strip it, since the subreg will
5062 be altered by this very reload. */
5063 while (GET_CODE (old) == SUBREG && GET_MODE (old) != mode)
5064 old = SUBREG_REG (old);
5065 if (GET_MODE (old) != VOIDmode
5066 && mode != GET_MODE (old))
5067 old = gen_rtx (SUBREG, mode, old, 0);
5068 }
5069
5070 if (GET_MODE (reloadreg) != mode)
5071 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5072
5073#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5074
5075 /* If we need two reload regs, set RELOADREG to the intermediate
5076 one, since it will be stored into OUT. We might need a secondary
5077 register only for an input reload, so check again here. */
5078
5079 if (reload_secondary_reload[j] >= 0
5080 && (SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
5081 mode, old)
5082 != NO_REGS))
5083 {
5084 second_reloadreg = reloadreg;
5085 reloadreg = reload_reg_rtx[reload_secondary_reload[j]];
5086
5087 /* See if RELOADREG is to be used as a scratch register
5088 or as an intermediate register. */
5089 if (reload_secondary_icode[j] != CODE_FOR_nothing)
5090 {
5091 emit_insn_before ((GEN_FCN (reload_secondary_icode[j])
5092 (old, second_reloadreg, reloadreg)),
5093 first_output_reload_insn);
5094 special = 1;
5095 }
5096 else
5097 {
5098 /* See if we need both a scratch and intermediate reload
5099 register. */
5100 int secondary_reload = reload_secondary_reload[j];
5101 enum insn_code tertiary_icode
5102 = reload_secondary_icode[secondary_reload];
5103 rtx pat;
5104
5105 if (GET_MODE (reloadreg) != mode)
5106 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5107
5108 if (tertiary_icode != CODE_FOR_nothing)
5109 {
5110 rtx third_reloadreg
5111 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5112 pat = (GEN_FCN (tertiary_icode)
5113 (reloadreg, second_reloadreg, third_reloadreg));
5114 }
5115 else
5116 pat = gen_move_insn (reloadreg, second_reloadreg);
5117
5118 emit_insn_before (pat, first_output_reload_insn);
5119 }
5120 }
5121#endif
5122
5123 /* Output the last reload insn. */
5124 if (! special)
5125 emit_insn_before (gen_move_insn (old, reloadreg),
5126 first_output_reload_insn);
5127
5128#ifdef PRESERVE_DEATH_INFO_REGNO_P
5129 /* If final will look at death notes for this reg,
5130 put one on the last output-reload insn to use it. Similarly
5131 for any secondary register. */
5132 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
5133 for (p = PREV_INSN (first_output_reload_insn);
5134 p != prev_insn; p = PREV_INSN (p))
5135 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
5136 && reg_overlap_mentioned_p (reloadreg, PATTERN (p)))
5137 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
5138 reloadreg, REG_NOTES (p));
5139
5140#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
 5141	      if (! special && second_reloadreg != 0
 5142		  && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
5143 for (p = PREV_INSN (first_output_reload_insn);
5144 p != prev_insn; p = PREV_INSN (p))
5145 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
5146 && reg_overlap_mentioned_p (second_reloadreg, PATTERN (p)))
5147 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
5148 second_reloadreg, REG_NOTES (p));
5149#endif
5150#endif
5151 /* Look at all insns we emitted, just to be safe. */
5152 for (p = NEXT_INSN (prev_insn); p != first_output_reload_insn;
5153 p = NEXT_INSN (p))
5154 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
5155 {
5156 /* If this output reload doesn't come from a spill reg,
5157 clear any memory of reloaded copies of the pseudo reg.
5158 If this output reload comes from a spill reg,
5159 reg_has_output_reload will make this do nothing. */
5160 note_stores (PATTERN (p), forget_old_reloads_1);
5161
5162 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
5163 store_insn = p;
5164 }
5165
5166 first_output_reload_insn = NEXT_INSN (prev_insn);
5167 }
5168
5169 if (reload_spill_index[j] >= 0)
5170 new_spill_reg_store[reload_spill_index[j]] = store_insn;
5171 }
5172
5173 /* Move death notes from INSN
5174 to output-operand-address and output reload insns. */
5175#ifdef PRESERVE_DEATH_INFO_REGNO_P
5176 {
5177 rtx insn1;
5178 /* Loop over those insns, last ones first. */
5179 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
5180 insn1 = PREV_INSN (insn1))
5181 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
5182 {
5183 rtx source = SET_SRC (PATTERN (insn1));
5184 rtx dest = SET_DEST (PATTERN (insn1));
5185
5186 /* The note we will examine next. */
5187 rtx reg_notes = REG_NOTES (insn);
5188 /* The place that pointed to this note. */
5189 rtx *prev_reg_note = &REG_NOTES (insn);
5190
5191 /* If the note is for something used in the source of this
5192 reload insn, or in the output address, move the note. */
5193 while (reg_notes)
5194 {
5195 rtx next_reg_notes = XEXP (reg_notes, 1);
5196 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
5197 && GET_CODE (XEXP (reg_notes, 0)) == REG
5198 && ((GET_CODE (dest) != REG
5199 && reg_overlap_mentioned_p (XEXP (reg_notes, 0), dest))
5200 || reg_overlap_mentioned_p (XEXP (reg_notes, 0), source)))
5201 {
5202 *prev_reg_note = next_reg_notes;
5203 XEXP (reg_notes, 1) = REG_NOTES (insn1);
5204 REG_NOTES (insn1) = reg_notes;
5205 }
5206 else
5207 prev_reg_note = &XEXP (reg_notes, 1);
5208
5209 reg_notes = next_reg_notes;
5210 }
5211 }
5212 }
5213#endif
5214
5215 /* For all the spill regs newly reloaded in this instruction,
5216 record what they were reloaded from, so subsequent instructions
5217 can inherit the reloads.
5218
5219 Update spill_reg_store for the reloads of this insn.
e9e79d69 5220 Copy the elements that were updated in the loop above. */
5221
5222 for (j = 0; j < n_reloads; j++)
5223 {
5224 register int r = reload_order[j];
5225 register int i = reload_spill_index[r];
5226
5227 /* I is nonneg if this reload used one of the spill regs.
5228 If reload_reg_rtx[r] is 0, this is an optional reload
5229 that we opted to ignore. */
d445b551 5230
5231 if (i >= 0 && reload_reg_rtx[r] != 0)
5232 {
5233 /* First, clear out memory of what used to be in this spill reg.
5234 If consecutive registers are used, clear them all. */
5235 int nr
5236 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
5237 int k;
5238
5239 for (k = 0; k < nr; k++)
5240 {
5241 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
5242 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
5243 }
5244
5245 /* Maybe the spill reg contains a copy of reload_out. */
5246 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
5247 {
5248 register int nregno = REGNO (reload_out[r]);
5249
5250 spill_reg_store[i] = new_spill_reg_store[i];
32131a9c 5251 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 5252
5253 for (k = 0; k < nr; k++)
5254 {
5255 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5256 = nregno;
5257 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
5258 }
5259 }
d445b551 5260
5261 /* Maybe the spill reg contains a copy of reload_in. */
5262 else if (reload_out[r] == 0
5263 && reload_in[r] != 0
5264 && (GET_CODE (reload_in[r]) == REG
5265 || GET_CODE (reload_in_reg[r]) == REG))
5266 {
5267 register int nregno;
5268 if (GET_CODE (reload_in[r]) == REG)
5269 nregno = REGNO (reload_in[r]);
5270 else
5271 nregno = REGNO (reload_in_reg[r]);
5272
5273 /* If there are two separate reloads (one in and one out)
5274 for the same (hard or pseudo) reg,
a8fdc208 5275 leave reg_last_reload_reg set
5276 based on the output reload.
5277 Otherwise, set it from this input reload. */
5278 if (!reg_has_output_reload[nregno]
5279 /* But don't do so if another input reload
5280 will clobber this one's value. */
5281 && reload_reg_reaches_end_p (spill_regs[i],
5282 reload_when_needed[r]))
5283 {
5284 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
5285
5286 /* Unless we inherited this reload, show we haven't
5287 recently done a store. */
5288 if (! reload_inherited[r])
5289 spill_reg_store[i] = 0;
5290
5291 for (k = 0; k < nr; k++)
5292 {
5293 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5294 = nregno;
5295 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
5296 = insn;
5297 }
5298 }
5299 }
5300 }
5301
5302 /* The following if-statement was #if 0'd in 1.34 (or before...).
5303 It's reenabled in 1.35 because supposedly nothing else
5304 deals with this problem. */
5305
5306 /* If a register gets output-reloaded from a non-spill register,
5307 that invalidates any previous reloaded copy of it.
5308 But forget_old_reloads_1 won't get to see it, because
5309 it thinks only about the original insn. So invalidate it here. */
5310 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
5311 {
5312 register int nregno = REGNO (reload_out[r]);
5313 reg_last_reload_reg[nregno] = 0;
5314 }
5315 }
5316}
5317\f
5318/* Emit code before BEFORE_INSN to perform an input reload of IN to RELOADREG.
d445b551 5319 Returns first insn emitted. */
5320
5321rtx
5322gen_input_reload (reloadreg, in, before_insn)
5323 rtx reloadreg;
5324 rtx in;
5325 rtx before_insn;
5326{
5327 register rtx prev_insn = PREV_INSN (before_insn);
5328
a8fdc208 5329 /* How to do this reload can get quite tricky. Normally, we are being
5330 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
5331 register that didn't get a hard register. In that case we can just
5332 call emit_move_insn.
5333
5334 We can also be asked to reload a PLUS that adds either two registers or
5335 a register and a constant or MEM. This can occur during frame pointer
 5336    a register and a constant or MEM.  This can occur during frame pointer
 5337    elimination.  That case is handled by trying to emit a single insn
5338
5339 Finally, we could be called to handle an 'o' constraint by putting
5340 an address into a register. In that case, we first try to do this
5341 with a named pattern of "reload_load_address". If no such pattern
5342 exists, we just emit a SET insn and hope for the best (it will normally
5343 be valid on machines that use 'o').
5344
5345 This entire process is made complex because reload will never
5346 process the insns we generate here and so we must ensure that
 5347    they will fit their constraints, and also because parts of
5348 IN might be being reloaded separately and replaced with spill registers.
5349 Because of this, we are, in some sense, just guessing the right approach
5350 here. The one listed above seems to work.
5351
5352 ??? At some point, this whole thing needs to be rethought. */
5353
5354 if (GET_CODE (in) == PLUS
5355 && GET_CODE (XEXP (in, 0)) == REG
5356 && (GET_CODE (XEXP (in, 1)) == REG
5357 || CONSTANT_P (XEXP (in, 1))
5358 || GET_CODE (XEXP (in, 1)) == MEM))
5359 {
5360 /* We need to compute the sum of what is either a register and a
5361 constant, a register and memory, or a hard register and a pseudo
5362 register and put it into the reload register. The best possible way
5363 of doing this is if the machine has a three-operand ADD insn that
5364 accepts the required operands.
5365
5366 The simplest approach is to try to generate such an insn and see if it
5367 is recognized and matches its constraints. If so, it can be used.
5368
5369 It might be better not to actually emit the insn unless it is valid,
5370 but we need to pass the insn as an operand to `recog' and it is
5371 simpler to emit and then delete the insn if not valid than to
5372 dummy things up. */
a8fdc208 5373
5374 rtx move_operand, other_operand, insn;
5375 int code;
a8fdc208 5376
5377 /* Since constraint checking is strict, commutativity won't be
5378 checked, so we need to do that here to avoid spurious failure
5379 if the add instruction is two-address and the second operand
5380 of the add is the same as the reload reg, which is frequently
5381 the case. If the insn would be A = B + A, rearrange it so
5382 it will be A = A + B as constrain_operands expects. */
a8fdc208 5383
5384 if (GET_CODE (XEXP (in, 1)) == REG
5385 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
5386 in = gen_rtx (PLUS, GET_MODE (in), XEXP (in, 1), XEXP (in, 0));
5387
5388 insn = emit_insn_before (gen_rtx (SET, VOIDmode, reloadreg, in),
5389 before_insn);
5390 code = recog_memoized (insn);
5391
5392 if (code >= 0)
5393 {
5394 insn_extract (insn);
 5395	  /* We want constrain_operands to treat this insn strictly in
5396 its validity determination, i.e., the way it would after reload
5397 has completed. */
5398 if (constrain_operands (code, 1))
5399 return insn;
5400 }
5401
5402 if (PREV_INSN (insn))
5403 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
5404 if (NEXT_INSN (insn))
5405 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
5406
 5407      /* If that failed, we must use a conservative two-insn sequence:
 5408	 use a move to copy the constant, MEM, or pseudo register to the
 5409	 reload register, since "move" can handle an arbitrary operand,
 5410	 unlike add, which in general cannot.  Then add the registers.
5411
5412 If there is another way to do this for a specific machine, a
5413 DEFINE_PEEPHOLE should be specified that recognizes the sequence
5414 we emit below. */
5415
5416 if (CONSTANT_P (XEXP (in, 1))
7e929166 5417 || GET_CODE (XEXP (in, 1)) == MEM
5418 || (GET_CODE (XEXP (in, 1)) == REG
5419 && REGNO (XEXP (in, 1)) >= FIRST_PSEUDO_REGISTER))
5420 move_operand = XEXP (in, 1), other_operand = XEXP (in, 0);
5421 else
5422 move_operand = XEXP (in, 0), other_operand = XEXP (in, 1);
5423
5424 emit_insn_before (gen_move_insn (reloadreg, move_operand), before_insn);
5425 emit_insn_before (gen_add2_insn (reloadreg, other_operand), before_insn);
5426 }
5427
5428 /* If IN is a simple operand, use gen_move_insn. */
5429 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
5430 emit_insn_before (gen_move_insn (reloadreg, in), before_insn);
5431
5432#ifdef HAVE_reload_load_address
5433 else if (HAVE_reload_load_address)
5434 emit_insn_before (gen_reload_load_address (reloadreg, in), before_insn);
5435#endif
5436
 5437  /* Otherwise, just write (set RELOADREG IN) and hope for the best.  */
5438 else
5439 emit_insn_before (gen_rtx (SET, VOIDmode, reloadreg, in), before_insn);
5440
5441 /* Return the first insn emitted.
5442 We can not just return PREV_INSN (before_insn), because there may have
5443 been multiple instructions emitted. Also note that gen_move_insn may
5444 emit more than one insn itself, so we can not assume that there is one
5445 insn emitted per emit_insn_before call. */
5446
5447 return NEXT_INSN (prev_insn);
5448}
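
/* A rough illustration of the PLUS case above, which typically arises
   from frame pointer elimination; all register numbers and modes here
   are hypothetical.  To reload (plus:SI (reg:SI 6) (const_int 8)) into
   the reload register (reg:SI 2), we first try the single insn

       (set (reg:SI 2) (plus:SI (reg:SI 6) (const_int 8)))

   If recog_memoized/constrain_operands reject that three-operand add,
   the insn is unlinked again and the conservative two-insn sequence is
   emitted instead:

       (set (reg:SI 2) (const_int 8))                    ; gen_move_insn
       (set (reg:SI 2) (plus:SI (reg:SI 2) (reg:SI 6)))  ; gen_add2_insn  */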
5449\f
5450/* Delete a previously made output-reload
5451 whose result we now believe is not needed.
5452 First we double-check.
5453
5454 INSN is the insn now being processed.
5455 OUTPUT_RELOAD_INSN is the insn of the output reload.
5456 J is the reload-number for this insn. */
5457
5458static void
5459delete_output_reload (insn, j, output_reload_insn)
5460 rtx insn;
5461 int j;
5462 rtx output_reload_insn;
5463{
5464 register rtx i1;
5465
5466 /* Get the raw pseudo-register referred to. */
5467
5468 rtx reg = reload_in[j];
5469 while (GET_CODE (reg) == SUBREG)
5470 reg = SUBREG_REG (reg);
5471
5472 /* If the pseudo-reg we are reloading is no longer referenced
5473 anywhere between the store into it and here,
5474 and no jumps or labels intervene, then the value can get
5475 here through the reload reg alone.
5476 Otherwise, give up--return. */
5477 for (i1 = NEXT_INSN (output_reload_insn);
5478 i1 != insn; i1 = NEXT_INSN (i1))
5479 {
5480 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
5481 return;
5482 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
5483 && reg_mentioned_p (reg, PATTERN (i1)))
5484 return;
5485 }
5486
5487 /* If this insn will store in the pseudo again,
5488 the previous store can be removed. */
5489 if (reload_out[j] == reload_in[j])
5490 delete_insn (output_reload_insn);
5491
5492 /* See if the pseudo reg has been completely replaced
5493 with reload regs. If so, delete the store insn
5494 and forget we had a stack slot for the pseudo. */
5495 else if (reg_n_deaths[REGNO (reg)] == 1
5496 && reg_basic_block[REGNO (reg)] >= 0
5497 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
5498 {
5499 rtx i2;
5500
5501 /* We know that it was used only between here
5502 and the beginning of the current basic block.
5503 (We also know that the last use before INSN was
5504 the output reload we are thinking of deleting, but never mind that.)
5505 Search that range; see if any ref remains. */
5506 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
5507 {
5508 rtx set = single_set (i2);
5509
5510 /* Uses which just store in the pseudo don't count,
5511 since if they are the only uses, they are dead. */
d445b551 5512 if (set != 0 && SET_DEST (set) == reg)
5513 continue;
5514 if (GET_CODE (i2) == CODE_LABEL
5515 || GET_CODE (i2) == JUMP_INSN)
5516 break;
5517 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
5518 && reg_mentioned_p (reg, PATTERN (i2)))
5519 /* Some other ref remains;
5520 we can't do anything. */
5521 return;
5522 }
5523
5524 /* Delete the now-dead stores into this pseudo. */
5525 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
5526 {
5527 rtx set = single_set (i2);
5528
5529 if (set != 0 && SET_DEST (set) == reg)
5530 delete_insn (i2);
5531 if (GET_CODE (i2) == CODE_LABEL
5532 || GET_CODE (i2) == JUMP_INSN)
5533 break;
5534 }
5535
5536 /* For the debugging info,
5537 say the pseudo lives in this reload reg. */
5538 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
5539 alter_reg (REGNO (reg), -1);
5540 }
5541}
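
/* A sketch of the situation handled above, with hypothetical register
   numbers.  Suppose an earlier insn output-reloaded pseudo 65 through
   spill reg 3:

       (set (reg:SI 65) (reg:SI 3))      <- output_reload_insn

   and the current INSN input-reloads pseudo 65 from that same spill reg,
   with the pseudo dead in INSN and not referenced in between.  Then the
   value reaches INSN through the reload register alone, so the store
   back into the pseudo (and, if nothing else refers to it, its stack
   slot) can be discarded.  */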
5542
5543\f
a8fdc208 5544/* Output reload-insns to reload VALUE into RELOADREG.
 5545    VALUE is an autoincrement or autodecrement RTX whose operand
5546 is a register or memory location;
5547 so reloading involves incrementing that location.
5548
5549 INC_AMOUNT is the number to increment or decrement by (always positive).
5550 This cannot be deduced from VALUE.
5551
5552 INSN is the insn before which the new insns should be emitted.
5553
5554 The return value is the first of the insns emitted. */
5555
5556static rtx
5557inc_for_reload (reloadreg, value, inc_amount, insn)
5558 rtx reloadreg;
5559 rtx value;
5560 int inc_amount;
5561 rtx insn;
5562{
5563 /* REG or MEM to be copied and incremented. */
5564 rtx incloc = XEXP (value, 0);
5565 /* Nonzero if increment after copying. */
5566 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
5567
5568 /* No hard register is equivalent to this register after
5569 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
5570 we could inc/dec that register as well (maybe even using it for
5571 the source), but I'm not sure it's worth worrying about. */
5572 if (GET_CODE (incloc) == REG)
5573 reg_last_reload_reg[REGNO (incloc)] = 0;
5574
5575 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
5576 inc_amount = - inc_amount;
5577
5578 /* First handle preincrement, which is simpler. */
5579 if (! post)
5580 {
5581 /* If incrementing a register, assume we can
5582 output an insn to increment it directly. */
5583 if (GET_CODE (incloc) == REG &&
5584 (REGNO (incloc) < FIRST_PSEUDO_REGISTER
5585 || reg_renumber[REGNO (incloc)] >= 0))
5586 {
5587 rtx first_new
5588 = emit_insn_before (gen_add2_insn (incloc,
5589 gen_rtx (CONST_INT, VOIDmode,
5590 inc_amount)),
5591 insn);
5592 emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
5593 return first_new;
5594 }
5595 else
5596 /* Else we must not assume we can increment the location directly
5597 (even though on many target machines we can);
5598 copy it to the reload register, increment there, then save back. */
5599 {
5600 rtx first_new
5601 = emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
5602 emit_insn_before (gen_add2_insn (reloadreg,
5603 gen_rtx (CONST_INT, VOIDmode,
5604 inc_amount)),
5605 insn);
5606 emit_insn_before (gen_move_insn (incloc, reloadreg), insn);
5607 return first_new;
5608 }
5609 }
5610 /* Postincrement.
5611 Because this might be a jump insn or a compare, and because RELOADREG
5612 may not be available after the insn in an input reload,
5613 we must do the incrementation before the insn being reloaded for. */
5614 else
5615 {
5616 /* Copy the value, then increment it. */
5617 rtx first_new
5618 = emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
5619
5620 /* If incrementing a register, assume we can
5621 output an insn to increment it directly. */
5622 if (GET_CODE (incloc) == REG &&
5623 (REGNO (incloc) < FIRST_PSEUDO_REGISTER
5624 || reg_renumber[REGNO (incloc)] >= 0))
5625 {
5626 emit_insn_before (gen_add2_insn (incloc,
5627 gen_rtx (CONST_INT, VOIDmode,
5628 inc_amount)),
5629 insn);
5630 }
5631 else
5632 /* Else we must not assume we can increment INCLOC
5633 (even though on many target machines we can);
5634 increment the copy in the reload register,
5635 save that back, then decrement the reload register
5636 so it has the original value. */
5637 {
5638 emit_insn_before (gen_add2_insn (reloadreg,
5639 gen_rtx (CONST_INT, VOIDmode,
5640 inc_amount)),
5641 insn);
5642 emit_insn_before (gen_move_insn (incloc, reloadreg), insn);
5643 emit_insn_before (gen_sub2_insn (reloadreg,
5644 gen_rtx (CONST_INT, VOIDmode,
5645 inc_amount)),
5646 insn);
5647 }
5648 return first_new;
5649 }
5650}
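
/* A sketch of the sequences emitted above, with hypothetical register
   numbers.  For a preincrement reload of (pre_inc:SI (reg:SI 65)) with
   INC_AMOUNT 4, when the pseudo has a hard register, we emit before INSN

       (set (reg:SI 65) (plus:SI (reg:SI 65) (const_int 4)))  ; add2
       (set (reg:SI 2) (reg:SI 65))                           ; copy

   For a postincrement, the copy comes first and the increment after it
   (still before INSN).  When the location cannot be incremented
   directly, the increment is done in the reload register, stored back,
   and then undone in the reload register so that it still holds the
   value the insn expects.  */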
5651\f
5652/* Return 1 if we are certain that the constraint-string STRING allows
5653 the hard register REG. Return 0 if we can't be sure of this. */
5654
5655static int
5656constraint_accepts_reg_p (string, reg)
5657 char *string;
5658 rtx reg;
5659{
5660 int value = 0;
5661 int regno = true_regnum (reg);
5662 int c;
5663
5664 /* Initialize for first alternative. */
5665 value = 0;
5666 /* Check that each alternative contains `g' or `r'. */
5667 while (1)
5668 switch (c = *string++)
5669 {
5670 case 0:
5671 /* If an alternative lacks `g' or `r', we lose. */
5672 return value;
5673 case ',':
5674 /* If an alternative lacks `g' or `r', we lose. */
5675 if (value == 0)
5676 return 0;
5677 /* Initialize for next alternative. */
5678 value = 0;
5679 break;
5680 case 'g':
5681 case 'r':
5682 /* Any general reg wins for this alternative. */
5683 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
5684 value = 1;
5685 break;
5686 default:
5687 /* Any reg in specified class wins for this alternative. */
5688 {
5689 int class = REG_CLASS_FROM_LETTER (c);
5690
5691 if (TEST_HARD_REG_BIT (reg_class_contents[class], regno))
5692 value = 1;
5693 }
5694 }
5695}
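
/* For example (hypothetical constraint string): given "r,m" and a
   general-purpose hard register, the function above returns 0.  The
   first alternative accepts the register ('r'), but the second ("m")
   does not, and every alternative must accept the register before the
   answer can be 1.  */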
5696\f
5697/* Return the number of places FIND appears within X, but don't count
5698 an occurrence if some SET_DEST is FIND. */
5699
5700static int
5701count_occurrences (x, find)
5702 register rtx x, find;
5703{
5704 register int i, j;
5705 register enum rtx_code code;
5706 register char *format_ptr;
5707 int count;
5708
5709 if (x == find)
5710 return 1;
5711 if (x == 0)
5712 return 0;
5713
5714 code = GET_CODE (x);
5715
5716 switch (code)
5717 {
5718 case REG:
5719 case QUEUED:
5720 case CONST_INT:
5721 case CONST_DOUBLE:
5722 case SYMBOL_REF:
5723 case CODE_LABEL:
5724 case PC:
5725 case CC0:
5726 return 0;
5727
5728 case SET:
5729 if (SET_DEST (x) == find)
5730 return count_occurrences (SET_SRC (x), find);
5731 break;
5732 }
5733
5734 format_ptr = GET_RTX_FORMAT (code);
5735 count = 0;
5736
5737 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5738 {
5739 switch (*format_ptr++)
5740 {
5741 case 'e':
5742 count += count_occurrences (XEXP (x, i), find);
5743 break;
5744
5745 case 'E':
5746 if (XVEC (x, i) != NULL)
5747 {
5748 for (j = 0; j < XVECLEN (x, i); j++)
5749 count += count_occurrences (XVECEXP (x, i, j), find);
5750 }
5751 break;
5752 }
5753 }
5754 return count;
5755}
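
/* For example (hypothetical RTL): in the pattern
   (set (reg:SI 65) (plus:SI (reg:SI 65) (const_int 1))), counting
   occurrences of (reg:SI 65) yields 1, not 2, because the SET_DEST
   occurrence is skipped.  This matters for the check earlier in this
   file, before delete_output_reload is called, which requires that the
   reloaded operand occur only once in the insn.  */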