/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING. If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */


#include <stdio.h>
#include "config.h"
#include "rtl.h"
#include "obstack.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "flags.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "reload.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"

/* This file contains the reload pass of the compiler, which is
   run after register allocation has been done. It checks that
   each insn is valid (operands required to be in registers really
   are in registers of the proper class) and fixes up invalid ones
   by copying values temporarily into registers for the insns
   that need them.

   The results of register allocation are described by the vector
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
   can be used to find which hard reg, if any, a pseudo reg is in.

   The technique we always use is to free up a few hard regs that are
   called ``reload regs'', and for each place where a pseudo reg
   must be in a hard reg, copy it temporarily into one of the reload regs.

   All the pseudos that were formerly allocated to the hard regs that
   are now in use as reload regs must be ``spilled''. This means
   that they go to other hard regs, or to stack slots if no other
   available hard regs can be found. Spilling can invalidate more
   insns, requiring additional need for reloads, so we must keep checking
   until the process stabilizes.

   For machines with different classes of registers, we must keep track
   of the register class needed for each reload, and make sure that
   we allocate enough reload registers of each class.

   The file reload.c contains the code that checks one insn for
   validity and reports the reloads that it needs. This file
   is in charge of scanning the entire rtl code, accumulating the
   reload needs, spilling, assigning reload registers to use for
   fixing up each insn, and generating the new insns to copy values
   into the reload registers. */

/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which pseudo reg N has been reloaded (perhaps for a previous insn). */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N. */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn. */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for. */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer). Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem. */
rtx *reg_equiv_memory_loc;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine.) */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot. */
rtx *reg_equiv_mem;

/* Widest width in which each pseudo reg is referred to (via subreg). */
static int *reg_max_ref_width;

/* Element N is the insn that initialized reg N from its equivalent
   constant or memory slot. */
static rtx *reg_equiv_init;

/* During reload_as_needed, element N contains the last pseudo regno
   reloaded into the Nth reload register. This vector is in parallel
   with spill_regs. If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur. */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   the Nth reload register was last used. This vector is in parallel
   with spill_regs, and its contents are significant only when
   reg_reloaded_contents is significant. */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Number of spill-regs so far; number of valid elements of spill_regs. */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet. This rtx is reused, provided it has
   the proper mode. */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store. */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs. */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

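/* Illustrative note (hypothetical numbers): if spill_regs held {3, 9, 12}
   with n_spills == 3, then spill_reg_order[9] == 1, spill_reg_order[12] == 2,
   and spill_reg_order[r] == -1 for every hard reg r that is not currently a
   spill register. */
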
/* This reg set indicates registers that may not be used for retrying global
   allocation. The registers that may not be used include all spill registers
   and the frame pointer (if we are using one). */
HARD_REG_SET forbidden_regs;

/* This reg set indicates registers that are not good for spill registers.
   They will not be used to complete groups of spill registers. This includes
   all fixed registers, registers that may be eliminated, and registers
   explicitly used in the rtl.

   (spill_reg_order prevents these registers from being used to start a
   group.) */
static HARD_REG_SET bad_spill_regs;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers. `spills' is the number of
   elements that are actually valid; new ones are added at the end. */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* Describes order of preference for putting regs into spill_regs.
   Contains the numbers of all the hard regs, in order most preferred first.
   This order is different for each function.
   It is set up by order_regs_for_reload.
   Empty elements at the end contain -1. */
static short potential_reload_regs[FIRST_PSEUDO_REGISTER];

/* 1 for a hard register that appears explicitly in the rtl
   (for example, function value registers, special registers
   used by insns, structure value pointer registers). */
static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];

/* Indicates if a register was counted against the need for
   groups. 0 means it can count against max_nongroup instead. */
static HARD_REG_SET counted_for_groups;

/* Indicates if a register was counted against the need for
   non-groups. 0 means it can become part of a new group.
   During choose_reload_regs, 1 here means don't use this reg
   as part of a group, even if it seems to be otherwise ok. */
static HARD_REG_SET counted_for_nongroups;

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register. */

static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above. */

char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */

char double_reg_address_ok;

/* Record the stack slot for each spilled hard register. */

static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot. */

static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Indexed by register class and basic block number, nonzero if there is
   any need for a spill register of that class in that basic block.
   The pointer is 0 if we did stupid allocation and don't know
   the structure of basic blocks. */

char *basic_block_needs[N_REG_CLASSES];

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg. */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls. */

int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently. */

int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects. They provide a
   place to pass a scratch register. */

enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn. */

struct obstack reload_obstack;
char *reload_firstobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* List of labels that must never be deleted. */
extern rtx forced_labels;

/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another. If there is more than one way of eliminating a
   particular register, the most preferred should be specified first. */

static struct elim_table
{
  int from;                     /* Register number to be eliminated. */
  int to;                       /* Register number used as replacement. */
  int initial_offset;           /* Initial difference between values. */
  int can_eliminate;            /* Non-zero if this elimination can be done. */
  int can_eliminate_previous;   /* Value of CAN_ELIMINATE in previous scan over
                                   insns made by reload. */
  int offset;                   /* Current offset between the two regs. */
  int max_offset;               /* Maximum offset between the two regs. */
  int previous_offset;          /* Offset at end of previous insn. */
  int ref_outside_mem;          /* "to" has been referenced outside a MEM. */
  rtx from_rtx;                 /* REG rtx for the register to be eliminated.
                                   We cannot simply compare the number since
                                   we might then spuriously replace a hard
                                   register corresponding to a pseudo
                                   assigned to the reg to be eliminated. */
  rtx to_rtx;                   /* REG rtx for the replacement. */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer. */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])

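/* Illustrative sketch only, not taken from any particular target: an
   ELIMINABLE_REGS definition usually looks something like

     {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },
      { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM },
      { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}

   with the most preferred replacement for each register listed first,
   which is exactly the shape reg_eliminate[] is given above. */
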
/* Record the number of pending eliminations that have an offset not equal
   to their initial offset. If non-zero, we use a new copy of each
   replacement result in any insns encountered. */
static int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate. */
static int num_eliminable;

/* For each label, we record the offset of each elimination. If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination. This information is indexed by the number of the label.
   The first table is an array of flags that records whether we have yet
   encountered a label and the second table is an array of arrays, one
   entry in the latter array for each elimination. */

static char *offsets_known_at;
static int (*offsets_at)[NUM_ELIMINABLE_REGS];

/* Number of labels in the current function. */

static int num_labels;

void mark_home_live ();
static void count_possible_groups ();
static int possible_group_p ();
static void scan_paradoxical_subregs ();
static void reload_as_needed ();
static int modes_equiv_for_class_p ();
static void alter_reg ();
static void delete_dead_insn ();
static void spill_failure ();
static int new_spill_reg ();
static void set_label_offsets ();
static int eliminate_regs_in_insn ();
static void mark_not_eliminable ();
static int spill_hard_reg ();
static void choose_reload_regs ();
static void emit_reload_insns ();
static void delete_output_reload ();
static void forget_old_reloads_1 ();
static void order_regs_for_reload ();
static rtx inc_for_reload ();
static int constraint_accepts_reg_p ();
static int count_occurrences ();

extern void remove_death ();
extern rtx adj_offsettable_operand ();
extern rtx form_sum ();

void
init_reload ()
{
  register int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all. */

  register rtx tem
    = gen_rtx (MEM, Pmode,
               gen_rtx (PLUS, Pmode,
                        gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
                        GEN_INT (4)));
  spill_indirect_levels = 0;

  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx (MEM, Pmode, tem);
    }

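  /* Editorial note on the probe above: TEM starts out as a MEM whose address
     looks like a stack slot, (PLUS (REG) (CONST_INT 4)); each successful
     memory_address_p test means that MEM can itself be used as an address,
     and wraps one more MEM around it, so the final count is the number of
     levels of indirection the target accepts, matching the meaning
     documented at the declaration of spill_indirect_levels. */
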
  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */

  tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address. */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx (PLUS, Pmode,
                     gen_rtx (REG, Pmode, FRAME_POINTER_REGNUM),
                     gen_rtx (REG, Pmode, i));
      /* This way, we make sure that reg+reg is an offsettable address. */
      tem = plus_constant (tem, 4);

      if (memory_address_p (QImode, tem))
        {
          double_reg_address_ok = 1;
          break;
        }
    }

  /* Initialize obstack for our rtl allocation. */
  gcc_obstack_init (&reload_obstack);
  reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);

#ifdef HAVE_SECONDARY_RELOADS

  /* Initialize the optabs for doing special input and output reloads. */

  for (i = 0; i < NUM_MACHINE_MODES; i++)
    reload_in_optab[i] = reload_out_optab[i] = CODE_FOR_nothing;

#ifdef HAVE_reload_inqi
  if (HAVE_reload_inqi)
    reload_in_optab[(int) QImode] = CODE_FOR_reload_inqi;
#endif
#ifdef HAVE_reload_inhi
  if (HAVE_reload_inhi)
    reload_in_optab[(int) HImode] = CODE_FOR_reload_inhi;
#endif
#ifdef HAVE_reload_insi
  if (HAVE_reload_insi)
    reload_in_optab[(int) SImode] = CODE_FOR_reload_insi;
#endif
#ifdef HAVE_reload_indi
  if (HAVE_reload_indi)
    reload_in_optab[(int) DImode] = CODE_FOR_reload_indi;
#endif
#ifdef HAVE_reload_inti
  if (HAVE_reload_inti)
    reload_in_optab[(int) TImode] = CODE_FOR_reload_inti;
#endif
#ifdef HAVE_reload_insf
  if (HAVE_reload_insf)
    reload_in_optab[(int) SFmode] = CODE_FOR_reload_insf;
#endif
#ifdef HAVE_reload_indf
  if (HAVE_reload_indf)
    reload_in_optab[(int) DFmode] = CODE_FOR_reload_indf;
#endif
#ifdef HAVE_reload_inxf
  if (HAVE_reload_inxf)
    reload_in_optab[(int) XFmode] = CODE_FOR_reload_inxf;
#endif
#ifdef HAVE_reload_intf
  if (HAVE_reload_intf)
    reload_in_optab[(int) TFmode] = CODE_FOR_reload_intf;
#endif

#ifdef HAVE_reload_outqi
  if (HAVE_reload_outqi)
    reload_out_optab[(int) QImode] = CODE_FOR_reload_outqi;
#endif
#ifdef HAVE_reload_outhi
  if (HAVE_reload_outhi)
    reload_out_optab[(int) HImode] = CODE_FOR_reload_outhi;
#endif
#ifdef HAVE_reload_outsi
  if (HAVE_reload_outsi)
    reload_out_optab[(int) SImode] = CODE_FOR_reload_outsi;
#endif
#ifdef HAVE_reload_outdi
  if (HAVE_reload_outdi)
    reload_out_optab[(int) DImode] = CODE_FOR_reload_outdi;
#endif
#ifdef HAVE_reload_outti
  if (HAVE_reload_outti)
    reload_out_optab[(int) TImode] = CODE_FOR_reload_outti;
#endif
#ifdef HAVE_reload_outsf
  if (HAVE_reload_outsf)
    reload_out_optab[(int) SFmode] = CODE_FOR_reload_outsf;
#endif
#ifdef HAVE_reload_outdf
  if (HAVE_reload_outdf)
    reload_out_optab[(int) DFmode] = CODE_FOR_reload_outdf;
#endif
#ifdef HAVE_reload_outxf
  if (HAVE_reload_outxf)
    reload_out_optab[(int) XFmode] = CODE_FOR_reload_outxf;
#endif
#ifdef HAVE_reload_outtf
  if (HAVE_reload_outtf)
    reload_out_optab[(int) TFmode] = CODE_FOR_reload_outtf;
#endif

#endif /* HAVE_SECONDARY_RELOADS */

}

/* Main entry point for the reload pass, and only entry point
   in this file.

   FIRST is the first insn of the function being compiled.

   GLOBAL nonzero means we were called from global_alloc
   and should attempt to reallocate any pseudoregs that we
   displace from hard regs we will use for reloads.
   If GLOBAL is zero, we do not have enough information to do that,
   so any pseudo reg that is spilled must go to the stack.

   DUMPFILE is the global-reg debugging dump file stream, or 0.
   If it is nonzero, messages are written to it to describe
   which registers are seized as reload regs, which pseudo regs
   are spilled from them, and where the pseudo regs are reallocated to.

   Return value is nonzero if reload failed
   and we must not do any more for this function. */

int
reload (first, global, dumpfile)
     rtx first;
     int global;
     FILE *dumpfile;
{
  register int class;
  register int i;
  register rtx insn;
  register struct elim_table *ep;

  int something_changed;
  int something_needs_reloads;
  int something_needs_elimination;
  int new_basic_block_needs;
  enum reg_class caller_save_spill_class = NO_REGS;
  int caller_save_group_size = 1;

  /* Nonzero means we couldn't get enough spill regs. */
  int failure = 0;

  /* The basic block number currently being processed for INSN. */
  int this_block;

  /* Make sure even insns with volatile mem refs are recognizable. */
  init_recog ();

  /* Enable find_equiv_reg to distinguish insns made by reload. */
  reload_first_uid = get_max_uid ();

  for (i = 0; i < N_REG_CLASSES; i++)
    basic_block_needs[i] = 0;

#ifdef SECONDARY_MEMORY_NEEDED
  /* Initialize the secondary memory table. */
  clear_secondary_mem ();
#endif

  /* Remember which hard regs appear explicitly
     before we merge into `regs_ever_live' the ones in which
     pseudo regs have been allocated. */
  bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);

  /* We don't have a stack slot for any spill reg yet. */
  bzero (spill_stack_slot, sizeof spill_stack_slot);
  bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);

  /* Initialize the save area information for caller-save, in case some
     are needed. */
  init_save_areas ();

  /* Compute which hard registers are now in use
     as homes for pseudo registers.
     This is done here rather than (eg) in global_alloc
     because this point is reached even if not optimizing. */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    mark_home_live (i);

  /* Make sure that the last insn in the chain
     is not something that needs reloading. */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Find all the pseudo registers that didn't get hard regs
     but do have known equivalent constants or memory slots.
     These include parameters (known equivalent to parameter slots)
     and cse'd or loop-moved constant memory addresses.

     Record constant equivalents in reg_equiv_constant
     so they will be substituted by find_reloads.
     Record memory equivalents in reg_mem_equiv so they can
     be substituted eventually by altering the REG-rtx's. */

  reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_constant, max_regno * sizeof (rtx));
  reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
  reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_mem, max_regno * sizeof (rtx));
  reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_init, max_regno * sizeof (rtx));
  reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_equiv_address, max_regno * sizeof (rtx));
  reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
  bzero (reg_max_ref_width, max_regno * sizeof (int));

  /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
     Also find all paradoxical subregs
     and find largest such for each pseudo. */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
        {
          rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
          if (note
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
                  || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
#endif
              )
            {
              rtx x = XEXP (note, 0);
              i = REGNO (SET_DEST (set));
              if (i > LAST_VIRTUAL_REGISTER)
                {
                  if (GET_CODE (x) == MEM)
                    reg_equiv_memory_loc[i] = x;
                  else if (CONSTANT_P (x))
                    {
                      if (LEGITIMATE_CONSTANT_P (x))
                        reg_equiv_constant[i] = x;
                      else
                        reg_equiv_memory_loc[i]
                          = force_const_mem (GET_MODE (SET_DEST (set)), x);
                    }
                  else
                    continue;

                  /* If this register is being made equivalent to a MEM
                     and the MEM is not SET_SRC, the equivalencing insn
                     is one with the MEM as a SET_DEST and it occurs later.
                     So don't mark this insn now. */
                  if (GET_CODE (x) != MEM
                      || rtx_equal_p (SET_SRC (set), x))
                    reg_equiv_init[i] = insn;
                }
            }
        }

      /* If this insn is setting a MEM from a register equivalent to it,
         this is the equivalencing insn. */
      else if (set && GET_CODE (SET_DEST (set)) == MEM
               && GET_CODE (SET_SRC (set)) == REG
               && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
               && rtx_equal_p (SET_DEST (set),
                               reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
        reg_equiv_init[REGNO (SET_SRC (set))] = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        scan_paradoxical_subregs (PATTERN (insn));
    }

  /* Does this function require a frame pointer? */

  frame_pointer_needed = (! flag_omit_frame_pointer
#ifdef EXIT_IGNORE_STACK
                          /* ?? If EXIT_IGNORE_STACK is set, we will not save
                             and restore sp for alloca. So we can't eliminate
                             the frame pointer in that case. At some point,
                             we should improve this by emitting the
                             sp-adjusting insns for this case. */
                          || (current_function_calls_alloca
                              && EXIT_IGNORE_STACK)
#endif
                          || FRAME_POINTER_REQUIRED);

  num_eliminable = 0;

  /* Initialize the table of registers to eliminate. The way we do this
     depends on how the eliminable registers were defined. */
#ifdef ELIMINABLE_REGS
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      ep->can_eliminate = ep->can_eliminate_previous
        = (CAN_ELIMINATE (ep->from, ep->to)
           && (ep->from != FRAME_POINTER_REGNUM || ! frame_pointer_needed));
    }
#else
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
    = ! frame_pointer_needed;
#endif

  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's. Note that code in gen_rtx will cause, e.g.,
     gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this. */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
      ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
    }

  num_labels = max_label_num () - get_first_label_num ();

  /* Allocate the tables used to store offset information at labels. */
  offsets_known_at = (char *) alloca (num_labels);
  offsets_at
    = (int (*)[NUM_ELIMINABLE_REGS])
      alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));

  offsets_known_at -= get_first_label_num ();
  offsets_at -= get_first_label_num ();
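
  /* Editorial illustration: the two subtractions above simply bias the
     freshly allocated arrays so they can be indexed directly by CODE_LABEL
     number. If the first label number were, say, 5, offsets_known_at[5]
     would refer to element 0 of the block obtained from alloca. */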

  /* Alter each pseudo-reg rtx to contain its hard reg number.
     Assign stack slots to the pseudos that lack hard regs or equivalents.
     Do not touch virtual registers. */

  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
    alter_reg (i, -1);

  /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
     because the stack size may be a part of the offset computation for
     register elimination. */
  assign_stack_local (BLKmode, 0, 0);

  /* If we have some registers we think can be eliminated, scan all insns to
     see if there is an insn that sets one of these registers to something
     other than itself plus a constant. If so, the register cannot be
     eliminated. Doing this scan here eliminates an extra pass through the
     main reload loop in the most common case where register elimination
     cannot be done. */
  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
        || GET_CODE (insn) == CALL_INSN)
      note_stores (PATTERN (insn), mark_not_eliminable);

#ifndef REGISTER_CONSTRAINTS
  /* If all the pseudo regs have hard regs,
     except for those that are never referenced,
     we know that no reloads are needed. */
  /* But that is not true if there are register constraints, since
     in that case some pseudos might be in the wrong kind of hard reg. */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
      break;

  if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
    return;
#endif

  /* Compute the order of preference for hard registers to spill.
     Store them by decreasing preference in potential_reload_regs. */

  order_regs_for_reload ();

  /* So far, no hard regs have been spilled. */
  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    spill_reg_order[i] = -1;

  /* On most machines, we can't use any register explicitly used in the
     rtl as a spill register. But on some, we have to. Those will have
     taken care to keep the life of hard regs as short as possible. */

#ifdef SMALL_REGISTER_CLASSES
  CLEAR_HARD_REG_SET (forbidden_regs);
#else
  COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
#endif

  /* Spill any hard regs that we know we can't eliminate. */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if (! ep->can_eliminate)
      {
        spill_hard_reg (ep->from, global, dumpfile, 1);
        regs_ever_live[ep->from] = 1;
      }

  if (global)
    for (i = 0; i < N_REG_CLASSES; i++)
      {
        basic_block_needs[i] = (char *) alloca (n_basic_blocks);
        bzero (basic_block_needs[i], n_basic_blocks);
      }

  /* From now on, we need to emit any moves without making new pseudos. */
  reload_in_progress = 1;

  /* This loop scans the entire function each go-round
     and repeats until one repetition spills no additional hard regs. */

  /* This flag is set when a pseudo reg is spilled,
     to require another pass. Note that getting an additional reload
     reg does not necessarily imply any pseudo reg was spilled;
     sometimes we find a reload reg that no pseudo reg was allocated in. */
  something_changed = 1;
  /* This flag is set if there are any insns that require reloading. */
  something_needs_reloads = 0;
  /* This flag is set if there are any insns that require register
     eliminations. */
  something_needs_elimination = 0;
  while (something_changed)
    {
      rtx after_call = 0;

      /* For each class, number of reload regs needed in that class.
         This is the maximum over all insns of the needs in that class
         of the individual insn. */
      int max_needs[N_REG_CLASSES];
      /* For each class, size of group of consecutive regs
         that is needed for the reloads of this class. */
      int group_size[N_REG_CLASSES];
      /* For each class, max number of consecutive groups needed.
         (Each group contains group_size[CLASS] consecutive registers.) */
      int max_groups[N_REG_CLASSES];
      /* For each class, max number needed of regs that don't belong
         to any of the groups. */
      int max_nongroups[N_REG_CLASSES];
      /* For each class, the machine mode which requires consecutive
         groups of regs of that class.
         If two different modes ever require groups of one class,
         they must be the same size and equally restrictive for that class,
         otherwise we can't handle the complexity. */
      enum machine_mode group_mode[N_REG_CLASSES];
      /* Record the insn where each maximum need is first found. */
      rtx max_needs_insn[N_REG_CLASSES];
      rtx max_groups_insn[N_REG_CLASSES];
      rtx max_nongroups_insn[N_REG_CLASSES];
      rtx x;
      int starting_frame_size = get_frame_size ();

      something_changed = 0;
      bzero (max_needs, sizeof max_needs);
      bzero (max_groups, sizeof max_groups);
      bzero (max_nongroups, sizeof max_nongroups);
      bzero (max_needs_insn, sizeof max_needs_insn);
      bzero (max_groups_insn, sizeof max_groups_insn);
      bzero (max_nongroups_insn, sizeof max_nongroups_insn);
      bzero (group_size, sizeof group_size);
      for (i = 0; i < N_REG_CLASSES; i++)
        group_mode[i] = VOIDmode;

      /* Keep track of which basic blocks are needing the reloads. */
      this_block = 0;

      /* Remember whether any element of basic_block_needs
         changes from 0 to 1 in this pass. */
      new_basic_block_needs = 0;

      /* Reset all offsets on eliminable registers to their initial values. */
#ifdef ELIMINABLE_REGS
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
        {
          INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
          ep->previous_offset = ep->offset
            = ep->max_offset = ep->initial_offset;
        }
#else
#ifdef INITIAL_FRAME_POINTER_OFFSET
      INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
#else
      if (!FRAME_POINTER_REQUIRED)
        abort ();
      reg_eliminate[0].initial_offset = 0;
#endif
      reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
        = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

      num_not_at_initial_offset = 0;

      bzero (&offsets_known_at[get_first_label_num ()], num_labels);

      /* Set a known offset for each forced label to be at the initial offset
         of each elimination. We do this because we assume that all
         computed jumps occur from a location where each elimination is
         at its initial offset. */

      for (x = forced_labels; x; x = XEXP (x, 1))
        if (XEXP (x, 0))
          set_label_offsets (XEXP (x, 0), NULL_RTX, 1);

      /* For each pseudo register that has an equivalent location defined,
         try to eliminate any eliminable registers (such as the frame pointer)
         assuming initial offsets for the replacement register, which
         is the normal case.

         If the resulting location is directly addressable, substitute
         the MEM we just got directly for the old REG.

         If it is not addressable but is a constant or the sum of a hard reg
         and constant, it is probably not addressable because the constant is
         out of range, in that case record the address; we will generate
         hairy code to compute the address in a register each time it is
         needed.

         If the location is not addressable, but does not have one of the
         above forms, assign a stack slot. We have to do this to avoid the
         potential of producing lots of reloads if, e.g., a location involves
         a pseudo that didn't get a hard register and has an equivalent memory
         location that also involves a pseudo that didn't get a hard register.

         Perhaps at some point we will improve reload_when_needed handling
         so this problem goes away. But that's very hairy. */

      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
        if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
          {
            rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);

            if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
                                         XEXP (x, 0)))
              reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
            else if (CONSTANT_P (XEXP (x, 0))
                     || (GET_CODE (XEXP (x, 0)) == PLUS
                         && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
                         && (REGNO (XEXP (XEXP (x, 0), 0))
                             < FIRST_PSEUDO_REGISTER)
                         && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
              reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
            else
              {
                /* Make a new stack slot. Then indicate that something
                   changed so we go back and recompute offsets for
                   eliminable registers because the allocation of memory
                   below might change some offset. reg_equiv_{mem,address}
                   will be set up for this pseudo on the next pass around
                   the loop. */
                reg_equiv_memory_loc[i] = 0;
                reg_equiv_init[i] = 0;
                alter_reg (i, -1);
                something_changed = 1;
              }
          }

      /* If we allocated another pseudo to the stack, redo elimination
         bookkeeping. */
      if (something_changed)
        continue;

      /* If caller-saves needs a group, initialize the group to include
         the size and mode required for caller-saves. */

      if (caller_save_group_size > 1)
        {
          group_mode[(int) caller_save_spill_class] = Pmode;
          group_size[(int) caller_save_spill_class] = caller_save_group_size;
        }

      /* Compute the most additional registers needed by any instruction.
         Collect information separately for each class of regs. */

      for (insn = first; insn; insn = NEXT_INSN (insn))
        {
          if (global && this_block + 1 < n_basic_blocks
              && insn == basic_block_head[this_block+1])
            ++this_block;

          /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
             might include REG_LABEL), we need to see what effects this
             has on the known offsets at labels. */

          if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
              || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
                  && REG_NOTES (insn) != 0))
            set_label_offsets (insn, insn, 0);

          if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
            {
              /* Nonzero means don't use a reload reg that overlaps
                 the place where a function value can be returned. */
              rtx avoid_return_reg = 0;

              rtx old_body = PATTERN (insn);
              int old_code = INSN_CODE (insn);
              rtx old_notes = REG_NOTES (insn);
              int did_elimination = 0;

              /* Initially, count RELOAD_OTHER reloads.
                 Later, merge in the other kinds. */
              int insn_needs[N_REG_CLASSES];
              int insn_groups[N_REG_CLASSES];
              int insn_total_groups = 0;

              /* Count RELOAD_FOR_INPUT_RELOAD_ADDRESS reloads. */
              int insn_needs_for_inputs[N_REG_CLASSES];
              int insn_groups_for_inputs[N_REG_CLASSES];
              int insn_total_groups_for_inputs = 0;

              /* Count RELOAD_FOR_OUTPUT_RELOAD_ADDRESS reloads. */
              int insn_needs_for_outputs[N_REG_CLASSES];
              int insn_groups_for_outputs[N_REG_CLASSES];
              int insn_total_groups_for_outputs = 0;

              /* Count RELOAD_FOR_OPERAND_ADDRESS reloads. */
              int insn_needs_for_operands[N_REG_CLASSES];
              int insn_groups_for_operands[N_REG_CLASSES];
              int insn_total_groups_for_operands = 0;

#if 0  /* This wouldn't work nowadays, since optimize_bit_field
          looks for non-strict memory addresses. */
              /* Optimization: a bit-field instruction whose field
                 happens to be a byte or halfword in memory
                 can be changed to a move instruction. */

              if (GET_CODE (PATTERN (insn)) == SET)
                {
                  rtx dest = SET_DEST (PATTERN (insn));
                  rtx src = SET_SRC (PATTERN (insn));

                  if (GET_CODE (dest) == ZERO_EXTRACT
                      || GET_CODE (dest) == SIGN_EXTRACT)
                    optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
                  if (GET_CODE (src) == ZERO_EXTRACT
                      || GET_CODE (src) == SIGN_EXTRACT)
                    optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
                }
#endif

              /* If needed, eliminate any eliminable registers. */
              if (num_eliminable)
                did_elimination = eliminate_regs_in_insn (insn, 0);

#ifdef SMALL_REGISTER_CLASSES
              /* Set avoid_return_reg if this is an insn
                 that might use the value of a function call. */
              if (GET_CODE (insn) == CALL_INSN)
                {
                  if (GET_CODE (PATTERN (insn)) == SET)
                    after_call = SET_DEST (PATTERN (insn));
                  else if (GET_CODE (PATTERN (insn)) == PARALLEL
                           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                    after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
                  else
                    after_call = 0;
                }
              else if (after_call != 0
                       && !(GET_CODE (PATTERN (insn)) == SET
                            && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
                {
                  if (reg_mentioned_p (after_call, PATTERN (insn)))
                    avoid_return_reg = after_call;
                  after_call = 0;
                }
#endif /* SMALL_REGISTER_CLASSES */

              /* Analyze the instruction. */
              find_reloads (insn, 0, spill_indirect_levels, global,
                            spill_reg_order);

              /* Remember for later shortcuts which insns had any reloads or
                 register eliminations.

                 One might think that it would be worthwhile to mark insns
                 that need register replacements but not reloads, but this is
                 not safe because find_reloads may do some manipulation of
                 the insn (such as swapping commutative operands), which would
                 be lost when we restore the old pattern after register
                 replacement. So the actions of find_reloads must be redone in
                 subsequent passes or in reload_as_needed.

                 However, it is safe to mark insns that need reloads
                 but not register replacement. */

              PUT_MODE (insn, (did_elimination ? QImode
                               : n_reloads ? HImode
                               : VOIDmode));
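
              /* Editorial note: the insn's mode field is being reused here as
                 a three-way flag: QImode marks insns that needed register
                 elimination, HImode marks insns that needed reloads but no
                 elimination, and VOIDmode marks insns that needed neither.
                 This is the `later shortcut' the comment above refers to. */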

              /* Discard any register replacements done. */
              if (did_elimination)
                {
                  obstack_free (&reload_obstack, reload_firstobj);
                  PATTERN (insn) = old_body;
                  INSN_CODE (insn) = old_code;
                  REG_NOTES (insn) = old_notes;
                  something_needs_elimination = 1;
                }

              /* If this insn has no reloads, we need not do anything except
                 in the case of a CALL_INSN when we have caller-saves and
                 caller-save needs reloads. */

              if (n_reloads == 0
                  && ! (GET_CODE (insn) == CALL_INSN
                        && caller_save_spill_class != NO_REGS))
                continue;

              something_needs_reloads = 1;

              for (i = 0; i < N_REG_CLASSES; i++)
                {
                  insn_needs[i] = 0, insn_groups[i] = 0;
                  insn_needs_for_inputs[i] = 0, insn_groups_for_inputs[i] = 0;
                  insn_needs_for_outputs[i] = 0, insn_groups_for_outputs[i] = 0;
                  insn_needs_for_operands[i] = 0, insn_groups_for_operands[i] = 0;
                }

              /* Count each reload once in every class
                 containing the reload's own class. */

              for (i = 0; i < n_reloads; i++)
                {
                  register enum reg_class *p;
                  enum reg_class class = reload_reg_class[i];
                  int size;
                  enum machine_mode mode;
                  int *this_groups;
                  int *this_needs;
                  int *this_total_groups;

                  /* Don't count the dummy reloads, for which one of the
                     regs mentioned in the insn can be used for reloading.
                     Don't count optional reloads.
                     Don't count reloads that got combined with others. */
                  if (reload_reg_rtx[i] != 0
                      || reload_optional[i] != 0
                      || (reload_out[i] == 0 && reload_in[i] == 0
                          && ! reload_secondary_p[i]))
                    continue;

                  /* Show that a reload register of this class is needed
                     in this basic block. We do not use insn_needs and
                     insn_groups because they are overly conservative for
                     this purpose. */
                  if (global && ! basic_block_needs[(int) class][this_block])
                    {
                      basic_block_needs[(int) class][this_block] = 1;
                      new_basic_block_needs = 1;
                    }

                  /* Decide which time-of-use to count this reload for. */
                  switch (reload_when_needed[i])
                    {
                    case RELOAD_OTHER:
                    case RELOAD_FOR_OUTPUT:
                    case RELOAD_FOR_INPUT:
                      this_needs = insn_needs;
                      this_groups = insn_groups;
                      this_total_groups = &insn_total_groups;
                      break;

                    case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
                      this_needs = insn_needs_for_inputs;
                      this_groups = insn_groups_for_inputs;
                      this_total_groups = &insn_total_groups_for_inputs;
                      break;

                    case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
                      this_needs = insn_needs_for_outputs;
                      this_groups = insn_groups_for_outputs;
                      this_total_groups = &insn_total_groups_for_outputs;
                      break;

                    case RELOAD_FOR_OPERAND_ADDRESS:
                      this_needs = insn_needs_for_operands;
                      this_groups = insn_groups_for_operands;
                      this_total_groups = &insn_total_groups_for_operands;
                      break;
                    }

                  mode = reload_inmode[i];
                  if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
                    mode = reload_outmode[i];
                  size = CLASS_MAX_NREGS (class, mode);
                  if (size > 1)
                    {
                      enum machine_mode other_mode, allocate_mode;

                      /* Count number of groups needed separately from
                         number of individual regs needed. */
                      this_groups[(int) class]++;
                      p = reg_class_superclasses[(int) class];
                      while (*p != LIM_REG_CLASSES)
                        this_groups[(int) *p++]++;
                      (*this_total_groups)++;

                      /* Record size and mode of a group of this class. */
                      /* If more than one size group is needed,
                         make all groups the largest needed size. */
                      if (group_size[(int) class] < size)
                        {
                          other_mode = group_mode[(int) class];
                          allocate_mode = mode;

                          group_size[(int) class] = size;
                          group_mode[(int) class] = mode;
                        }
                      else
                        {
                          other_mode = mode;
                          allocate_mode = group_mode[(int) class];
                        }

                      /* Crash if two dissimilar machine modes both need
                         groups of consecutive regs of the same class. */

                      if (other_mode != VOIDmode
                          && other_mode != allocate_mode
                          && ! modes_equiv_for_class_p (allocate_mode,
                                                        other_mode,
                                                        class))
                        abort ();
                    }
                  else if (size == 1)
                    {
                      this_needs[(int) class] += 1;
                      p = reg_class_superclasses[(int) class];
                      while (*p != LIM_REG_CLASSES)
                        this_needs[(int) *p++] += 1;
                    }
                  else
                    abort ();
                }

              /* All reloads have been counted for this insn;
                 now merge the various times of use.
                 This sets insn_needs, etc., to the maximum total number
                 of registers needed at any point in this insn. */

              for (i = 0; i < N_REG_CLASSES; i++)
                {
                  int this_max;
                  this_max = insn_needs_for_inputs[i];
                  if (insn_needs_for_outputs[i] > this_max)
                    this_max = insn_needs_for_outputs[i];
                  if (insn_needs_for_operands[i] > this_max)
                    this_max = insn_needs_for_operands[i];
                  insn_needs[i] += this_max;
                  this_max = insn_groups_for_inputs[i];
                  if (insn_groups_for_outputs[i] > this_max)
                    this_max = insn_groups_for_outputs[i];
                  if (insn_groups_for_operands[i] > this_max)
                    this_max = insn_groups_for_operands[i];
                  insn_groups[i] += this_max;
                }

              insn_total_groups += MAX (insn_total_groups_for_inputs,
                                        MAX (insn_total_groups_for_outputs,
                                             insn_total_groups_for_operands));
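
              /* Editorial illustration of the merge above: if an insn needs,
                 say, 1 register of some class for input-address reloads and
                 2 for output-address reloads, only MAX (1, 2) = 2 registers
                 are added for that class, since the input-address and
                 output-address reloads are not in use at the same moment
                 within the insn. */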
1214
a8efe40d
RK
1215 /* If this is a CALL_INSN and caller-saves will need
1216 a spill register, act as if the spill register is
1217 needed for this insn. However, the spill register
1218 can be used by any reload of this insn, so we only
1219 need do something if no need for that class has
a8fdc208 1220 been recorded.
a8efe40d
RK
1221
1222 The assumption that every CALL_INSN will trigger a
1223 caller-save is highly conservative, however, the number
1224 of cases where caller-saves will need a spill register but
1225 a block containing a CALL_INSN won't need a spill register
1226 of that class should be quite rare.
1227
1228 If a group is needed, the size and mode of the group will
d45cf215 1229 have been set up at the beginning of this loop. */
a8efe40d
RK
1230
1231 if (GET_CODE (insn) == CALL_INSN
1232 && caller_save_spill_class != NO_REGS)
1233 {
1234 int *caller_save_needs
1235 = (caller_save_group_size > 1 ? insn_groups : insn_needs);
1236
1237 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1238 {
1239 register enum reg_class *p
1240 = reg_class_superclasses[(int) caller_save_spill_class];
1241
1242 caller_save_needs[(int) caller_save_spill_class]++;
1243
1244 while (*p != LIM_REG_CLASSES)
0aaa6af8 1245 caller_save_needs[(int) *p++] += 1;
a8efe40d
RK
1246 }
1247
1248 if (caller_save_group_size > 1)
1249 insn_total_groups = MAX (insn_total_groups, 1);
d1c1397e
RS
1250
1251
1252 /* Show that this basic block will need a register of
1253 this class. */
1254
1255 if (global
1256 && ! (basic_block_needs[(int) caller_save_spill_class]
1257 [this_block]))
1258 {
1259 basic_block_needs[(int) caller_save_spill_class]
1260 [this_block] = 1;
1261 new_basic_block_needs = 1;
1262 }
a8efe40d
RK
1263 }
1264
32131a9c
RK
1265#ifdef SMALL_REGISTER_CLASSES
1266 /* If this insn stores the value of a function call,
1267 and that value is in a register that has been spilled,
1268 and if the insn needs a reload in a class
1269 that might use that register as the reload register,
1270 then add add an extra need in that class.
1271 This makes sure we have a register available that does
1272 not overlap the return value. */
1273 if (avoid_return_reg)
1274 {
1275 int regno = REGNO (avoid_return_reg);
1276 int nregs
1277 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1278 int r;
1279 int inc_groups = 0;
1280 for (r = regno; r < regno + nregs; r++)
1281 if (spill_reg_order[r] >= 0)
1282 for (i = 0; i < N_REG_CLASSES; i++)
1283 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1284 {
1285 if (insn_needs[i] > 0)
1286 insn_needs[i]++;
1287 if (insn_groups[i] > 0
1288 && nregs > 1)
1289 inc_groups = 1;
1290 }
1291 if (inc_groups)
1292 insn_groups[i]++;
1293 }
1294#endif /* SMALL_REGISTER_CLASSES */
1295
1296 /* For each class, collect maximum need of any insn. */
1297
1298 for (i = 0; i < N_REG_CLASSES; i++)
1299 {
1300 if (max_needs[i] < insn_needs[i])
5352b11a
RS
1301 {
1302 max_needs[i] = insn_needs[i];
1303 max_needs_insn[i] = insn;
1304 }
32131a9c 1305 if (max_groups[i] < insn_groups[i])
5352b11a
RS
1306 {
1307 max_groups[i] = insn_groups[i];
1308 max_groups_insn[i] = insn;
1309 }
32131a9c
RK
1310 if (insn_total_groups > 0)
1311 if (max_nongroups[i] < insn_needs[i])
5352b11a
RS
1312 {
1313 max_nongroups[i] = insn_needs[i];
1314 max_nongroups_insn[i] = insn;
1315 }
32131a9c
RK
1316 }
1317 }
1318 /* Note that there is a continue statement above. */
1319 }
1320
0dadecf6
RK
1321 /* If we allocated any new memory locations, make another pass
1322 since it might have changed elimination offsets. */
1323 if (starting_frame_size != get_frame_size ())
1324 something_changed = 1;
1325
d445b551 1326 /* If we have caller-saves, set up the save areas and see if caller-save
a8efe40d 1327 will need a spill register. */
32131a9c 1328
d445b551 1329 if (caller_save_needed
a8efe40d
RK
1330 && ! setup_save_areas (&something_changed)
1331 && caller_save_spill_class == NO_REGS)
32131a9c 1332 {
a8efe40d
RK
1333 /* The class we will need depends on whether the machine
1334 supports the sum of two registers for an address; see
1335 find_address_reloads for details. */
1336
a8fdc208 1337 caller_save_spill_class
a8efe40d
RK
1338 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1339 caller_save_group_size
1340 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1341 something_changed = 1;
32131a9c
RK
1342 }
1343
1344 /* Now deduct from the needs for the registers already
1345 available (already spilled). */
1346
1347 CLEAR_HARD_REG_SET (counted_for_groups);
1348 CLEAR_HARD_REG_SET (counted_for_nongroups);
1349
1350 /* First find all regs alone in their class
1351 and count them (if desired) for non-groups.
1352 We would be screwed if a group took the only reg in a class
d445b551 1353 for which a non-group reload is needed.
32131a9c
RK
1354 (Note there is still a bug; if a class has 2 regs,
1355 both could be stolen by groups and we would lose the same way.
1356 With luck, no machine will need a nongroup in a 2-reg class.) */
1357
1358 for (i = 0; i < n_spills; i++)
1359 {
1360 register enum reg_class *p;
1361 class = (int) REGNO_REG_CLASS (spill_regs[i]);
1362
1363 if (reg_class_size[class] == 1 && max_nongroups[class] > 0)
1364 {
1365 max_needs[class]--;
1366 p = reg_class_superclasses[class];
1367 while (*p != LIM_REG_CLASSES)
1368 max_needs[(int) *p++]--;
1369
1370 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1371 max_nongroups[class]--;
1372 p = reg_class_superclasses[class];
1373 while (*p != LIM_REG_CLASSES)
1374 {
1375 if (max_nongroups[(int) *p] > 0)
1376 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1377 max_nongroups[(int) *p++]--;
1378 }
1379 }
1380 }
1381
1382 /* Now find all consecutive groups of spilled registers
1383 and mark each group off against the need for such groups.
1384 But don't count them against ordinary need, yet. */
1385
1386 count_possible_groups (group_size, group_mode, max_groups);
1387
1388 /* Now count all spill regs against the individual need,
a8fdc208 1389 This includes those counted above for groups,
32131a9c
RK
1390 but not those previously counted for nongroups.
1391
1392 Those that weren't counted_for_groups can also count against
1393 the not-in-group need. */
1394
1395 for (i = 0; i < n_spills; i++)
1396 {
1397 register enum reg_class *p;
1398 class = (int) REGNO_REG_CLASS (spill_regs[i]);
1399
1400 /* Those counted at the beginning shouldn't be counted twice. */
1401 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
1402 {
1403 max_needs[class]--;
1404 p = reg_class_superclasses[class];
1405 while (*p != LIM_REG_CLASSES)
1406 max_needs[(int) *p++]--;
1407
1408 if (! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i]))
1409 {
1410 if (max_nongroups[class] > 0)
1411 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1412 max_nongroups[class]--;
1413 p = reg_class_superclasses[class];
1414 while (*p != LIM_REG_CLASSES)
1415 {
1416 if (max_nongroups[(int) *p] > 0)
1417 SET_HARD_REG_BIT (counted_for_nongroups,
1418 spill_regs[i]);
1419 max_nongroups[(int) *p++]--;
1420 }
1421 }
1422 }
1423 }
1424
5c23c401
RK
1425 /* See if anything that happened changes which eliminations are valid.
1426 For example, on the Sparc, whether or not the frame pointer can
1427 be eliminated can depend on what registers have been used. We need
1428 not check some conditions again (such as flag_omit_frame_pointer)
1429 since they can't have changed. */
1430
1431 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1432 if ((ep->from == FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1433#ifdef ELIMINABLE_REGS
1434 || ! CAN_ELIMINATE (ep->from, ep->to)
1435#endif
1436 )
1437 ep->can_eliminate = 0;
1438
32131a9c
RK
1439 /* Look for the case where we have discovered that we can't replace
1440 register A with register B and that means that we will now be
1441 trying to replace register A with register C. This means we can
1442 no longer replace register C with register B and we need to disable
1443 such an elimination, if it exists. This occurs often with A == ap,
1444 B == sp, and C == fp. */
a8fdc208 1445
32131a9c
RK
1446 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1447 {
1448 struct elim_table *op;
1449 register int new_to = -1;
1450
1451 if (! ep->can_eliminate && ep->can_eliminate_previous)
1452 {
1453 /* Find the current elimination for ep->from, if there is a
1454 new one. */
1455 for (op = reg_eliminate;
1456 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1457 if (op->from == ep->from && op->can_eliminate)
1458 {
1459 new_to = op->to;
1460 break;
1461 }
1462
1463 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1464 disable it. */
1465 for (op = reg_eliminate;
1466 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1467 if (op->from == new_to && op->to == ep->to)
1468 op->can_eliminate = 0;
1469 }
1470 }
1471
1472 /* See if any registers that we thought we could eliminate the previous
1473 time are no longer eliminable. If so, something has changed and we
1474 must spill the register. Also, recompute the number of eliminable
1475 registers and see if the frame pointer is needed; it is if there is
1476 no elimination of the frame pointer that we can perform. */
1477
1478 frame_pointer_needed = 1;
1479 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1480 {
1481 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM)
1482 frame_pointer_needed = 0;
1483
1484 if (! ep->can_eliminate && ep->can_eliminate_previous)
1485 {
1486 ep->can_eliminate_previous = 0;
1487 spill_hard_reg (ep->from, global, dumpfile, 1);
1488 regs_ever_live[ep->from] = 1;
1489 something_changed = 1;
1490 num_eliminable--;
1491 }
1492 }
1493
1494 /* If all needs are met, we win. */
1495
1496 for (i = 0; i < N_REG_CLASSES; i++)
1497 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1498 break;
1499 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1500 break;
1501
1502 /* Not all needs are met; must spill more hard regs. */
1503
1504 /* If any element of basic_block_needs changed from 0 to 1,
1505 re-spill all the regs already spilled. This may spill
1506 additional pseudos that didn't spill before. */
1507
1508 if (new_basic_block_needs)
1509 for (i = 0; i < n_spills; i++)
1510 something_changed
1511 |= spill_hard_reg (spill_regs[i], global, dumpfile, 0);
1512
1513 /* Now find more reload regs to satisfy the remaining need
1514 Do it by ascending class number, since otherwise a reg
1515 might be spilled for a big class and might fail to count
1516 for a smaller class even though it belongs to that class.
1517
1518 Count spilled regs in `spills', and add entries to
1519 `spill_regs' and `spill_reg_order'.
1520
1521 ??? Note there is a problem here.
1522 When there is a need for a group in a high-numbered class,
1523 and also need for non-group regs that come from a lower class,
1524 the non-group regs are chosen first. If there aren't many regs,
1525 they might leave no room for a group.
1526
1527 This was happening on the 386. To fix it, we added the code
1528 that calls possible_group_p, so that the lower class won't
1529 break up the last possible group.
1530
1531 Really fixing the problem would require changes above
1532 in counting the regs already spilled, and in choose_reload_regs.
1533 It might be hard to avoid introducing bugs there. */
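      /* (As an illustration of the problem noted above: if the only two
	 adjacent spillable registers in a class are taken one at a time
	 for non-group needs, no adjacent pair remains for a later group
	 need, even though enough registers were spilled in total.)  */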
1534
1535 for (class = 0; class < N_REG_CLASSES; class++)
1536 {
1537 /* First get the groups of registers.
1538 If we got single registers first, we might fragment
1539 possible groups. */
1540 while (max_groups[class] > 0)
1541 {
1542 /* If any single spilled regs happen to form groups,
1543 count them now. Maybe we don't really need
1544 to spill another group. */
1545 count_possible_groups (group_size, group_mode, max_groups);
1546
1547 /* Groups of size 2 (the only groups used on most machines)
1548 are treated specially. */
1549 if (group_size[class] == 2)
1550 {
1551 /* First, look for a register that will complete a group. */
1552 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1553 {
1554 int j = potential_reload_regs[i];
1555 int other;
1556 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1557 &&
1558 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1559 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1560 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1561 && HARD_REGNO_MODE_OK (other, group_mode[class])
1562 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1563 other)
1564 /* We don't want one part of another group.
1565 We could get "two groups" that overlap! */
1566 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1567 ||
1568 (j < FIRST_PSEUDO_REGISTER - 1
1569 && (other = j + 1, spill_reg_order[other] >= 0)
1570 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1571 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1572 && HARD_REGNO_MODE_OK (j, group_mode[class])
1573 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1574 other)
1575 && ! TEST_HARD_REG_BIT (counted_for_groups,
1576 other))))
1577 {
1578 register enum reg_class *p;
1579
1580 /* We have found one that will complete a group,
1581 so count off one group as provided. */
1582 max_groups[class]--;
1583 p = reg_class_superclasses[class];
1584 while (*p != LIM_REG_CLASSES)
1585 max_groups[(int) *p++]--;
1586
1587 /* Indicate both these regs are part of a group. */
1588 SET_HARD_REG_BIT (counted_for_groups, j);
1589 SET_HARD_REG_BIT (counted_for_groups, other);
1590 break;
1591 }
1592 }
1593 /* We can't complete a group, so start one. */
1594 if (i == FIRST_PSEUDO_REGISTER)
1595 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1596 {
1597 int j = potential_reload_regs[i];
1598 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1599 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1600 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1601 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1602 && HARD_REGNO_MODE_OK (j, group_mode[class])
1603 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1604 j + 1))
1605 break;
1606 }
1607
1608 /* I should be the index in potential_reload_regs
1609 of the new reload reg we have found. */
1610
5352b11a
RS
1611 if (i >= FIRST_PSEUDO_REGISTER)
1612 {
1613 /* There are no groups left to spill. */
1614 spill_failure (max_groups_insn[class]);
1615 failure = 1;
1616 goto failed;
1617 }
1618 else
1619 something_changed
fb3821f7 1620 |= new_spill_reg (i, class, max_needs, NULL_PTR,
5352b11a 1621 global, dumpfile);
32131a9c
RK
1622 }
1623 else
1624 {
1625 /* For groups of more than 2 registers,
1626 look for a sufficient sequence of unspilled registers,
1627 and spill them all at once. */
1628 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1629 {
1630 int j = potential_reload_regs[i];
1631 int k;
1632 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1633 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1634 {
1635 /* Check each reg in the sequence. */
1636 for (k = 0; k < group_size[class]; k++)
1637 if (! (spill_reg_order[j + k] < 0
1638 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1639 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1640 break;
1641 /* We got a full sequence, so spill them all. */
1642 if (k == group_size[class])
1643 {
1644 register enum reg_class *p;
1645 for (k = 0; k < group_size[class]; k++)
1646 {
1647 int idx;
1648 SET_HARD_REG_BIT (counted_for_groups, j + k);
1649 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1650 if (potential_reload_regs[idx] == j + k)
1651 break;
5352b11a
RS
 1652 			    if (idx >= FIRST_PSEUDO_REGISTER)
1653 {
1654 /* There are no groups left. */
1655 spill_failure (max_groups_insn[class]);
1656 failure = 1;
1657 goto failed;
1658 }
1659 else
1660 something_changed
fb3821f7
CH
1661 |= new_spill_reg (idx, class,
1662 max_needs, NULL_PTR,
5352b11a 1663 global, dumpfile);
32131a9c
RK
1664 }
1665
1666 /* We have found one that will complete a group,
1667 so count off one group as provided. */
1668 max_groups[class]--;
1669 p = reg_class_superclasses[class];
1670 while (*p != LIM_REG_CLASSES)
1671 max_groups[(int) *p++]--;
1672
1673 break;
1674 }
1675 }
1676 }
fa52261e
RS
1677 /* We couldn't find any registers for this reload.
1678 Abort to avoid going into an infinite loop. */
1679 if (i == FIRST_PSEUDO_REGISTER)
1680 abort ();
32131a9c
RK
1681 }
1682 }
1683
1684 /* Now similarly satisfy all need for single registers. */
1685
1686 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1687 {
1688 /* Consider the potential reload regs that aren't
1689 yet in use as reload regs, in order of preference.
1690 Find the most preferred one that's in this class. */
1691
1692 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1693 if (potential_reload_regs[i] >= 0
1694 && TEST_HARD_REG_BIT (reg_class_contents[class],
1695 potential_reload_regs[i])
1696 /* If this reg will not be available for groups,
1697 pick one that does not foreclose possible groups.
1698 This is a kludge, and not very general,
1699 but it should be sufficient to make the 386 work,
1700 and the problem should not occur on machines with
1701 more registers. */
1702 && (max_nongroups[class] == 0
1703 || possible_group_p (potential_reload_regs[i], max_groups)))
1704 break;
1705
1706 /* I should be the index in potential_reload_regs
1707 of the new reload reg we have found. */
1708
5352b11a
RS
1709 if (i >= FIRST_PSEUDO_REGISTER)
1710 {
1711 /* There are no possible registers left to spill. */
1712 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1713 : max_nongroups_insn[class]);
1714 failure = 1;
1715 goto failed;
1716 }
1717 else
1718 something_changed
1719 |= new_spill_reg (i, class, max_needs, max_nongroups,
1720 global, dumpfile);
32131a9c
RK
1721 }
1722 }
1723 }
1724
1725 /* If global-alloc was run, notify it of any register eliminations we have
1726 done. */
1727 if (global)
1728 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1729 if (ep->can_eliminate)
1730 mark_elimination (ep->from, ep->to);
1731
32131a9c 1732 /* Insert code to save and restore call-clobbered hard regs
a8efe40d
RK
 1733     around calls.  Tell it what mode to use so that we will process
1734 those insns in reload_as_needed if we have to. */
32131a9c
RK
1735
1736 if (caller_save_needed)
a8efe40d
RK
1737 save_call_clobbered_regs (num_eliminable ? QImode
1738 : caller_save_spill_class != NO_REGS ? HImode
1739 : VOIDmode);
32131a9c
RK
1740
1741 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1742 If that insn didn't set the register (i.e., it copied the register to
1743 memory), just delete that insn instead of the equivalencing insn plus
1744 anything now dead. If we call delete_dead_insn on that insn, we may
 1745     delete the insn that actually sets the register if the register dies
1746 there and that is incorrect. */
1747
1748 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1749 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1750 && GET_CODE (reg_equiv_init[i]) != NOTE)
1751 {
1752 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1753 delete_dead_insn (reg_equiv_init[i]);
1754 else
1755 {
1756 PUT_CODE (reg_equiv_init[i], NOTE);
1757 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1758 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1759 }
1760 }
1761
1762 /* Use the reload registers where necessary
1763 by generating move instructions to move the must-be-register
1764 values into or out of the reload registers. */
1765
a8efe40d
RK
1766 if (something_needs_reloads || something_needs_elimination
1767 || (caller_save_needed && num_eliminable)
1768 || caller_save_spill_class != NO_REGS)
32131a9c
RK
1769 reload_as_needed (first, global);
1770
1771 reload_in_progress = 0;
1772
5352b11a
RS
1773 /* Come here (with failure set nonzero) if we can't get enough spill regs
1774 and we decide not to abort about it. */
1775 failed:
1776
32131a9c
RK
1777 /* Now eliminate all pseudo regs by modifying them into
1778 their equivalent memory references.
1779 The REG-rtx's for the pseudos are modified in place,
1780 so all insns that used to refer to them now refer to memory.
1781
1782 For a reg that has a reg_equiv_address, all those insns
1783 were changed by reloading so that no insns refer to it any longer;
1784 but the DECL_RTL of a variable decl may refer to it,
1785 and if so this causes the debugging info to mention the variable. */
1786
1787 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1788 {
1789 rtx addr = 0;
ab1fd483 1790 int in_struct = 0;
32131a9c 1791 if (reg_equiv_mem[i])
ab1fd483
RS
1792 {
1793 addr = XEXP (reg_equiv_mem[i], 0);
1794 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1795 }
32131a9c
RK
1796 if (reg_equiv_address[i])
1797 addr = reg_equiv_address[i];
1798 if (addr)
1799 {
1800 if (reg_renumber[i] < 0)
1801 {
1802 rtx reg = regno_reg_rtx[i];
1803 XEXP (reg, 0) = addr;
1804 REG_USERVAR_P (reg) = 0;
ab1fd483 1805 MEM_IN_STRUCT_P (reg) = in_struct;
32131a9c
RK
1806 PUT_CODE (reg, MEM);
1807 }
1808 else if (reg_equiv_mem[i])
1809 XEXP (reg_equiv_mem[i], 0) = addr;
1810 }
1811 }
1812
1813#ifdef PRESERVE_DEATH_INFO_REGNO_P
1814 /* Make a pass over all the insns and remove death notes for things that
1815 are no longer registers or no longer die in the insn (e.g., an input
1816 and output pseudo being tied). */
1817
1818 for (insn = first; insn; insn = NEXT_INSN (insn))
1819 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1820 {
1821 rtx note, next;
1822
1823 for (note = REG_NOTES (insn); note; note = next)
1824 {
1825 next = XEXP (note, 1);
1826 if (REG_NOTE_KIND (note) == REG_DEAD
1827 && (GET_CODE (XEXP (note, 0)) != REG
1828 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1829 remove_note (insn, note);
1830 }
1831 }
1832#endif
1833
1834 /* Indicate that we no longer have known memory locations or constants. */
1835 reg_equiv_constant = 0;
1836 reg_equiv_memory_loc = 0;
5352b11a
RS
1837
1838 return failure;
32131a9c
RK
1839}
1840\f
1841/* Nonzero if, after spilling reg REGNO for non-groups,
1842 it will still be possible to find a group if we still need one. */
1843
1844static int
1845possible_group_p (regno, max_groups)
1846 int regno;
1847 int *max_groups;
1848{
1849 int i;
1850 int class = (int) NO_REGS;
1851
1852 for (i = 0; i < (int) N_REG_CLASSES; i++)
1853 if (max_groups[i] > 0)
1854 {
1855 class = i;
1856 break;
1857 }
1858
1859 if (class == (int) NO_REGS)
1860 return 1;
1861
1862 /* Consider each pair of consecutive registers. */
1863 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
1864 {
1865 /* Ignore pairs that include reg REGNO. */
1866 if (i == regno || i + 1 == regno)
1867 continue;
1868
1869 /* Ignore pairs that are outside the class that needs the group.
1870 ??? Here we fail to handle the case where two different classes
1871 independently need groups. But this never happens with our
1872 current machine descriptions. */
1873 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
1874 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
1875 continue;
1876
1877 /* A pair of consecutive regs we can still spill does the trick. */
1878 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
1879 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
1880 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
1881 return 1;
1882
1883 /* A pair of one already spilled and one we can spill does it
1884 provided the one already spilled is not otherwise reserved. */
1885 if (spill_reg_order[i] < 0
1886 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
1887 && spill_reg_order[i + 1] >= 0
1888 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
1889 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
1890 return 1;
1891 if (spill_reg_order[i + 1] < 0
1892 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
1893 && spill_reg_order[i] >= 0
1894 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
1895 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
1896 return 1;
1897 }
1898
1899 return 0;
1900}
1901\f
1902/* Count any groups that can be formed from the registers recently spilled.
1903 This is done class by class, in order of ascending class number. */
1904
1905static void
1906count_possible_groups (group_size, group_mode, max_groups)
1907 int *group_size, *max_groups;
1908 enum machine_mode *group_mode;
1909{
1910 int i;
1911 /* Now find all consecutive groups of spilled registers
1912 and mark each group off against the need for such groups.
1913 But don't count them against ordinary need, yet. */
1914
1915 for (i = 0; i < N_REG_CLASSES; i++)
1916 if (group_size[i] > 1)
1917 {
1918 char regmask[FIRST_PSEUDO_REGISTER];
1919 int j;
1920
1921 bzero (regmask, sizeof regmask);
1922 /* Make a mask of all the regs that are spill regs in class I. */
1923 for (j = 0; j < n_spills; j++)
1924 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
1925 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
1926 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1927 spill_regs[j]))
1928 regmask[spill_regs[j]] = 1;
1929 /* Find each consecutive group of them. */
1930 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
1931 if (regmask[j] && j + group_size[i] <= FIRST_PSEUDO_REGISTER
1932 /* Next line in case group-mode for this class
1933 demands an even-odd pair. */
1934 && HARD_REGNO_MODE_OK (j, group_mode[i]))
1935 {
1936 int k;
1937 for (k = 1; k < group_size[i]; k++)
1938 if (! regmask[j + k])
1939 break;
1940 if (k == group_size[i])
1941 {
1942 /* We found a group. Mark it off against this class's
1943 need for groups, and against each superclass too. */
1944 register enum reg_class *p;
1945 max_groups[i]--;
1946 p = reg_class_superclasses[i];
1947 while (*p != LIM_REG_CLASSES)
1948 max_groups[(int) *p++]--;
a8fdc208 1949 /* Don't count these registers again. */
32131a9c
RK
1950 for (k = 0; k < group_size[i]; k++)
1951 SET_HARD_REG_BIT (counted_for_groups, j + k);
1952 }
fa52261e
RS
1953 /* Skip to the last reg in this group. When j is incremented
1954 above, it will then point to the first reg of the next
1955 possible group. */
1956 j += k - 1;
32131a9c
RK
1957 }
1958 }
1959
1960}
1961\f
1962/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
1963 another mode that needs to be reloaded for the same register class CLASS.
1964 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
1965 ALLOCATE_MODE will never be smaller than OTHER_MODE.
1966
1967 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
1968 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
1969 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
1970 causes unnecessary failures on machines requiring alignment of register
1971 groups when the two modes are different sizes, because the larger mode has
1972 more strict alignment rules than the smaller mode. */
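/* (For example, on a machine that requires an even-numbered register for
   the wider ALLOCATE_MODE, every odd-numbered register in CLASS accepts
   the narrower OTHER_MODE but not ALLOCATE_MODE; the removed test would
   have failed for that reason alone even though nothing of mode
   ALLOCATE_MODE is ever placed in such a register.)  */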
1973
1974static int
1975modes_equiv_for_class_p (allocate_mode, other_mode, class)
1976 enum machine_mode allocate_mode, other_mode;
1977 enum reg_class class;
1978{
1979 register int regno;
1980 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1981 {
1982 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
1983 && HARD_REGNO_MODE_OK (regno, allocate_mode)
1984 && ! HARD_REGNO_MODE_OK (regno, other_mode))
1985 return 0;
1986 }
1987 return 1;
1988}
1989
5352b11a
RS
1990/* Handle the failure to find a register to spill.
1991 INSN should be one of the insns which needed this particular spill reg. */
1992
1993static void
1994spill_failure (insn)
1995 rtx insn;
1996{
1997 if (asm_noperands (PATTERN (insn)) >= 0)
1998 error_for_asm (insn, "`asm' needs too many reloads");
1999 else
2000 abort ();
2001}
2002
32131a9c
RK
2003/* Add a new register to the tables of available spill-registers
2004 (as well as spilling all pseudos allocated to the register).
2005 I is the index of this register in potential_reload_regs.
2006 CLASS is the regclass whose need is being satisfied.
2007 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2008 so that this register can count off against them.
2009 MAX_NONGROUPS is 0 if this register is part of a group.
2010 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2011
2012static int
2013new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2014 int i;
2015 int class;
2016 int *max_needs;
2017 int *max_nongroups;
2018 int global;
2019 FILE *dumpfile;
2020{
2021 register enum reg_class *p;
2022 int val;
2023 int regno = potential_reload_regs[i];
2024
2025 if (i >= FIRST_PSEUDO_REGISTER)
2026 abort (); /* Caller failed to find any register. */
2027
2028 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2029 fatal ("fixed or forbidden register was spilled.\n\
2030This may be due to a compiler bug or to impossible asm statements.");
2031
2032 /* Make reg REGNO an additional reload reg. */
2033
2034 potential_reload_regs[i] = -1;
2035 spill_regs[n_spills] = regno;
2036 spill_reg_order[regno] = n_spills;
2037 if (dumpfile)
2038 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2039
2040 /* Clear off the needs we just satisfied. */
2041
2042 max_needs[class]--;
2043 p = reg_class_superclasses[class];
2044 while (*p != LIM_REG_CLASSES)
2045 max_needs[(int) *p++]--;
2046
2047 if (max_nongroups && max_nongroups[class] > 0)
2048 {
2049 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2050 max_nongroups[class]--;
2051 p = reg_class_superclasses[class];
2052 while (*p != LIM_REG_CLASSES)
2053 max_nongroups[(int) *p++]--;
2054 }
2055
2056 /* Spill every pseudo reg that was allocated to this reg
2057 or to something that overlaps this reg. */
2058
2059 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2060
2061 /* If there are some registers still to eliminate and this register
2062 wasn't ever used before, additional stack space may have to be
2063 allocated to store this register. Thus, we may have changed the offset
2064 between the stack and frame pointers, so mark that something has changed.
2065 (If new pseudos were spilled, thus requiring more space, VAL would have
2066 been set non-zero by the call to spill_hard_reg above since additional
 2067     reloads may be needed in that case.)
2068
2069 One might think that we need only set VAL to 1 if this is a call-used
2070 register. However, the set of registers that must be saved by the
2071 prologue is not identical to the call-used set. For example, the
2072 register used by the call insn for the return PC is a call-used register,
2073 but must be saved by the prologue. */
2074 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2075 val = 1;
2076
2077 regs_ever_live[spill_regs[n_spills]] = 1;
2078 n_spills++;
2079
2080 return val;
2081}
2082\f
 2083/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2084 data that is dead in INSN. */
2085
2086static void
2087delete_dead_insn (insn)
2088 rtx insn;
2089{
2090 rtx prev = prev_real_insn (insn);
2091 rtx prev_dest;
2092
2093 /* If the previous insn sets a register that dies in our insn, delete it
2094 too. */
2095 if (prev && GET_CODE (PATTERN (prev)) == SET
2096 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2097 && reg_mentioned_p (prev_dest, PATTERN (insn))
2098 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2099 delete_dead_insn (prev);
2100
2101 PUT_CODE (insn, NOTE);
2102 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2103 NOTE_SOURCE_FILE (insn) = 0;
2104}
2105
2106/* Modify the home of pseudo-reg I.
2107 The new home is present in reg_renumber[I].
2108
2109 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2110 or it may be -1, meaning there is none or it is not relevant.
2111 This is used so that all pseudos spilled from a given hard reg
2112 can share one stack slot. */
2113
2114static void
2115alter_reg (i, from_reg)
2116 register int i;
2117 int from_reg;
2118{
2119 /* When outputting an inline function, this can happen
2120 for a reg that isn't actually used. */
2121 if (regno_reg_rtx[i] == 0)
2122 return;
2123
2124 /* If the reg got changed to a MEM at rtl-generation time,
2125 ignore it. */
2126 if (GET_CODE (regno_reg_rtx[i]) != REG)
2127 return;
2128
2129 /* Modify the reg-rtx to contain the new hard reg
2130 number or else to contain its pseudo reg number. */
2131 REGNO (regno_reg_rtx[i])
2132 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2133
2134 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2135 allocate a stack slot for it. */
2136
2137 if (reg_renumber[i] < 0
2138 && reg_n_refs[i] > 0
2139 && reg_equiv_constant[i] == 0
2140 && reg_equiv_memory_loc[i] == 0)
2141 {
2142 register rtx x;
2143 int inherent_size = PSEUDO_REGNO_BYTES (i);
2144 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2145 int adjust = 0;
2146
2147 /* Each pseudo reg has an inherent size which comes from its own mode,
2148 and a total size which provides room for paradoxical subregs
2149 which refer to the pseudo reg in wider modes.
2150
2151 We can use a slot already allocated if it provides both
2152 enough inherent space and enough total space.
2153 Otherwise, we allocate a new slot, making sure that it has no less
 2154	 inherent space, and no less total space, than the previous slot.  */
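      /* (For example, with purely illustrative sizes: a pseudo whose own
	 mode occupies 4 bytes but which is also referenced through an
	 8-byte paradoxical SUBREG has an inherent size of 4 and a total
	 size of 8, so a previous slot is reused only if it is at least
	 8 bytes wide and its mode covers at least 4 bytes.)  */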
2155 if (from_reg == -1)
2156 {
2157 /* No known place to spill from => no slot to reuse. */
2158 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2159#if BYTES_BIG_ENDIAN
2160 /* Cancel the big-endian correction done in assign_stack_local.
2161 Get the address of the beginning of the slot.
2162 This is so we can do a big-endian correction unconditionally
2163 below. */
2164 adjust = inherent_size - total_size;
2165#endif
2166 }
2167 /* Reuse a stack slot if possible. */
2168 else if (spill_stack_slot[from_reg] != 0
2169 && spill_stack_slot_width[from_reg] >= total_size
2170 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2171 >= inherent_size))
2172 x = spill_stack_slot[from_reg];
2173 /* Allocate a bigger slot. */
2174 else
2175 {
2176 /* Compute maximum size needed, both for inherent size
2177 and for total size. */
2178 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2179 if (spill_stack_slot[from_reg])
2180 {
2181 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2182 > inherent_size)
2183 mode = GET_MODE (spill_stack_slot[from_reg]);
2184 if (spill_stack_slot_width[from_reg] > total_size)
2185 total_size = spill_stack_slot_width[from_reg];
2186 }
2187 /* Make a slot with that size. */
2188 x = assign_stack_local (mode, total_size, -1);
2189#if BYTES_BIG_ENDIAN
2190 /* Cancel the big-endian correction done in assign_stack_local.
2191 Get the address of the beginning of the slot.
2192 This is so we can do a big-endian correction unconditionally
2193 below. */
2194 adjust = GET_MODE_SIZE (mode) - total_size;
2195#endif
2196 spill_stack_slot[from_reg] = x;
2197 spill_stack_slot_width[from_reg] = total_size;
2198 }
2199
2200#if BYTES_BIG_ENDIAN
2201 /* On a big endian machine, the "address" of the slot
2202 is the address of the low part that fits its inherent mode. */
2203 if (inherent_size < total_size)
2204 adjust += (total_size - inherent_size);
2205#endif /* BYTES_BIG_ENDIAN */
2206
2207 /* If we have any adjustment to make, or if the stack slot is the
2208 wrong mode, make a new stack slot. */
2209 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2210 {
2211 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2212 plus_constant (XEXP (x, 0), adjust));
2213 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2214 }
2215
2216 /* Save the stack slot for later. */
2217 reg_equiv_memory_loc[i] = x;
2218 }
2219}
2220
2221/* Mark the slots in regs_ever_live for the hard regs
2222 used by pseudo-reg number REGNO. */
2223
2224void
2225mark_home_live (regno)
2226 int regno;
2227{
2228 register int i, lim;
2229 i = reg_renumber[regno];
2230 if (i < 0)
2231 return;
2232 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2233 while (i < lim)
2234 regs_ever_live[i++] = 1;
2235}
2236\f
2237/* This function handles the tracking of elimination offsets around branches.
2238
2239 X is a piece of RTL being scanned.
2240
2241 INSN is the insn that it came from, if any.
2242
2243 INITIAL_P is non-zero if we are to set the offset to be the initial
2244 offset and zero if we are setting the offset of the label to be the
2245 current offset. */
2246
2247static void
2248set_label_offsets (x, insn, initial_p)
2249 rtx x;
2250 rtx insn;
2251 int initial_p;
2252{
2253 enum rtx_code code = GET_CODE (x);
2254 rtx tem;
2255 int i;
2256 struct elim_table *p;
2257
2258 switch (code)
2259 {
2260 case LABEL_REF:
8be386d9
RS
2261 if (LABEL_REF_NONLOCAL_P (x))
2262 return;
2263
32131a9c
RK
2264 x = XEXP (x, 0);
2265
2266 /* ... fall through ... */
2267
2268 case CODE_LABEL:
2269 /* If we know nothing about this label, set the desired offsets. Note
2270 that this sets the offset at a label to be the offset before a label
2271 if we don't know anything about the label. This is not correct for
2272 the label after a BARRIER, but is the best guess we can make. If
2273 we guessed wrong, we will suppress an elimination that might have
2274 been possible had we been able to guess correctly. */
2275
2276 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2277 {
2278 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2279 offsets_at[CODE_LABEL_NUMBER (x)][i]
2280 = (initial_p ? reg_eliminate[i].initial_offset
2281 : reg_eliminate[i].offset);
2282 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2283 }
2284
2285 /* Otherwise, if this is the definition of a label and it is
d45cf215 2286 preceded by a BARRIER, set our offsets to the known offset of
32131a9c
RK
2287 that label. */
2288
2289 else if (x == insn
2290 && (tem = prev_nonnote_insn (insn)) != 0
2291 && GET_CODE (tem) == BARRIER)
2a4b5f3b
RK
2292 {
2293 num_not_at_initial_offset = 0;
2294 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2295 {
2296 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2297 = offsets_at[CODE_LABEL_NUMBER (x)][i];
1d0d98f3
RK
2298 if (reg_eliminate[i].can_eliminate
2299 && (reg_eliminate[i].offset
2300 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
2301 num_not_at_initial_offset++;
2302 }
2303 }
32131a9c
RK
2304
2305 else
2306 /* If neither of the above cases is true, compare each offset
2307 with those previously recorded and suppress any eliminations
2308 where the offsets disagree. */
a8fdc208 2309
32131a9c
RK
2310 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2311 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2312 != (initial_p ? reg_eliminate[i].initial_offset
2313 : reg_eliminate[i].offset))
2314 reg_eliminate[i].can_eliminate = 0;
2315
2316 return;
2317
2318 case JUMP_INSN:
2319 set_label_offsets (PATTERN (insn), insn, initial_p);
2320
2321 /* ... fall through ... */
2322
2323 case INSN:
2324 case CALL_INSN:
2325 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2326 and hence must have all eliminations at their initial offsets. */
2327 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2328 if (REG_NOTE_KIND (tem) == REG_LABEL)
2329 set_label_offsets (XEXP (tem, 0), insn, 1);
2330 return;
2331
2332 case ADDR_VEC:
2333 case ADDR_DIFF_VEC:
2334 /* Each of the labels in the address vector must be at their initial
 2335	 offsets.  We want the first field for ADDR_VEC and the second
2336 field for ADDR_DIFF_VEC. */
2337
2338 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2339 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2340 insn, initial_p);
2341 return;
2342
2343 case SET:
2344 /* We only care about setting PC. If the source is not RETURN,
2345 IF_THEN_ELSE, or a label, disable any eliminations not at
2346 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2347 isn't one of those possibilities. For branches to a label,
2348 call ourselves recursively.
2349
2350 Note that this can disable elimination unnecessarily when we have
2351 a non-local goto since it will look like a non-constant jump to
2352 someplace in the current function. This isn't a significant
2353 problem since such jumps will normally be when all elimination
2354 pairs are back to their initial offsets. */
2355
2356 if (SET_DEST (x) != pc_rtx)
2357 return;
2358
2359 switch (GET_CODE (SET_SRC (x)))
2360 {
2361 case PC:
2362 case RETURN:
2363 return;
2364
2365 case LABEL_REF:
2366 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2367 return;
2368
2369 case IF_THEN_ELSE:
2370 tem = XEXP (SET_SRC (x), 1);
2371 if (GET_CODE (tem) == LABEL_REF)
2372 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2373 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2374 break;
2375
2376 tem = XEXP (SET_SRC (x), 2);
2377 if (GET_CODE (tem) == LABEL_REF)
2378 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2379 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2380 break;
2381 return;
2382 }
2383
2384 /* If we reach here, all eliminations must be at their initial
2385 offset because we are doing a jump to a variable address. */
2386 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2387 if (p->offset != p->initial_offset)
2388 p->can_eliminate = 0;
2389 }
2390}
2391\f
 2392/* Used for communication between the next two functions to properly share
2393 the vector for an ASM_OPERANDS. */
2394
2395static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2396
a8fdc208 2397/* Scan X and replace any eliminable registers (such as fp) with a
32131a9c
RK
2398 replacement (such as sp), plus an offset.
2399
2400 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2401 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2402 MEM, we are allowed to replace a sum of a register and the constant zero
2403 with the register, which we cannot do outside a MEM. In addition, we need
2404 to record the fact that a register is referenced outside a MEM.
2405
2406 If INSN is nonzero, it is the insn containing X. If we replace a REG
2407 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2408 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
 2409   the REG is being modified.
2410
2411 If we see a modification to a register we know about, take the
2412 appropriate action (see case SET, below).
2413
 2414   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2415 replacements done assuming all offsets are at their initial values. If
2416 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2417 encounter, return the actual location so that find_reloads will do
2418 the proper thing. */
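/* (For instance, with purely illustrative offsets: under an elimination of
   fp into sp whose current offset is 16, (plus (reg fp) (const_int 8))
   appearing inside a MEM is rewritten below as
   (plus (reg sp) (const_int 24)).)  */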
2419
2420rtx
2421eliminate_regs (x, mem_mode, insn)
2422 rtx x;
2423 enum machine_mode mem_mode;
2424 rtx insn;
2425{
2426 enum rtx_code code = GET_CODE (x);
2427 struct elim_table *ep;
2428 int regno;
2429 rtx new;
2430 int i, j;
2431 char *fmt;
2432 int copied = 0;
2433
2434 switch (code)
2435 {
2436 case CONST_INT:
2437 case CONST_DOUBLE:
2438 case CONST:
2439 case SYMBOL_REF:
2440 case CODE_LABEL:
2441 case PC:
2442 case CC0:
2443 case ASM_INPUT:
2444 case ADDR_VEC:
2445 case ADDR_DIFF_VEC:
2446 case RETURN:
2447 return x;
2448
2449 case REG:
2450 regno = REGNO (x);
2451
2452 /* First handle the case where we encounter a bare register that
2453 is eliminable. Replace it with a PLUS. */
2454 if (regno < FIRST_PSEUDO_REGISTER)
2455 {
2456 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2457 ep++)
2458 if (ep->from_rtx == x && ep->can_eliminate)
2459 {
2460 if (! mem_mode)
2461 ep->ref_outside_mem = 1;
2462 return plus_constant (ep->to_rtx, ep->previous_offset);
2463 }
2464
2465 }
2466 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2467 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2468 {
2469 /* In this case, find_reloads would attempt to either use an
2470 incorrect address (if something is not at its initial offset)
 2471	     or substitute a replaced address into an insn (which loses
2472 if the offset is changed by some later action). So we simply
2473 return the replaced stack slot (assuming it is changed by
2474 elimination) and ignore the fact that this is actually a
2475 reference to the pseudo. Ensure we make a copy of the
2476 address in case it is shared. */
fb3821f7
CH
2477 new = eliminate_regs (reg_equiv_memory_loc[regno],
2478 mem_mode, NULL_RTX);
32131a9c
RK
2479 if (new != reg_equiv_memory_loc[regno])
2480 return copy_rtx (new);
2481 }
2482 return x;
2483
2484 case PLUS:
2485 /* If this is the sum of an eliminable register and a constant, rework
2486 the sum. */
2487 if (GET_CODE (XEXP (x, 0)) == REG
2488 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2489 && CONSTANT_P (XEXP (x, 1)))
2490 {
2491 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2492 ep++)
2493 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2494 {
2495 if (! mem_mode)
2496 ep->ref_outside_mem = 1;
2497
2498 /* The only time we want to replace a PLUS with a REG (this
2499 occurs when the constant operand of the PLUS is the negative
2500 of the offset) is when we are inside a MEM. We won't want
2501 to do so at other times because that would change the
2502 structure of the insn in a way that reload can't handle.
2503 We special-case the commonest situation in
2504 eliminate_regs_in_insn, so just replace a PLUS with a
2505 PLUS here, unless inside a MEM. */
a23b64d5 2506 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
32131a9c
RK
2507 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2508 return ep->to_rtx;
2509 else
2510 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2511 plus_constant (XEXP (x, 1),
2512 ep->previous_offset));
2513 }
2514
2515 /* If the register is not eliminable, we are done since the other
2516 operand is a constant. */
2517 return x;
2518 }
2519
2520 /* If this is part of an address, we want to bring any constant to the
2521 outermost PLUS. We will do this by doing register replacement in
2522 our operands and seeing if a constant shows up in one of them.
2523
2524 We assume here this is part of an address (or a "load address" insn)
2525 since an eliminable register is not likely to appear in any other
2526 context.
2527
2528 If we have (plus (eliminable) (reg)), we want to produce
 2529	 (plus (plus (replacement) (reg)) (const)).  If this was part of a
2530 normal add insn, (plus (replacement) (reg)) will be pushed as a
2531 reload. This is the desired action. */
2532
2533 {
fb3821f7
CH
2534 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2535 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX);
32131a9c
RK
2536
2537 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2538 {
2539 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2540 didn't get a hard register but has a reg_equiv_constant,
32131a9c
RK
2541 we must replace the constant here since it may no longer
2542 be in the position of any operand. */
2543 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2544 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2545 && reg_renumber[REGNO (new1)] < 0
2546 && reg_equiv_constant != 0
2547 && reg_equiv_constant[REGNO (new1)] != 0)
2548 new1 = reg_equiv_constant[REGNO (new1)];
2549 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2550 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2551 && reg_renumber[REGNO (new0)] < 0
2552 && reg_equiv_constant[REGNO (new0)] != 0)
2553 new0 = reg_equiv_constant[REGNO (new0)];
2554
2555 new = form_sum (new0, new1);
2556
2557 /* As above, if we are not inside a MEM we do not want to
2558 turn a PLUS into something else. We might try to do so here
2559 for an addition of 0 if we aren't optimizing. */
2560 if (! mem_mode && GET_CODE (new) != PLUS)
2561 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2562 else
2563 return new;
2564 }
2565 }
2566 return x;
2567
2568 case EXPR_LIST:
2569 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2570 if (XEXP (x, 0))
2571 {
fb3821f7 2572 new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
32131a9c
RK
2573 if (new != XEXP (x, 0))
2574 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2575 }
2576
2577 /* ... fall through ... */
2578
2579 case INSN_LIST:
2580 /* Now do eliminations in the rest of the chain. If this was
2581 an EXPR_LIST, this might result in allocating more memory than is
2582 strictly needed, but it simplifies the code. */
2583 if (XEXP (x, 1))
2584 {
fb3821f7 2585 new = eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX);
32131a9c
RK
2586 if (new != XEXP (x, 1))
2587 return gen_rtx (INSN_LIST, GET_MODE (x), XEXP (x, 0), new);
2588 }
2589 return x;
2590
2591 case CALL:
2592 case COMPARE:
2593 case MINUS:
2594 case MULT:
2595 case DIV: case UDIV:
2596 case MOD: case UMOD:
2597 case AND: case IOR: case XOR:
2598 case LSHIFT: case ASHIFT: case ROTATE:
2599 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2600 case NE: case EQ:
2601 case GE: case GT: case GEU: case GTU:
2602 case LE: case LT: case LEU: case LTU:
2603 {
fb3821f7
CH
2604 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2605 rtx new1
2606 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX) : 0;
32131a9c
RK
2607
2608 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2609 return gen_rtx (code, GET_MODE (x), new0, new1);
2610 }
2611 return x;
2612
2613 case PRE_INC:
2614 case POST_INC:
2615 case PRE_DEC:
2616 case POST_DEC:
2617 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2618 if (ep->to_rtx == XEXP (x, 0))
2619 {
2620 if (code == PRE_DEC || code == POST_DEC)
2621 ep->offset += GET_MODE_SIZE (mem_mode);
2622 else
2623 ep->offset -= GET_MODE_SIZE (mem_mode);
2624 }
2625
2626 /* Fall through to generic unary operation case. */
2627 case USE:
2628 case STRICT_LOW_PART:
2629 case NEG: case NOT:
2630 case SIGN_EXTEND: case ZERO_EXTEND:
2631 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2632 case FLOAT: case FIX:
2633 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2634 case ABS:
2635 case SQRT:
2636 case FFS:
fb3821f7 2637 new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
32131a9c
RK
2638 if (new != XEXP (x, 0))
2639 return gen_rtx (code, GET_MODE (x), new);
2640 return x;
2641
2642 case SUBREG:
2643 /* Similar to above processing, but preserve SUBREG_WORD.
2644 Convert (subreg (mem)) to (mem) if not paradoxical.
2645 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2646 pseudo didn't get a hard reg, we must replace this with the
2647 eliminated version of the memory location because push_reloads
2648 may do the replacement in certain circumstances. */
2649 if (GET_CODE (SUBREG_REG (x)) == REG
2650 && (GET_MODE_SIZE (GET_MODE (x))
2651 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2652 && reg_equiv_memory_loc != 0
2653 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2654 {
2655 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
fb3821f7 2656 mem_mode, NULL_RTX);
32131a9c
RK
2657
2658 /* If we didn't change anything, we must retain the pseudo. */
2659 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2660 new = XEXP (x, 0);
2661 else
2662 /* Otherwise, ensure NEW isn't shared in case we have to reload
2663 it. */
2664 new = copy_rtx (new);
2665 }
2666 else
fb3821f7 2667 new = eliminate_regs (SUBREG_REG (x), mem_mode, NULL_RTX);
32131a9c
RK
2668
2669 if (new != XEXP (x, 0))
2670 {
2671 if (GET_CODE (new) == MEM
2672 && (GET_MODE_SIZE (GET_MODE (x))
2673 <= GET_MODE_SIZE (GET_MODE (new))))
2674 {
2675 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2676 enum machine_mode mode = GET_MODE (x);
2677
2678#if BYTES_BIG_ENDIAN
2679 offset += (MIN (UNITS_PER_WORD,
2680 GET_MODE_SIZE (GET_MODE (new)))
2681 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2682#endif
2683
2684 PUT_MODE (new, mode);
2685 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2686 return new;
2687 }
2688 else
2689 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2690 }
2691
2692 return x;
2693
2694 case CLOBBER:
2695 /* If clobbering a register that is the replacement register for an
d45cf215 2696 elimination we still think can be performed, note that it cannot
32131a9c
RK
2697 be performed. Otherwise, we need not be concerned about it. */
2698 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2699 if (ep->to_rtx == XEXP (x, 0))
2700 ep->can_eliminate = 0;
2701
2702 return x;
2703
2704 case ASM_OPERANDS:
2705 {
2706 rtx *temp_vec;
2707 /* Properly handle sharing input and constraint vectors. */
2708 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2709 {
2710 /* When we come to a new vector not seen before,
2711 scan all its elements; keep the old vector if none
2712 of them changes; otherwise, make a copy. */
2713 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2714 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2715 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2716 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
fb3821f7 2717 mem_mode, NULL_RTX);
32131a9c
RK
2718
2719 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2720 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2721 break;
2722
2723 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2724 new_asm_operands_vec = old_asm_operands_vec;
2725 else
2726 new_asm_operands_vec
2727 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2728 }
2729
2730 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2731 if (new_asm_operands_vec == old_asm_operands_vec)
2732 return x;
2733
2734 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2735 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2736 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2737 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2738 ASM_OPERANDS_SOURCE_FILE (x),
2739 ASM_OPERANDS_SOURCE_LINE (x));
2740 new->volatil = x->volatil;
2741 return new;
2742 }
2743
2744 case SET:
2745 /* Check for setting a register that we know about. */
2746 if (GET_CODE (SET_DEST (x)) == REG)
2747 {
2748 /* See if this is setting the replacement register for an
a8fdc208 2749 elimination.
32131a9c
RK
2750
2751 If DEST is the frame pointer, we do nothing because we assume that
2752 all assignments to the frame pointer are for non-local gotos and
2753 are being done at a time when they are valid and do not disturb
2754 anything else. Some machines want to eliminate a fake argument
2755 pointer with either the frame or stack pointer. Assignments to
2756 the frame pointer must not prevent this elimination. */
2757
2758 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2759 ep++)
2760 if (ep->to_rtx == SET_DEST (x)
2761 && SET_DEST (x) != frame_pointer_rtx)
2762 {
6dc42e49 2763 /* If it is being incremented, adjust the offset. Otherwise,
32131a9c
RK
2764 this elimination can't be done. */
2765 rtx src = SET_SRC (x);
2766
2767 if (GET_CODE (src) == PLUS
2768 && XEXP (src, 0) == SET_DEST (x)
2769 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2770 ep->offset -= INTVAL (XEXP (src, 1));
2771 else
2772 ep->can_eliminate = 0;
2773 }
2774
 2775	  /* Now check to see if we are assigning to a register that can be
2776 eliminated. If so, it must be as part of a PARALLEL, since we
2777 will not have been called if this is a single SET. So indicate
2778 that we can no longer eliminate this reg. */
2779 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2780 ep++)
2781 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
2782 ep->can_eliminate = 0;
2783 }
2784
2785 /* Now avoid the loop below in this common case. */
2786 {
fb3821f7
CH
2787 rtx new0 = eliminate_regs (SET_DEST (x), 0, NULL_RTX);
2788 rtx new1 = eliminate_regs (SET_SRC (x), 0, NULL_RTX);
32131a9c
RK
2789
2790 /* If SET_DEST changed from a REG to a MEM and INSN is non-zero,
2791 write a CLOBBER insn. */
2792 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
2793 && insn != 0)
2794 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
2795
2796 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
2797 return gen_rtx (SET, VOIDmode, new0, new1);
2798 }
2799
2800 return x;
2801
2802 case MEM:
2803 /* Our only special processing is to pass the mode of the MEM to our
2804 recursive call and copy the flags. While we are here, handle this
2805 case more efficiently. */
fb3821f7 2806 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), NULL_RTX);
32131a9c
RK
2807 if (new != XEXP (x, 0))
2808 {
2809 new = gen_rtx (MEM, GET_MODE (x), new);
2810 new->volatil = x->volatil;
2811 new->unchanging = x->unchanging;
2812 new->in_struct = x->in_struct;
2813 return new;
2814 }
2815 else
2816 return x;
2817 }
2818
2819 /* Process each of our operands recursively. If any have changed, make a
2820 copy of the rtx. */
2821 fmt = GET_RTX_FORMAT (code);
2822 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2823 {
2824 if (*fmt == 'e')
2825 {
fb3821f7 2826 new = eliminate_regs (XEXP (x, i), mem_mode, NULL_RTX);
32131a9c
RK
2827 if (new != XEXP (x, i) && ! copied)
2828 {
2829 rtx new_x = rtx_alloc (code);
2830 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
2831 + (sizeof (new_x->fld[0])
2832 * GET_RTX_LENGTH (code))));
2833 x = new_x;
2834 copied = 1;
2835 }
2836 XEXP (x, i) = new;
2837 }
2838 else if (*fmt == 'E')
2839 {
2840 int copied_vec = 0;
2841 for (j = 0; j < XVECLEN (x, i); j++)
2842 {
2843 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
2844 if (new != XVECEXP (x, i, j) && ! copied_vec)
2845 {
2846 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2847 &XVECEXP (x, i, 0));
2848 if (! copied)
2849 {
2850 rtx new_x = rtx_alloc (code);
2851 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
2852 + (sizeof (new_x->fld[0])
2853 * GET_RTX_LENGTH (code))));
2854 x = new_x;
2855 copied = 1;
2856 }
2857 XVEC (x, i) = new_v;
2858 copied_vec = 1;
2859 }
2860 XVECEXP (x, i, j) = new;
2861 }
2862 }
2863 }
2864
2865 return x;
2866}
2867\f
2868/* Scan INSN and eliminate all eliminable registers in it.
2869
2870 If REPLACE is nonzero, do the replacement destructively. Also
 2871   delete the insn as dead if it is setting an eliminable register.
2872
2873 If REPLACE is zero, do all our allocations in reload_obstack.
2874
2875 If no eliminations were done and this insn doesn't require any elimination
2876 processing (these are not identical conditions: it might be updating sp,
2877 but not referencing fp; this needs to be seen during reload_as_needed so
2878 that the offset between fp and sp can be taken into consideration), zero
2879 is returned. Otherwise, 1 is returned. */
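/* (For instance, a push that adjusts sp references no eliminated register
   and needs no replacements, yet it changes the offset between fp and sp,
   so it must still be flagged so reload_as_needed can account for the new
   offset.)  */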
2880
2881static int
2882eliminate_regs_in_insn (insn, replace)
2883 rtx insn;
2884 int replace;
2885{
2886 rtx old_body = PATTERN (insn);
2887 rtx new_body;
2888 int val = 0;
2889 struct elim_table *ep;
2890
2891 if (! replace)
2892 push_obstacks (&reload_obstack, &reload_obstack);
2893
2894 if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
2895 && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
2896 {
2897 /* Check for setting an eliminable register. */
2898 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2899 if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
2900 {
2901 /* In this case this insn isn't serving a useful purpose. We
2902 will delete it in reload_as_needed once we know that this
2903 elimination is, in fact, being done.
2904
 2905	     If REPLACE isn't set, we can't delete this insn, but needn't
2906 process it since it won't be used unless something changes. */
2907 if (replace)
2908 delete_dead_insn (insn);
2909 val = 1;
2910 goto done;
2911 }
2912
2913 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
2914 in the insn is the negative of the offset in FROM. Substitute
2915 (set (reg) (reg to)) for the insn and change its code.
2916
 2917	 We have to do this here, rather than in eliminate_regs, so that we can
2918 change the insn code. */
2919
2920 if (GET_CODE (SET_SRC (old_body)) == PLUS
2921 && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
2922 && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
2923 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2924 ep++)
2925 if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
2926 && ep->can_eliminate
2927 && ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
2928 {
2929 PATTERN (insn) = gen_rtx (SET, VOIDmode,
2930 SET_DEST (old_body), ep->to_rtx);
2931 INSN_CODE (insn) = -1;
2932 val = 1;
2933 goto done;
2934 }
2935 }
2936
2937 old_asm_operands_vec = 0;
2938
2939 /* Replace the body of this insn with a substituted form. If we changed
2940 something, return non-zero. If this is the final call for this
2941 insn (REPLACE is non-zero), do the elimination in REG_NOTES as well.
2942
2943 If we are replacing a body that was a (set X (plus Y Z)), try to
2944 re-recognize the insn. We do this in case we had a simple addition
2945 but now can do this as a load-address. This saves an insn in this
2946 common case. */
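  /* (For instance, with a purely illustrative offset:
     (set (reg R) (plus (reg fp) (const_int 4))) may eliminate to
     (set (reg R) (plus (reg sp) (const_int 20))), which a machine with a
     load-address pattern can then recognize as a single insn.)  */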
2947
fb3821f7 2948 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
32131a9c
RK
2949 if (new_body != old_body)
2950 {
2951 if (GET_CODE (old_body) != SET || GET_CODE (SET_SRC (old_body)) != PLUS
2952 || ! validate_change (insn, &PATTERN (insn), new_body, 0))
2953 PATTERN (insn) = new_body;
2954
2955 if (replace && REG_NOTES (insn))
fb3821f7 2956 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, NULL_RTX);
32131a9c
RK
2957 val = 1;
2958 }
a8fdc208 2959
32131a9c
RK
2960 /* Loop through all elimination pairs. See if any have changed and
2961 recalculate the number not at initial offset.
2962
a8efe40d
RK
2963 Compute the maximum offset (minimum offset if the stack does not
2964 grow downward) for each elimination pair.
2965
32131a9c
RK
 2966   We also detect cases where register elimination cannot be done,
2967 namely, if a register would be both changed and referenced outside a MEM
2968 in the resulting insn since such an insn is often undefined and, even if
2969 not, we cannot know what meaning will be given to it. Note that it is
2970 valid to have a register used in an address in an insn that changes it
2971 (presumably with a pre- or post-increment or decrement).
2972
2973 If anything changes, return nonzero. */
2974
2975 num_not_at_initial_offset = 0;
2976 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2977 {
2978 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
2979 ep->can_eliminate = 0;
2980
2981 ep->ref_outside_mem = 0;
2982
2983 if (ep->previous_offset != ep->offset)
2984 val = 1;
2985
2986 ep->previous_offset = ep->offset;
2987 if (ep->can_eliminate && ep->offset != ep->initial_offset)
2988 num_not_at_initial_offset++;
a8efe40d
RK
2989
2990#ifdef STACK_GROWS_DOWNWARD
2991 ep->max_offset = MAX (ep->max_offset, ep->offset);
2992#else
2993 ep->max_offset = MIN (ep->max_offset, ep->offset);
2994#endif
32131a9c
RK
2995 }
2996
2997 done:
2998 if (! replace)
2999 pop_obstacks ();
3000
3001 return val;
3002}
3003
3004/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3005 replacement we currently believe is valid, mark it as not eliminable if X
3006 modifies DEST in any way other than by adding a constant integer to it.
3007
3008 If DEST is the frame pointer, we do nothing because we assume that
3009 all assignments to the frame pointer are nonlocal gotos and are being done
3010 at a time when they are valid and do not disturb anything else.
3011 Some machines want to eliminate a fake argument pointer with either the
3012 frame or stack pointer. Assignments to the frame pointer must not prevent
3013 this elimination.
3014
3015 Called via note_stores from reload before starting its passes to scan
3016 the insns of the function. */
3017
3018static void
3019mark_not_eliminable (dest, x)
3020 rtx dest;
3021 rtx x;
3022{
3023 register int i;
3024
3025 /* A SUBREG of a hard register here is just changing its mode. We should
3026 not see a SUBREG of an eliminable hard register, but check just in
3027 case. */
3028 if (GET_CODE (dest) == SUBREG)
3029 dest = SUBREG_REG (dest);
3030
3031 if (dest == frame_pointer_rtx)
3032 return;
3033
3034 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3035 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3036 && (GET_CODE (x) != SET
3037 || GET_CODE (SET_SRC (x)) != PLUS
3038 || XEXP (SET_SRC (x), 0) != dest
3039 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3040 {
3041 reg_eliminate[i].can_eliminate_previous
3042 = reg_eliminate[i].can_eliminate = 0;
3043 num_eliminable--;
3044 }
3045}
3046\f
3047/* Kick all pseudos out of hard register REGNO.
3048 If GLOBAL is nonzero, try to find someplace else to put them.
3049 If DUMPFILE is nonzero, log actions taken on that file.
3050
3051 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
 3052   because we found we can't eliminate some register.  In that case, no pseudos
3053 are allowed to be in the register, even if they are only in a block that
3054 doesn't require spill registers, unlike the case when we are spilling this
3055 hard reg to produce another spill register.
3056
3057 Return nonzero if any pseudos needed to be kicked out. */
3058
3059static int
3060spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3061 register int regno;
3062 int global;
3063 FILE *dumpfile;
3064 int cant_eliminate;
3065{
3066 int something_changed = 0;
3067 register int i;
3068
3069 SET_HARD_REG_BIT (forbidden_regs, regno);
3070
3071 /* Spill every pseudo reg that was allocated to this reg
3072 or to something that overlaps this reg. */
3073
3074 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3075 if (reg_renumber[i] >= 0
3076 && reg_renumber[i] <= regno
a8fdc208 3077 && (reg_renumber[i]
32131a9c
RK
3078 + HARD_REGNO_NREGS (reg_renumber[i],
3079 PSEUDO_REGNO_MODE (i))
3080 > regno))
3081 {
3082 enum reg_class class = REGNO_REG_CLASS (regno);
3083
3084 /* If this register belongs solely to a basic block which needed no
3085 spilling of any class that this register is contained in,
3086 leave it be, unless we are spilling this register because
3087 it was a hard register that can't be eliminated. */
3088
3089 if (! cant_eliminate
3090 && basic_block_needs[0]
3091 && reg_basic_block[i] >= 0
3092 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3093 {
3094 enum reg_class *p;
3095
3096 for (p = reg_class_superclasses[(int) class];
3097 *p != LIM_REG_CLASSES; p++)
3098 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3099 break;
a8fdc208 3100
32131a9c
RK
3101 if (*p == LIM_REG_CLASSES)
3102 continue;
3103 }
3104
3105 /* Mark it as no longer having a hard register home. */
3106 reg_renumber[i] = -1;
3107 /* We will need to scan everything again. */
3108 something_changed = 1;
3109 if (global)
3110 retry_global_alloc (i, forbidden_regs);
3111
3112 alter_reg (i, regno);
3113 if (dumpfile)
3114 {
3115 if (reg_renumber[i] == -1)
3116 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3117 else
3118 fprintf (dumpfile, " Register %d now in %d.\n\n",
3119 i, reg_renumber[i]);
3120 }
3121 }
3122
3123 return something_changed;
3124}
3125\f
3126/* Find all paradoxical subregs within X and update reg_max_ref_width. */
3127
3128static void
3129scan_paradoxical_subregs (x)
3130 register rtx x;
3131{
3132 register int i;
3133 register char *fmt;
3134 register enum rtx_code code = GET_CODE (x);
3135
3136 switch (code)
3137 {
3138 case CONST_INT:
3139 case CONST:
3140 case SYMBOL_REF:
3141 case LABEL_REF:
3142 case CONST_DOUBLE:
3143 case CC0:
3144 case PC:
3145 case REG:
3146 case USE:
3147 case CLOBBER:
3148 return;
3149
3150 case SUBREG:
3151 if (GET_CODE (SUBREG_REG (x)) == REG
3152 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3153 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3154 = GET_MODE_SIZE (GET_MODE (x));
3155 return;
3156 }
3157
3158 fmt = GET_RTX_FORMAT (code);
3159 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3160 {
3161 if (fmt[i] == 'e')
3162 scan_paradoxical_subregs (XEXP (x, i));
3163 else if (fmt[i] == 'E')
3164 {
3165 register int j;
3166 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3167 scan_paradoxical_subregs (XVECEXP (x, i, j));
3168 }
3169 }
3170}
3171\f
3172struct hard_reg_n_uses { int regno; int uses; };
3173
3174static int
3175hard_reg_use_compare (p1, p2)
3176 struct hard_reg_n_uses *p1, *p2;
3177{
3178 int tem = p1->uses - p2->uses;
3179 if (tem != 0) return tem;
3180 /* If regs are equally good, sort by regno,
3181 so that the results of qsort leave nothing to chance. */
3182 return p1->regno - p2->regno;
3183}
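/* For example, a hard reg whose pseudos account for 3 references sorts
   ahead of one with 5 references; equal counts fall back to comparing
   regno, so the qsort result is deterministic.  */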
3184
3185/* Choose the order to consider regs for use as reload registers
3186 based on how much trouble would be caused by spilling one.
3187 Store them in order of decreasing preference in potential_reload_regs. */
3188
3189static void
3190order_regs_for_reload ()
3191{
3192 register int i;
3193 register int o = 0;
3194 int large = 0;
3195
3196 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3197
3198 CLEAR_HARD_REG_SET (bad_spill_regs);
3199
3200 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3201 potential_reload_regs[i] = -1;
3202
3203 /* Count number of uses of each hard reg by pseudo regs allocated to it
3204 and then order them by decreasing use. */
3205
3206 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3207 {
3208 hard_reg_n_uses[i].uses = 0;
3209 hard_reg_n_uses[i].regno = i;
3210 }
3211
3212 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3213 {
3214 int regno = reg_renumber[i];
3215 if (regno >= 0)
3216 {
3217 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3218 while (regno < lim)
3219 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3220 }
3221 large += reg_n_refs[i];
3222 }
3223
3224 /* Now fixed registers (which cannot safely be used for reloading)
3225 get a very high use count so they will be considered least desirable.
3226 Registers used explicitly in the rtl code are almost as bad. */
3227
3228 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3229 {
3230 if (fixed_regs[i])
3231 {
3232 hard_reg_n_uses[i].uses += 2 * large + 2;
3233 SET_HARD_REG_BIT (bad_spill_regs, i);
3234 }
3235 else if (regs_explicitly_used[i])
3236 {
3237 hard_reg_n_uses[i].uses += large + 1;
3238 /* ??? We are doing this here because of the potential that
3239 bad code may be generated if a register explicitly used in
3240 an insn was used as a spill register for that insn. But
 3241 not using these as spill registers may lose on some machine.
3242 We'll have to see how this works out. */
3243 SET_HARD_REG_BIT (bad_spill_regs, i);
3244 }
3245 }
3246 hard_reg_n_uses[FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3247 SET_HARD_REG_BIT (bad_spill_regs, FRAME_POINTER_REGNUM);
3248
3249#ifdef ELIMINABLE_REGS
3250 /* If registers other than the frame pointer are eliminable, mark them as
3251 poor choices. */
3252 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3253 {
3254 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3255 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3256 }
3257#endif
3258
3259 /* Prefer registers not so far used, for use in temporary loading.
3260 Among them, if REG_ALLOC_ORDER is defined, use that order.
3261 Otherwise, prefer registers not preserved by calls. */
3262
3263#ifdef REG_ALLOC_ORDER
3264 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3265 {
3266 int regno = reg_alloc_order[i];
3267
3268 if (hard_reg_n_uses[regno].uses == 0)
3269 potential_reload_regs[o++] = regno;
3270 }
3271#else
3272 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3273 {
3274 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3275 potential_reload_regs[o++] = i;
3276 }
3277 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3278 {
3279 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3280 potential_reload_regs[o++] = i;
3281 }
3282#endif
3283
3284 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3285 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3286
3287 /* Now add the regs that are already used,
3288 preferring those used less often. The fixed and otherwise forbidden
3289 registers will be at the end of this list. */
3290
3291 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3292 if (hard_reg_n_uses[i].uses != 0)
3293 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3294}
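/* The resulting potential_reload_regs order is therefore: first the
   entirely unused registers (following REG_ALLOC_ORDER if defined,
   otherwise call-clobbered before call-saved), then the used registers
   in order of increasing use count, with the fixed, explicitly used,
   frame pointer and other eliminable registers forced to the end by
   their inflated counts.  */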
3295\f
3296/* Reload pseudo-registers into hard regs around each insn as needed.
3297 Additional register load insns are output before the insn that needs it
3298 and perhaps store insns after insns that modify the reloaded pseudo reg.
3299
3300 reg_last_reload_reg and reg_reloaded_contents keep track of
3301 which pseudo-registers are already available in reload registers.
3302 We update these for the reloads that we perform,
3303 as the insns are scanned. */
3304
3305static void
3306reload_as_needed (first, live_known)
3307 rtx first;
3308 int live_known;
3309{
3310 register rtx insn;
3311 register int i;
3312 int this_block = 0;
3313 rtx x;
3314 rtx after_call = 0;
3315
3316 bzero (spill_reg_rtx, sizeof spill_reg_rtx);
3317 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3318 bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
3319 reg_has_output_reload = (char *) alloca (max_regno);
3320 for (i = 0; i < n_spills; i++)
3321 {
3322 reg_reloaded_contents[i] = -1;
3323 reg_reloaded_insn[i] = 0;
3324 }
3325
3326 /* Reset all offsets on eliminable registers to their initial values. */
3327#ifdef ELIMINABLE_REGS
3328 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3329 {
3330 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
510dd77e 3331 reg_eliminate[i].initial_offset);
3332 reg_eliminate[i].previous_offset
3333 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3334 }
3335#else
3336 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3337 reg_eliminate[0].previous_offset
3338 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3339#endif
3340
3341 num_not_at_initial_offset = 0;
3342
3343 for (insn = first; insn;)
3344 {
3345 register rtx next = NEXT_INSN (insn);
3346
3347 /* Notice when we move to a new basic block. */
aa2c50d6 3348 if (live_known && this_block + 1 < n_basic_blocks
3349 && insn == basic_block_head[this_block+1])
3350 ++this_block;
3351
3352 /* If we pass a label, copy the offsets from the label information
3353 into the current offsets of each elimination. */
3354 if (GET_CODE (insn) == CODE_LABEL)
3355 {
3356 num_not_at_initial_offset = 0;
3357 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3358 {
3359 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3360 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3361 if (reg_eliminate[i].can_eliminate
3362 && (reg_eliminate[i].offset
3363 != reg_eliminate[i].initial_offset))
3364 num_not_at_initial_offset++;
3365 }
3366 }
3367
3368 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3369 {
3370 rtx avoid_return_reg = 0;
3371
3372#ifdef SMALL_REGISTER_CLASSES
3373 /* Set avoid_return_reg if this is an insn
3374 that might use the value of a function call. */
3375 if (GET_CODE (insn) == CALL_INSN)
3376 {
3377 if (GET_CODE (PATTERN (insn)) == SET)
3378 after_call = SET_DEST (PATTERN (insn));
3379 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3380 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3381 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3382 else
3383 after_call = 0;
3384 }
3385 else if (after_call != 0
3386 && !(GET_CODE (PATTERN (insn)) == SET
3387 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3388 {
3389 if (reg_mentioned_p (after_call, PATTERN (insn)))
3390 avoid_return_reg = after_call;
3391 after_call = 0;
3392 }
3393#endif /* SMALL_REGISTER_CLASSES */
3394
3395 /* If this is a USE and CLOBBER of a MEM, ensure that any
3396 references to eliminable registers have been removed. */
3397
3398 if ((GET_CODE (PATTERN (insn)) == USE
3399 || GET_CODE (PATTERN (insn)) == CLOBBER)
3400 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3401 XEXP (XEXP (PATTERN (insn), 0), 0)
3402 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
fb3821f7 3403 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
2758481d 3404
3405 /* If we need to do register elimination processing, do so.
3406 This might delete the insn, in which case we are done. */
3407 if (num_eliminable && GET_MODE (insn) == QImode)
3408 {
3409 eliminate_regs_in_insn (insn, 1);
3410 if (GET_CODE (insn) == NOTE)
3411 {
3412 insn = next;
3413 continue;
3414 }
3415 }
3416
3417 if (GET_MODE (insn) == VOIDmode)
3418 n_reloads = 0;
3419 /* First find the pseudo regs that must be reloaded for this insn.
3420 This info is returned in the tables reload_... (see reload.h).
3421 Also modify the body of INSN by substituting RELOAD
3422 rtx's for those pseudo regs. */
3423 else
3424 {
3425 bzero (reg_has_output_reload, max_regno);
3426 CLEAR_HARD_REG_SET (reg_is_output_reload);
3427
3428 find_reloads (insn, 1, spill_indirect_levels, live_known,
3429 spill_reg_order);
3430 }
3431
3432 if (n_reloads > 0)
3433 {
3434 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3435 rtx p;
3436 int class;
3437
3438 /* If this block has not had spilling done for a
a8fdc208 3439 particular class, deactivate any optional reloads
3440 of that class lest they try to use a spill-reg which isn't
3441 available here. If we have any non-optionals that need a
3442 spill reg, abort. */
3443
3444 for (class = 0; class < N_REG_CLASSES; class++)
3445 if (basic_block_needs[class] != 0
3446 && basic_block_needs[class][this_block] == 0)
3447 for (i = 0; i < n_reloads; i++)
3448 if (class == (int) reload_reg_class[i])
3449 {
3450 if (reload_optional[i])
3451 {
3452 reload_in[i] = reload_out[i] = 0;
3453 reload_secondary_p[i] = 0;
3454 }
3455 else if (reload_reg_rtx[i] == 0
3456 && (reload_in[i] != 0 || reload_out[i] != 0
3457 || reload_secondary_p[i] != 0))
3458 abort ();
3459 }
3460
3461 /* Now compute which reload regs to reload them into. Perhaps
3462 reusing reload regs from previous insns, or else output
3463 load insns to reload them. Maybe output store insns too.
3464 Record the choices of reload reg in reload_reg_rtx. */
3465 choose_reload_regs (insn, avoid_return_reg);
3466
3467 /* Generate the insns to reload operands into or out of
3468 their reload regs. */
3469 emit_reload_insns (insn);
3470
3471 /* Substitute the chosen reload regs from reload_reg_rtx
3472 into the insn's body (or perhaps into the bodies of other
3473 load and store insn that we just made for reloading
3474 and that we moved the structure into). */
3475 subst_reloads ();
3476
3477 /* If this was an ASM, make sure that all the reload insns
3478 we have generated are valid. If not, give an error
3479 and delete them. */
3480
3481 if (asm_noperands (PATTERN (insn)) >= 0)
3482 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3483 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3484 && (recog_memoized (p) < 0
3485 || (insn_extract (p),
3486 ! constrain_operands (INSN_CODE (p), 1))))
3487 {
3488 error_for_asm (insn,
3489 "`asm' operand requires impossible reload");
3490 PUT_CODE (p, NOTE);
3491 NOTE_SOURCE_FILE (p) = 0;
3492 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3493 }
3494 }
3495 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3496 is no longer validly lying around to save a future reload.
3497 Note that this does not detect pseudos that were reloaded
3498 for this insn in order to be stored in
3499 (obeying register constraints). That is correct; such reload
3500 registers ARE still valid. */
3501 note_stores (PATTERN (insn), forget_old_reloads_1);
3502
3503 /* There may have been CLOBBER insns placed after INSN. So scan
3504 between INSN and NEXT and use them to forget old reloads. */
3505 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3506 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3507 note_stores (PATTERN (x), forget_old_reloads_1);
3508
3509#ifdef AUTO_INC_DEC
3510 /* Likewise for regs altered by auto-increment in this insn.
3511 But note that the reg-notes are not changed by reloading:
3512 they still contain the pseudo-regs, not the spill regs. */
3513 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3514 if (REG_NOTE_KIND (x) == REG_INC)
3515 {
3516 /* See if this pseudo reg was reloaded in this insn.
3517 If so, its last-reload info is still valid
3518 because it is based on this insn's reload. */
3519 for (i = 0; i < n_reloads; i++)
3520 if (reload_out[i] == XEXP (x, 0))
3521 break;
3522
3523 if (i != n_reloads)
3524 forget_old_reloads_1 (XEXP (x, 0));
3525 }
3526#endif
3527 }
3528 /* A reload reg's contents are unknown after a label. */
3529 if (GET_CODE (insn) == CODE_LABEL)
3530 for (i = 0; i < n_spills; i++)
3531 {
3532 reg_reloaded_contents[i] = -1;
3533 reg_reloaded_insn[i] = 0;
3534 }
3535
3536 /* Don't assume a reload reg is still good after a call insn
3537 if it is a call-used reg. */
3538 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == CALL_INSN)
3539 for (i = 0; i < n_spills; i++)
3540 if (call_used_regs[spill_regs[i]])
3541 {
3542 reg_reloaded_contents[i] = -1;
3543 reg_reloaded_insn[i] = 0;
3544 }
3545
3546 /* In case registers overlap, allow certain insns to invalidate
3547 particular hard registers. */
3548
3549#ifdef INSN_CLOBBERS_REGNO_P
3550 for (i = 0 ; i < n_spills ; i++)
3551 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3552 {
3553 reg_reloaded_contents[i] = -1;
3554 reg_reloaded_insn[i] = 0;
3555 }
3556#endif
3557
3558 insn = next;
3559
3560#ifdef USE_C_ALLOCA
3561 alloca (0);
3562#endif
3563 }
3564}
3565
3566/* Discard all record of any value reloaded from X,
3567 or reloaded in X from someplace else;
3568 unless X is an output reload reg of the current insn.
3569
3570 X may be a hard reg (the reload reg)
3571 or it may be a pseudo reg that was reloaded from. */
3572
3573static void
3574forget_old_reloads_1 (x)
3575 rtx x;
3576{
3577 register int regno;
3578 int nr;
3579 int offset = 0;
3580
3581 /* note_stores does give us subregs of hard regs. */
3582 while (GET_CODE (x) == SUBREG)
3583 {
3584 offset += SUBREG_WORD (x);
3585 x = SUBREG_REG (x);
3586 }
3587
3588 if (GET_CODE (x) != REG)
3589 return;
3590
0a2e51a9 3591 regno = REGNO (x) + offset;
3592
3593 if (regno >= FIRST_PSEUDO_REGISTER)
3594 nr = 1;
3595 else
3596 {
3597 int i;
3598 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3599 /* Storing into a spilled-reg invalidates its contents.
3600 This can happen if a block-local pseudo is allocated to that reg
3601 and it wasn't spilled because this block's total need is 0.
3602 Then some insn might have an optional reload and use this reg. */
3603 for (i = 0; i < nr; i++)
3604 if (spill_reg_order[regno + i] >= 0
3605 /* But don't do this if the reg actually serves as an output
3606 reload reg in the current instruction. */
3607 && (n_reloads == 0
3608 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3609 {
3610 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3611 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3612 }
3613 }
3614
3615 /* Since value of X has changed,
3616 forget any value previously copied from it. */
3617
3618 while (nr-- > 0)
3619 /* But don't forget a copy if this is the output reload
3620 that establishes the copy's validity. */
3621 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3622 reg_last_reload_reg[regno + nr] = 0;
3623}
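/* For a hard reg X this clears the reg_reloaded_contents entry of every
   spill reg it overlaps as well as the corresponding reg_last_reload_reg
   entries; for a pseudo only its reg_last_reload_reg entry is cleared.  */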
3624\f
3625/* For each reload, the mode of the reload register. */
3626static enum machine_mode reload_mode[MAX_RELOADS];
3627
3628/* For each reload, the largest number of registers it will require. */
3629static int reload_nregs[MAX_RELOADS];
3630
3631/* Comparison function for qsort to decide which of two reloads
3632 should be handled first. *P1 and *P2 are the reload numbers. */
3633
3634static int
3635reload_reg_class_lower (p1, p2)
3636 short *p1, *p2;
3637{
3638 register int r1 = *p1, r2 = *p2;
3639 register int t;
a8fdc208 3640
3641 /* Consider required reloads before optional ones. */
3642 t = reload_optional[r1] - reload_optional[r2];
3643 if (t != 0)
3644 return t;
3645
3646 /* Count all solitary classes before non-solitary ones. */
3647 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3648 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3649 if (t != 0)
3650 return t;
3651
3652 /* Aside from solitaires, consider all multi-reg groups first. */
3653 t = reload_nregs[r2] - reload_nregs[r1];
3654 if (t != 0)
3655 return t;
3656
3657 /* Consider reloads in order of increasing reg-class number. */
3658 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3659 if (t != 0)
3660 return t;
3661
3662 /* If reloads are equally urgent, sort by reload number,
3663 so that the results of qsort leave nothing to chance. */
3664 return r1 - r2;
3665}
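/* In effect: required reloads come before optional ones; among those,
   reloads whose class contains exactly one register come first, then
   larger register groups before smaller ones; the remaining class-number
   and reload-number comparisons only make the order deterministic.  */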
3666\f
3667/* The following HARD_REG_SETs indicate when each hard register is
3668 used for a reload of various parts of the current insn. */
3669
3670/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
3671static HARD_REG_SET reload_reg_used;
3672/* If reg is in use for a RELOAD_FOR_INPUT_RELOAD_ADDRESS reload. */
3673static HARD_REG_SET reload_reg_used_in_input_addr;
3674/* If reg is in use for a RELOAD_FOR_OUTPUT_RELOAD_ADDRESS reload. */
3675static HARD_REG_SET reload_reg_used_in_output_addr;
3676/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
3677static HARD_REG_SET reload_reg_used_in_op_addr;
3678/* If reg is in use for a RELOAD_FOR_INPUT reload. */
3679static HARD_REG_SET reload_reg_used_in_input;
3680/* If reg is in use for a RELOAD_FOR_OUTPUT reload. */
3681static HARD_REG_SET reload_reg_used_in_output;
3682
3683/* If reg is in use as a reload reg for any sort of reload. */
3684static HARD_REG_SET reload_reg_used_at_all;
3685
3686/* Mark reg REGNO as in use for a reload of the sort spec'd by WHEN_NEEDED.
3687 MODE is used to indicate how many consecutive regs are actually used. */
3688
3689static void
3690mark_reload_reg_in_use (regno, when_needed, mode)
3691 int regno;
3692 enum reload_when_needed when_needed;
3693 enum machine_mode mode;
3694{
3695 int nregs = HARD_REGNO_NREGS (regno, mode);
3696 int i;
3697
3698 for (i = regno; i < nregs + regno; i++)
3699 {
3700 switch (when_needed)
3701 {
3702 case RELOAD_OTHER:
3703 SET_HARD_REG_BIT (reload_reg_used, i);
3704 break;
3705
3706 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3707 SET_HARD_REG_BIT (reload_reg_used_in_input_addr, i);
3708 break;
3709
3710 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3711 SET_HARD_REG_BIT (reload_reg_used_in_output_addr, i);
3712 break;
3713
3714 case RELOAD_FOR_OPERAND_ADDRESS:
3715 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
3716 break;
3717
3718 case RELOAD_FOR_INPUT:
3719 SET_HARD_REG_BIT (reload_reg_used_in_input, i);
3720 break;
3721
3722 case RELOAD_FOR_OUTPUT:
3723 SET_HARD_REG_BIT (reload_reg_used_in_output, i);
3724 break;
3725 }
3726
3727 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
3728 }
3729}
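/* A multi-word MODE marks several consecutive hard regs here: if
   HARD_REGNO_NREGS (REGNO, MODE) is 2, both REGNO and REGNO + 1 are
   entered in the set for WHEN_NEEDED and in reload_reg_used_at_all.  */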
3730
3731/* 1 if reg REGNO is free as a reload reg for a reload of the sort
3732 specified by WHEN_NEEDED. */
3733
3734static int
3735reload_reg_free_p (regno, when_needed)
3736 int regno;
3737 enum reload_when_needed when_needed;
3738{
3739 /* In use for a RELOAD_OTHER means it's not available for anything. */
3740 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
3741 return 0;
3742 switch (when_needed)
3743 {
3744 case RELOAD_OTHER:
3745 /* In use for anything means not available for a RELOAD_OTHER. */
3746 return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);
3747
3748 /* The other kinds of use can sometimes share a register. */
3749 case RELOAD_FOR_INPUT:
3750 return (! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno)
3751 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3752 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno));
3753 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3754 return (! TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno)
3755 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno));
3756 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3757 return (! TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno)
3758 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3759 case RELOAD_FOR_OPERAND_ADDRESS:
3760 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3761 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno)
3762 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3763 case RELOAD_FOR_OUTPUT:
3764 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3765 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno)
3766 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3767 }
3768 abort ();
3769}
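/* Note the sharing this permits: for instance, a RELOAD_FOR_INPUT and a
   RELOAD_FOR_OUTPUT reload may use the same register, since neither
   tests the other's set, whereas RELOAD_OTHER excludes and is excluded
   by every other kind of use.  */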
3770
3771/* Return 1 if the value in reload reg REGNO, as used by a reload
3772 needed for the part of the insn specified by WHEN_NEEDED,
3773 is not in use for a reload in any prior part of the insn.
3774
3775 We can assume that the reload reg was already tested for availability
3776 at the time it is needed, and we should not check this again,
3777 in case the reg has already been marked in use. */
3778
3779static int
3780reload_reg_free_before_p (regno, when_needed)
3781 int regno;
3782 enum reload_when_needed when_needed;
3783{
3784 switch (when_needed)
3785 {
3786 case RELOAD_OTHER:
3787 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
3788 its use starts from the beginning, so nothing can use it earlier. */
3789 return 1;
3790
3791 /* If this use is for part of the insn,
3792 check the reg is not in use for any prior part. */
3793 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3794 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
3795 return 0;
3796 case RELOAD_FOR_OUTPUT:
3797 if (TEST_HARD_REG_BIT (reload_reg_used_in_input, regno))
3798 return 0;
3799 case RELOAD_FOR_OPERAND_ADDRESS:
3800 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno))
3801 return 0;
3802 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3803 case RELOAD_FOR_INPUT:
3804 return 1;
3805 }
3806 abort ();
3807}
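/* The case labels above fall through deliberately: each kind of use also
   performs the checks of the kinds listed below it, reflecting the order
   in which the parts of an insn consume their reload registers.  */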
3808
3809/* Return 1 if the value in reload reg REGNO, as used by a reload
3810 needed for the part of the insn specified by WHEN_NEEDED,
3811 is still available in REGNO at the end of the insn.
3812
3813 We can assume that the reload reg was already tested for availability
3814 at the time it is needed, and we should not check this again,
3815 in case the reg has already been marked in use. */
3816
3817static int
3818reload_reg_reaches_end_p (regno, when_needed)
3819 int regno;
3820 enum reload_when_needed when_needed;
3821{
3822 switch (when_needed)
3823 {
3824 case RELOAD_OTHER:
3825 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
3826 its value must reach the end. */
3827 return 1;
3828
3829 /* If this use is for part of the insn,
3830 its value reaches if no subsequent part uses the same register. */
3831 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3832 case RELOAD_FOR_INPUT:
3833 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3834 || TEST_HARD_REG_BIT (reload_reg_used_in_output, regno))
3835 return 0;
3836 case RELOAD_FOR_OPERAND_ADDRESS:
3837 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno))
3838 return 0;
3839 case RELOAD_FOR_OUTPUT:
3840 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3841 return 1;
3842 }
3843 abort ();
3844}
3845\f
3846/* Vector of reload-numbers showing the order in which the reloads should
3847 be processed. */
3848short reload_order[MAX_RELOADS];
3849
3850/* Indexed by reload number, 1 if incoming value
3851 inherited from previous insns. */
3852char reload_inherited[MAX_RELOADS];
3853
3854/* For an inherited reload, this is the insn the reload was inherited from,
3855 if we know it. Otherwise, this is 0. */
3856rtx reload_inheritance_insn[MAX_RELOADS];
3857
3858/* If non-zero, this is a place to get the value of the reload,
3859 rather than using reload_in. */
3860rtx reload_override_in[MAX_RELOADS];
3861
3862/* For each reload, the index in spill_regs of the spill register used,
3863 or -1 if we did not need one of the spill registers for this reload. */
3864int reload_spill_index[MAX_RELOADS];
3865
3866/* Index of last register assigned as a spill register. We allocate in
 3867 a round-robin fashion. */
3868
 3869 static int last_spill_reg = 0;
3870
3871/* Find a spill register to use as a reload register for reload R.
3872 LAST_RELOAD is non-zero if this is the last reload for the insn being
3873 processed.
3874
3875 Set reload_reg_rtx[R] to the register allocated.
3876
3877 If NOERROR is nonzero, we return 1 if successful,
3878 or 0 if we couldn't find a spill reg and we didn't change anything. */
3879
3880static int
3881allocate_reload_reg (r, insn, last_reload, noerror)
3882 int r;
3883 rtx insn;
3884 int last_reload;
3885 int noerror;
3886{
3887 int i;
3888 int pass;
3889 int count;
3890 rtx new;
3891 int regno;
3892
3893 /* If we put this reload ahead, thinking it is a group,
3894 then insist on finding a group. Otherwise we can grab a
a8fdc208 3895 reg that some other reload needs.
3896 (That can happen when we have a 68000 DATA_OR_FP_REG
3897 which is a group of data regs or one fp reg.)
3898 We need not be so restrictive if there are no more reloads
3899 for this insn.
3900
3901 ??? Really it would be nicer to have smarter handling
3902 for that kind of reg class, where a problem like this is normal.
3903 Perhaps those classes should be avoided for reloading
3904 by use of more alternatives. */
3905
3906 int force_group = reload_nregs[r] > 1 && ! last_reload;
3907
3908 /* If we want a single register and haven't yet found one,
3909 take any reg in the right class and not in use.
3910 If we want a consecutive group, here is where we look for it.
3911
3912 We use two passes so we can first look for reload regs to
3913 reuse, which are already in use for other reloads in this insn,
3914 and only then use additional registers.
3915 I think that maximizing reuse is needed to make sure we don't
3916 run out of reload regs. Suppose we have three reloads, and
3917 reloads A and B can share regs. These need two regs.
3918 Suppose A and B are given different regs.
3919 That leaves none for C. */
3920 for (pass = 0; pass < 2; pass++)
3921 {
3922 /* I is the index in spill_regs.
3923 We advance it round-robin between insns to use all spill regs
3924 equally, so that inherited reloads have a chance
3925 of leapfrogging each other. */
3926
3927 for (count = 0, i = last_spill_reg; count < n_spills; count++)
3928 {
3929 int class = (int) reload_reg_class[r];
3930
3931 i = (i + 1) % n_spills;
3932
3933 if (reload_reg_free_p (spill_regs[i], reload_when_needed[r])
3934 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
3935 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
3936 /* Look first for regs to share, then for unshared. */
3937 && (pass || TEST_HARD_REG_BIT (reload_reg_used_at_all,
3938 spill_regs[i])))
3939 {
3940 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
3941 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
3942 (on 68000) got us two FP regs. If NR is 1,
3943 we would reject both of them. */
3944 if (force_group)
3945 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
3946 /* If we need only one reg, we have already won. */
3947 if (nr == 1)
3948 {
3949 /* But reject a single reg if we demand a group. */
3950 if (force_group)
3951 continue;
3952 break;
3953 }
3954 /* Otherwise check that as many consecutive regs as we need
3955 are available here.
3956 Also, don't use for a group registers that are
3957 needed for nongroups. */
3958 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
3959 while (nr > 1)
3960 {
3961 regno = spill_regs[i] + nr - 1;
3962 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
3963 && spill_reg_order[regno] >= 0
3964 && reload_reg_free_p (regno, reload_when_needed[r])
3965 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
3966 regno)))
3967 break;
3968 nr--;
3969 }
3970 if (nr == 1)
3971 break;
3972 }
3973 }
3974
3975 /* If we found something on pass 1, omit pass 2. */
3976 if (count < n_spills)
3977 break;
3978 }
3979
3980 /* We should have found a spill register by now. */
3981 if (count == n_spills)
3982 {
3983 if (noerror)
3984 return 0;
3985 abort ();
3986 }
3987
3988 last_spill_reg = i;
3989
3990 /* Mark as in use for this insn the reload regs we use for this. */
3991 mark_reload_reg_in_use (spill_regs[i], reload_when_needed[r],
3992 reload_mode[r]);
3993
3994 new = spill_reg_rtx[i];
3995
3996 if (new == 0 || GET_MODE (new) != reload_mode[r])
3997 spill_reg_rtx[i] = new = gen_rtx (REG, reload_mode[r], spill_regs[i]);
3998
3999 reload_reg_rtx[r] = new;
4000 reload_spill_index[r] = i;
4001 regno = true_regnum (new);
4002
4003 /* Detect when the reload reg can't hold the reload mode.
 4004 This used to be one `if', but the Sequent compiler can't handle that. */
4005 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4006 {
4007 enum machine_mode test_mode = VOIDmode;
4008 if (reload_in[r])
4009 test_mode = GET_MODE (reload_in[r]);
4010 /* If reload_in[r] has VOIDmode, it means we will load it
4011 in whatever mode the reload reg has: to wit, reload_mode[r].
4012 We have already tested that for validity. */
4013 /* Aside from that, we need to test that the expressions
4014 to reload from or into have modes which are valid for this
4015 reload register. Otherwise the reload insns would be invalid. */
4016 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4017 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4018 if (! (reload_out[r] != 0
4019 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4020 /* The reg is OK. */
4021 return 1;
4022 }
4023
4024 /* The reg is not OK. */
4025 if (noerror)
4026 return 0;
4027
4028 if (asm_noperands (PATTERN (insn)) < 0)
4029 /* It's the compiler's fault. */
4030 abort ();
4031
4032 /* It's the user's fault; the operand's mode and constraint
4033 don't match. Disable this reload so we don't crash in final. */
4034 error_for_asm (insn,
4035 "`asm' operand constraint incompatible with operand size");
4036 reload_in[r] = 0;
4037 reload_out[r] = 0;
4038 reload_reg_rtx[r] = 0;
4039 reload_optional[r] = 1;
4040 reload_secondary_p[r] = 1;
4041
4042 return 1;
4043}
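/* When a suitable spill register is found, reload_reg_rtx[R] is set to a
   REG in reload_mode[R], reload_spill_index[R] records the index into
   spill_regs, and last_spill_reg is updated so the next allocation
   resumes the round-robin scan just past this register.  */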
4044\f
4045/* Assign hard reg targets for the pseudo-registers we must reload
4046 into hard regs for this insn.
4047 Also output the instructions to copy them in and out of the hard regs.
4048
4049 For machines with register classes, we are responsible for
4050 finding a reload reg in the proper class. */
4051
4052static void
4053choose_reload_regs (insn, avoid_return_reg)
4054 rtx insn;
4055 /* This argument is currently ignored. */
4056 rtx avoid_return_reg;
4057{
4058 register int i, j;
4059 int max_group_size = 1;
4060 enum reg_class group_class = NO_REGS;
4061 int inheritance;
4062
4063 rtx save_reload_reg_rtx[MAX_RELOADS];
4064 char save_reload_inherited[MAX_RELOADS];
4065 rtx save_reload_inheritance_insn[MAX_RELOADS];
4066 rtx save_reload_override_in[MAX_RELOADS];
4067 int save_reload_spill_index[MAX_RELOADS];
4068 HARD_REG_SET save_reload_reg_used;
4069 HARD_REG_SET save_reload_reg_used_in_input_addr;
4070 HARD_REG_SET save_reload_reg_used_in_output_addr;
4071 HARD_REG_SET save_reload_reg_used_in_op_addr;
4072 HARD_REG_SET save_reload_reg_used_in_input;
4073 HARD_REG_SET save_reload_reg_used_in_output;
4074 HARD_REG_SET save_reload_reg_used_at_all;
4075
4076 bzero (reload_inherited, MAX_RELOADS);
4077 bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4078 bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));
4079
4080 CLEAR_HARD_REG_SET (reload_reg_used);
4081 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4082 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr);
4083 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr);
4084 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4085 CLEAR_HARD_REG_SET (reload_reg_used_in_output);
4086 CLEAR_HARD_REG_SET (reload_reg_used_in_input);
4087
4088 /* Distinguish output-only and input-only reloads
4089 because they can overlap with other things. */
4090 for (j = 0; j < n_reloads; j++)
4091 if (reload_when_needed[j] == RELOAD_OTHER
4092 && ! reload_needed_for_multiple[j])
4093 {
4094 if (reload_in[j] == 0)
4095 {
4096 /* But earlyclobber operands must stay as RELOAD_OTHER. */
4097 for (i = 0; i < n_earlyclobbers; i++)
4098 if (rtx_equal_p (reload_out[j], reload_earlyclobbers[i]))
4099 break;
4100 if (i == n_earlyclobbers)
4101 reload_when_needed[j] = RELOAD_FOR_OUTPUT;
4102 }
4103 if (reload_out[j] == 0)
4104 reload_when_needed[j] = RELOAD_FOR_INPUT;
4105
4106 if (reload_secondary_reload[j] >= 0
4107 && ! reload_needed_for_multiple[reload_secondary_reload[j]])
4108 reload_when_needed[reload_secondary_reload[j]]
4109 = reload_when_needed[j];
4110 }
4111
4112#ifdef SMALL_REGISTER_CLASSES
4113 /* Don't bother with avoiding the return reg
4114 if we have no mandatory reload that could use it. */
4115 if (avoid_return_reg)
4116 {
4117 int do_avoid = 0;
4118 int regno = REGNO (avoid_return_reg);
4119 int nregs
4120 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4121 int r;
4122
4123 for (r = regno; r < regno + nregs; r++)
4124 if (spill_reg_order[r] >= 0)
4125 for (j = 0; j < n_reloads; j++)
4126 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4127 && (reload_in[j] != 0 || reload_out[j] != 0
4128 || reload_secondary_p[j])
4129 &&
4130 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4131 do_avoid = 1;
4132 if (!do_avoid)
4133 avoid_return_reg = 0;
4134 }
4135#endif /* SMALL_REGISTER_CLASSES */
4136
4137#if 0 /* Not needed, now that we can always retry without inheritance. */
4138 /* See if we have more mandatory reloads than spill regs.
4139 If so, then we cannot risk optimizations that could prevent
a8fdc208 4140 reloads from sharing one spill register.
4141
4142 Since we will try finding a better register than reload_reg_rtx
4143 unless it is equal to reload_in or reload_out, count such reloads. */
4144
4145 {
4146 int tem = 0;
4147#ifdef SMALL_REGISTER_CLASSES
4148 int tem = (avoid_return_reg != 0);
a8fdc208 4149#endif
4150 for (j = 0; j < n_reloads; j++)
4151 if (! reload_optional[j]
4152 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4153 && (reload_reg_rtx[j] == 0
4154 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4155 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4156 tem++;
4157 if (tem > n_spills)
4158 must_reuse = 1;
4159 }
4160#endif
4161
4162#ifdef SMALL_REGISTER_CLASSES
4163 /* Don't use the subroutine call return reg for a reload
4164 if we are supposed to avoid it. */
4165 if (avoid_return_reg)
4166 {
4167 int regno = REGNO (avoid_return_reg);
4168 int nregs
4169 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4170 int r;
4171
4172 for (r = regno; r < regno + nregs; r++)
4173 if (spill_reg_order[r] >= 0)
4174 SET_HARD_REG_BIT (reload_reg_used, r);
4175 }
4176#endif /* SMALL_REGISTER_CLASSES */
4177
4178 /* In order to be certain of getting the registers we need,
4179 we must sort the reloads into order of increasing register class.
4180 Then our grabbing of reload registers will parallel the process
a8fdc208 4181 that provided the reload registers.
4182
4183 Also note whether any of the reloads wants a consecutive group of regs.
4184 If so, record the maximum size of the group desired and what
4185 register class contains all the groups needed by this insn. */
4186
4187 for (j = 0; j < n_reloads; j++)
4188 {
4189 reload_order[j] = j;
4190 reload_spill_index[j] = -1;
4191
4192 reload_mode[j]
4193 = (reload_strict_low[j] && reload_out[j]
4194 ? GET_MODE (SUBREG_REG (reload_out[j]))
4195 : (reload_inmode[j] == VOIDmode
4196 || (GET_MODE_SIZE (reload_outmode[j])
4197 > GET_MODE_SIZE (reload_inmode[j])))
4198 ? reload_outmode[j] : reload_inmode[j]);
4199
4200 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4201
4202 if (reload_nregs[j] > 1)
4203 {
4204 max_group_size = MAX (reload_nregs[j], max_group_size);
4205 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4206 }
4207
4208 /* If we have already decided to use a certain register,
4209 don't use it in another way. */
4210 if (reload_reg_rtx[j])
4211 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]),
4212 reload_when_needed[j], reload_mode[j]);
4213 }
4214
4215 if (n_reloads > 1)
4216 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
4217
4218 bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
4219 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4220 bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
4221 sizeof reload_inheritance_insn);
4222 bcopy (reload_override_in, save_reload_override_in,
4223 sizeof reload_override_in);
4224 bcopy (reload_spill_index, save_reload_spill_index,
4225 sizeof reload_spill_index);
4226 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4227 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
4228 COPY_HARD_REG_SET (save_reload_reg_used_in_output,
4229 reload_reg_used_in_output);
4230 COPY_HARD_REG_SET (save_reload_reg_used_in_input,
4231 reload_reg_used_in_input);
4232 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr,
4233 reload_reg_used_in_input_addr);
4234 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr,
4235 reload_reg_used_in_output_addr);
4236 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4237 reload_reg_used_in_op_addr);
4238
4239 /* Try first with inheritance, then turning it off. */
4240
4241 for (inheritance = 1; inheritance >= 0; inheritance--)
4242 {
4243 /* Process the reloads in order of preference just found.
4244 Beyond this point, subregs can be found in reload_reg_rtx.
4245
4246 This used to look for an existing reloaded home for all
4247 of the reloads, and only then perform any new reloads.
4248 But that could lose if the reloads were done out of reg-class order
4249 because a later reload with a looser constraint might have an old
4250 home in a register needed by an earlier reload with a tighter constraint.
4251
4252 To solve this, we make two passes over the reloads, in the order
4253 described above. In the first pass we try to inherit a reload
4254 from a previous insn. If there is a later reload that needs a
4255 class that is a proper subset of the class being processed, we must
4256 also allocate a spill register during the first pass.
4257
4258 Then make a second pass over the reloads to allocate any reloads
4259 that haven't been given registers yet. */
4260
4261 for (j = 0; j < n_reloads; j++)
4262 {
4263 register int r = reload_order[j];
4264
4265 /* Ignore reloads that got marked inoperative. */
4266 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4267 continue;
4268
 4269 /* If find_reloads chose to use reload_in or reload_out as a reload
 4270 register, we don't need to choose one. Otherwise, try even if it found
4271 one since we might save an insn if we find the value lying around. */
4272 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
4273 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
4274 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
4275 continue;
4276
4277#if 0 /* No longer needed for correct operation.
4278 It might give better code, or might not; worth an experiment? */
4279 /* If this is an optional reload, we can't inherit from earlier insns
4280 until we are sure that any non-optional reloads have been allocated.
4281 The following code takes advantage of the fact that optional reloads
4282 are at the end of reload_order. */
4283 if (reload_optional[r] != 0)
4284 for (i = 0; i < j; i++)
4285 if ((reload_out[reload_order[i]] != 0
4286 || reload_in[reload_order[i]] != 0
4287 || reload_secondary_p[reload_order[i]])
4288 && ! reload_optional[reload_order[i]]
4289 && reload_reg_rtx[reload_order[i]] == 0)
4290 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
4291#endif
4292
4293 /* First see if this pseudo is already available as reloaded
4294 for a previous insn. We cannot try to inherit for reloads
4295 that are smaller than the maximum number of registers needed
4296 for groups unless the register we would allocate cannot be used
4297 for the groups.
4298
4299 We could check here to see if this is a secondary reload for
4300 an object that is already in a register of the desired class.
4301 This would avoid the need for the secondary reload register.
4302 But this is complex because we can't easily determine what
4303 objects might want to be loaded via this reload. So let a register
4304 be allocated here. In `emit_reload_insns' we suppress one of the
4305 loads in the case described above. */
4306
4307 if (inheritance)
4308 {
4309 register int regno = -1;
4310
4311 if (reload_in[r] == 0)
4312 ;
4313 else if (GET_CODE (reload_in[r]) == REG)
4314 regno = REGNO (reload_in[r]);
4315 else if (GET_CODE (reload_in_reg[r]) == REG)
4316 regno = REGNO (reload_in_reg[r]);
4317#if 0
4318 /* This won't work, since REGNO can be a pseudo reg number.
4319 Also, it takes much more hair to keep track of all the things
4320 that can invalidate an inherited reload of part of a pseudoreg. */
4321 else if (GET_CODE (reload_in[r]) == SUBREG
4322 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
4323 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
4324#endif
4325
4326 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
4327 {
4328 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
4329
4330 if (reg_reloaded_contents[i] == regno
4331 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4332 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4333 spill_regs[i])
4334 && (reload_nregs[r] == max_group_size
4335 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
4336 spill_regs[i]))
4337 && reload_reg_free_p (spill_regs[i], reload_when_needed[r])
4338 && reload_reg_free_before_p (spill_regs[i],
4339 reload_when_needed[r]))
4340 {
4341 /* If a group is needed, verify that all the subsequent
4342 registers still have their values intact. */
4343 int nr
4344 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4345 int k;
4346
4347 for (k = 1; k < nr; k++)
4348 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
4349 != regno)
4350 break;
4351
4352 if (k == nr)
4353 {
4354 /* Mark the register as in use for this part of
4355 the insn. */
4356 mark_reload_reg_in_use (spill_regs[i],
4357 reload_when_needed[r],
4358 reload_mode[r]);
4359 reload_reg_rtx[r] = reg_last_reload_reg[regno];
4360 reload_inherited[r] = 1;
4361 reload_inheritance_insn[r] = reg_reloaded_insn[i];
4362 reload_spill_index[r] = i;
4363 }
4364 }
4365 }
4366 }
4367
4368 /* Here's another way to see if the value is already lying around. */
4369 if (inheritance
4370 && reload_in[r] != 0
4371 && ! reload_inherited[r]
4372 && reload_out[r] == 0
4373 && (CONSTANT_P (reload_in[r])
4374 || GET_CODE (reload_in[r]) == PLUS
4375 || GET_CODE (reload_in[r]) == REG
4376 || GET_CODE (reload_in[r]) == MEM)
4377 && (reload_nregs[r] == max_group_size
4378 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
4379 {
4380 register rtx equiv
4381 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
fb3821f7 4382 -1, NULL_PTR, 0, reload_mode[r]);
32131a9c
RK
4383 int regno;
4384
4385 if (equiv != 0)
4386 {
4387 if (GET_CODE (equiv) == REG)
4388 regno = REGNO (equiv);
4389 else if (GET_CODE (equiv) == SUBREG)
4390 {
4391 regno = REGNO (SUBREG_REG (equiv));
4392 if (regno < FIRST_PSEUDO_REGISTER)
4393 regno += SUBREG_WORD (equiv);
4394 }
4395 else
4396 abort ();
4397 }
4398
4399 /* If we found a spill reg, reject it unless it is free
4400 and of the desired class. */
4401 if (equiv != 0
4402 && ((spill_reg_order[regno] >= 0
4403 && ! reload_reg_free_before_p (regno,
4404 reload_when_needed[r]))
4405 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4406 regno)))
4407 equiv = 0;
4408
4409 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
4410 equiv = 0;
4411
4412 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4413 equiv = 0;
4414
4415 /* We found a register that contains the value we need.
4416 If this register is the same as an `earlyclobber' operand
4417 of the current insn, just mark it as a place to reload from
4418 since we can't use it as the reload register itself. */
4419
4420 if (equiv != 0)
4421 for (i = 0; i < n_earlyclobbers; i++)
4422 if (reg_overlap_mentioned_for_reload_p (equiv,
4423 reload_earlyclobbers[i]))
4424 {
4425 reload_override_in[r] = equiv;
4426 equiv = 0;
4427 break;
4428 }
4429
4430 /* JRV: If the equiv register we have found is explicitly
4431 clobbered in the current insn, mark but don't use, as above. */
4432
4433 if (equiv != 0 && regno_clobbered_p (regno, insn))
4434 {
4435 reload_override_in[r] = equiv;
4436 equiv = 0;
4437 }
4438
4439 /* If we found an equivalent reg, say no code need be generated
4440 to load it, and use it as our reload reg. */
4441 if (equiv != 0 && regno != FRAME_POINTER_REGNUM)
4442 {
4443 reload_reg_rtx[r] = equiv;
4444 reload_inherited[r] = 1;
4445 /* If it is a spill reg,
4446 mark the spill reg as in use for this insn. */
4447 i = spill_reg_order[regno];
4448 if (i >= 0)
4449 mark_reload_reg_in_use (regno, reload_when_needed[r],
4450 reload_mode[r]);
4451 }
4452 }
4453
4454 /* If we found a register to use already, or if this is an optional
4455 reload, we are done. */
4456 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
4457 continue;
4458
4459#if 0 /* No longer needed for correct operation. Might or might not
4460 give better code on the average. Want to experiment? */
4461
4462 /* See if there is a later reload that has a class different from our
4463 class that intersects our class or that requires less register
4464 than our reload. If so, we must allocate a register to this
4465 reload now, since that reload might inherit a previous reload
4466 and take the only available register in our class. Don't do this
4467 for optional reloads since they will force all previous reloads
4468 to be allocated. Also don't do this for reloads that have been
4469 turned off. */
4470
4471 for (i = j + 1; i < n_reloads; i++)
4472 {
4473 int s = reload_order[i];
4474
4475 if ((reload_in[s] == 0 && reload_out[s] == 0
4476 && ! reload_secondary_p[s])
4477 || reload_optional[s])
4478 continue;
4479
4480 if ((reload_reg_class[s] != reload_reg_class[r]
4481 && reg_classes_intersect_p (reload_reg_class[r],
4482 reload_reg_class[s]))
4483 || reload_nregs[s] < reload_nregs[r])
4484 break;
4485 }
4486
4487 if (i == n_reloads)
4488 continue;
4489
4490 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
4491#endif
4492 }
4493
4494 /* Now allocate reload registers for anything non-optional that
4495 didn't get one yet. */
4496 for (j = 0; j < n_reloads; j++)
4497 {
4498 register int r = reload_order[j];
4499
4500 /* Ignore reloads that got marked inoperative. */
4501 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4502 continue;
4503
4504 /* Skip reloads that already have a register allocated or are
4505 optional. */
4506 if (reload_reg_rtx[r] != 0 || reload_optional[r])
4507 continue;
4508
4509 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
4510 break;
4511 }
4512
4513 /* If that loop got all the way, we have won. */
4514 if (j == n_reloads)
4515 break;
4516
4517 fail:
4518 /* Loop around and try without any inheritance. */
4519 /* First undo everything done by the failed attempt
4520 to allocate with inheritance. */
4521 bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
4522 bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
4523 bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
4524 sizeof reload_inheritance_insn);
4525 bcopy (save_reload_override_in, reload_override_in,
4526 sizeof reload_override_in);
4527 bcopy (save_reload_spill_index, reload_spill_index,
4528 sizeof reload_spill_index);
4529 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
4530 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
4531 COPY_HARD_REG_SET (reload_reg_used_in_input,
4532 save_reload_reg_used_in_input);
4533 COPY_HARD_REG_SET (reload_reg_used_in_output,
4534 save_reload_reg_used_in_output);
4535 COPY_HARD_REG_SET (reload_reg_used_in_input_addr,
4536 save_reload_reg_used_in_input_addr);
4537 COPY_HARD_REG_SET (reload_reg_used_in_output_addr,
4538 save_reload_reg_used_in_output_addr);
4539 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
4540 save_reload_reg_used_in_op_addr);
4541 }
4542
4543 /* If we thought we could inherit a reload, because it seemed that
4544 nothing else wanted the same reload register earlier in the insn,
4545 verify that assumption, now that all reloads have been assigned. */
4546
4547 for (j = 0; j < n_reloads; j++)
4548 {
4549 register int r = reload_order[j];
4550
4551 if (reload_inherited[r] && reload_reg_rtx[r] != 0
4552 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
4553 reload_when_needed[r]))
4554 reload_inherited[r] = 0;
4555
4556 /* If we found a better place to reload from,
4557 validate it in the same fashion, if it is a reload reg. */
4558 if (reload_override_in[r]
4559 && (GET_CODE (reload_override_in[r]) == REG
4560 || GET_CODE (reload_override_in[r]) == SUBREG))
4561 {
4562 int regno = true_regnum (reload_override_in[r]);
4563 if (spill_reg_order[regno] >= 0
4564 && ! reload_reg_free_before_p (regno, reload_when_needed[r]))
4565 reload_override_in[r] = 0;
4566 }
4567 }
4568
4569 /* Now that reload_override_in is known valid,
4570 actually override reload_in. */
4571 for (j = 0; j < n_reloads; j++)
4572 if (reload_override_in[j])
4573 reload_in[j] = reload_override_in[j];
4574
4575 /* If this reload won't be done because it has been cancelled or is
4576 optional and not inherited, clear reload_reg_rtx so other
4577 routines (such as subst_reloads) don't get confused. */
4578 for (j = 0; j < n_reloads; j++)
4579 if ((reload_optional[j] && ! reload_inherited[j])
4580 || (reload_in[j] == 0 && reload_out[j] == 0
4581 && ! reload_secondary_p[j]))
4582 reload_reg_rtx[j] = 0;
4583
4584 /* Record which pseudos and which spill regs have output reloads. */
4585 for (j = 0; j < n_reloads; j++)
4586 {
4587 register int r = reload_order[j];
4588
4589 i = reload_spill_index[r];
4590
4591 /* I is nonneg if this reload used one of the spill regs.
4592 If reload_reg_rtx[r] is 0, this is an optional reload
4593 that we opted to ignore. */
4594 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
4595 && reload_reg_rtx[r] != 0)
4596 {
4597 register int nregno = REGNO (reload_out[r]);
4598 int nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
4599
4600 while (--nr >= 0)
4601 {
4602 reg_has_output_reload[nregno + nr] = 1;
4603 if (i >= 0)
4604 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
4605 }
4606
4607 if (reload_when_needed[r] != RELOAD_OTHER
4608 && reload_when_needed[r] != RELOAD_FOR_OUTPUT)
4609 abort ();
4610 }
4611 }
4612}
4613\f
4614/* Output insns to reload values in and out of the chosen reload regs. */
4615
4616static void
4617emit_reload_insns (insn)
4618 rtx insn;
4619{
4620 register int j;
4621 rtx following_insn = NEXT_INSN (insn);
a8efe40d 4622 rtx before_insn = insn;
4623 rtx first_output_reload_insn = NEXT_INSN (insn);
4624 rtx first_other_reload_insn = insn;
4625 rtx first_operand_address_reload_insn = insn;
4626 int special;
4627 /* Values to be put in spill_reg_store are put here first. */
4628 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
4629
d45cf215 4630 /* If this is a CALL_INSN preceded by USE insns, any reload insns
4631 must go in front of the first USE insn, not in front of INSN. */
4632
4633 if (GET_CODE (insn) == CALL_INSN && GET_CODE (PREV_INSN (insn)) == INSN
4634 && GET_CODE (PATTERN (PREV_INSN (insn))) == USE)
4635 while (GET_CODE (PREV_INSN (before_insn)) == INSN
4636 && GET_CODE (PATTERN (PREV_INSN (before_insn))) == USE)
4637 first_other_reload_insn = first_operand_address_reload_insn
4638 = before_insn = PREV_INSN (before_insn);
4639
4640 /* Now output the instructions to copy the data into and out of the
4641 reload registers. Do these in the order that the reloads were reported,
4642 since reloads of base and index registers precede reloads of operands
4643 and the operands may need the base and index registers reloaded. */
4644
4645 for (j = 0; j < n_reloads; j++)
4646 {
4647 register rtx old;
4648 rtx oldequiv_reg = 0;
4649 rtx this_reload_insn = 0;
4650 rtx store_insn = 0;
4651
4652 old = reload_in[j];
4653 if (old != 0 && ! reload_inherited[j]
4654 && ! rtx_equal_p (reload_reg_rtx[j], old)
4655 && reload_reg_rtx[j] != 0)
4656 {
4657 register rtx reloadreg = reload_reg_rtx[j];
4658 rtx oldequiv = 0;
4659 enum machine_mode mode;
4660 rtx where;
d445b551 4661 rtx reload_insn;
4662
4663 /* Determine the mode to reload in.
4664 This is very tricky because we have three to choose from.
4665 There is the mode the insn operand wants (reload_inmode[J]).
4666 There is the mode of the reload register RELOADREG.
4667 There is the intrinsic mode of the operand, which we could find
4668 by stripping some SUBREGs.
4669 It turns out that RELOADREG's mode is irrelevant:
4670 we can change that arbitrarily.
4671
4672 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
4673 then the reload reg may not support QImode moves, so use SImode.
4674 If foo is in memory due to spilling a pseudo reg, this is safe,
4675 because the QImode value is in the least significant part of a
4676 slot big enough for a SImode. If foo is some other sort of
4677 memory reference, then it is impossible to reload this case,
4678 so previous passes had better make sure this never happens.
4679
4680 Then consider a one-word union which has SImode and one of its
4681 members is a float, being fetched as (SUBREG:SF union:SI).
4682 We must fetch that as SFmode because we could be loading into
4683 a float-only register. In this case OLD's mode is correct.
4684
4685 Consider an immediate integer: it has VOIDmode. Here we need
4686 to get a mode from something else.
4687
4688 In some cases, there is a fourth mode, the operand's
4689 containing mode. If the insn specifies a containing mode for
4690 this operand, it overrides all others.
4691
4692 I am not sure whether the algorithm here is always right,
4693 but it does the right things in those cases. */
4694
4695 mode = GET_MODE (old);
4696 if (mode == VOIDmode)
4697 mode = reload_inmode[j];
4698 if (reload_strict_low[j])
4699 mode = GET_MODE (SUBREG_REG (reload_in[j]));
4700
4701#ifdef SECONDARY_INPUT_RELOAD_CLASS
4702 /* If we need a secondary register for this operation, see if
4703 the value is already in a register in that class. Don't
4704 do this if the secondary register will be used as a scratch
4705 register. */
4706
4707 if (reload_secondary_reload[j] >= 0
4708 && reload_secondary_icode[j] == CODE_FOR_nothing)
4709 oldequiv
4710 = find_equiv_reg (old, insn,
4711 reload_reg_class[reload_secondary_reload[j]],
fb3821f7 4712 -1, NULL_PTR, 0, mode);
4713#endif
4714
4715 /* If reloading from memory, see if there is a register
4716 that already holds the same value. If so, reload from there.
4717 We can pass 0 as the reload_reg_p argument because
4718 any other reload has either already been emitted,
4719 in which case find_equiv_reg will see the reload-insn,
4720 or has yet to be emitted, in which case it doesn't matter
4721 because we will use this equiv reg right away. */
4722
4723 if (oldequiv == 0
4724 && (GET_CODE (old) == MEM
4725 || (GET_CODE (old) == REG
4726 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4727 && reg_renumber[REGNO (old)] < 0)))
4728 oldequiv = find_equiv_reg (old, insn, GENERAL_REGS,
fb3821f7 4729 -1, NULL_PTR, 0, mode);
4730
4731 if (oldequiv)
4732 {
4733 int regno = true_regnum (oldequiv);
4734
4735 /* If OLDEQUIV is a spill register, don't use it for this
4736 if any other reload needs it at an earlier stage of this insn
a8fdc208 4737 or at this stage. */
4738 if (spill_reg_order[regno] >= 0
4739 && (! reload_reg_free_p (regno, reload_when_needed[j])
4740 || ! reload_reg_free_before_p (regno,
4741 reload_when_needed[j])))
4742 oldequiv = 0;
4743
4744 /* If OLDEQUIV is not a spill register,
4745 don't use it if any other reload wants it. */
4746 if (spill_reg_order[regno] < 0)
4747 {
4748 int k;
4749 for (k = 0; k < n_reloads; k++)
4750 if (reload_reg_rtx[k] != 0 && k != j
4751 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
4752 oldequiv))
4753 {
4754 oldequiv = 0;
4755 break;
4756 }
4757 }
4758 }
4759
4760 if (oldequiv == 0)
4761 oldequiv = old;
4762 else if (GET_CODE (oldequiv) == REG)
4763 oldequiv_reg = oldequiv;
4764 else if (GET_CODE (oldequiv) == SUBREG)
4765 oldequiv_reg = SUBREG_REG (oldequiv);
4766
4767 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
4768 then load RELOADREG from OLDEQUIV. */
4769
4770 if (GET_MODE (reloadreg) != mode)
4771 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
4772 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
4773 oldequiv = SUBREG_REG (oldequiv);
4774 if (GET_MODE (oldequiv) != VOIDmode
4775 && mode != GET_MODE (oldequiv))
4776 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
4777
4778 /* Decide where to put reload insn for this reload. */
4779 switch (reload_when_needed[j])
4780 {
4781 case RELOAD_FOR_INPUT:
4782 case RELOAD_OTHER:
4783 where = first_operand_address_reload_insn;
4784 break;
4785 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
4786 where = first_other_reload_insn;
4787 break;
4788 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
4789 where = first_output_reload_insn;
4790 break;
4791 case RELOAD_FOR_OPERAND_ADDRESS:
a8efe40d 4792 where = before_insn;
32131a9c
RK
4793 }
4794
4795 special = 0;
4796
4797 /* Auto-increment addresses must be reloaded in a special way. */
4798 if (GET_CODE (oldequiv) == POST_INC
4799 || GET_CODE (oldequiv) == POST_DEC
4800 || GET_CODE (oldequiv) == PRE_INC
4801 || GET_CODE (oldequiv) == PRE_DEC)
4802 {
 4803	  /* We are not going to bother supporting the case where an
 4804	     incremented register can't be copied directly from
4805 OLDEQUIV since this seems highly unlikely. */
4806 if (reload_secondary_reload[j] >= 0)
4807 abort ();
4808 /* Prevent normal processing of this reload. */
4809 special = 1;
4810 /* Output a special code sequence for this case. */
4811 this_reload_insn
4812 = inc_for_reload (reloadreg, oldequiv, reload_inc[j], where);
4813 }
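	  /* Hypothetical example of the case just handled above: reloading
	     (post_inc:SI (reg:SI r)) with reload_inc[j] == 4 makes
	     inc_for_reload (defined below) copy r into RELOADREG and then
	     add 4 to r, so the insn sees the pre-increment value while the
	     register still ends up incremented.  */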
4814
4815 /* If we are reloading a pseudo-register that was set by the previous
4816 insn, see if we can get rid of that pseudo-register entirely
4817 by redirecting the previous insn into our reload register. */
4818
4819 else if (optimize && GET_CODE (old) == REG
4820 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4821 && dead_or_set_p (insn, old)
4822 /* This is unsafe if some other reload
4823 uses the same reg first. */
4824 && (reload_when_needed[j] == RELOAD_OTHER
4825 || reload_when_needed[j] == RELOAD_FOR_INPUT
4826 || reload_when_needed[j] == RELOAD_FOR_INPUT_RELOAD_ADDRESS))
4827 {
4828 rtx temp = PREV_INSN (insn);
4829 while (temp && GET_CODE (temp) == NOTE)
4830 temp = PREV_INSN (temp);
4831 if (temp
4832 && GET_CODE (temp) == INSN
4833 && GET_CODE (PATTERN (temp)) == SET
4834 && SET_DEST (PATTERN (temp)) == old
4835 /* Make sure we can access insn_operand_constraint. */
4836 && asm_noperands (PATTERN (temp)) < 0
4837 /* This is unsafe if prev insn rejects our reload reg. */
4838 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
4839 reloadreg)
4840 /* This is unsafe if operand occurs more than once in current
4841 insn. Perhaps some occurrences aren't reloaded. */
4842 && count_occurrences (PATTERN (insn), old) == 1
4843 /* Don't risk splitting a matching pair of operands. */
4844 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
4845 {
4846 /* Store into the reload register instead of the pseudo. */
4847 SET_DEST (PATTERN (temp)) = reloadreg;
4848 /* If these are the only uses of the pseudo reg,
4849 pretend for GDB it lives in the reload reg we used. */
4850 if (reg_n_deaths[REGNO (old)] == 1
4851 && reg_n_sets[REGNO (old)] == 1)
4852 {
4853 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
4854 alter_reg (REGNO (old), -1);
4855 }
4856 special = 1;
4857 }
4858 }
4859
4860 /* We can't do that, so output an insn to load RELOADREG.
4861 Keep them in the following order:
4862 all reloads for input reload addresses,
4863 all reloads for ordinary input operands,
4864 all reloads for addresses of non-reloaded operands,
4865 the insn being reloaded,
4866 all reloads for addresses of output reloads,
4867 the output reloads. */
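      /* For illustration only (assumed example): for an insn whose one memory
	 input needs both an address reload and an input reload, and whose
	 memory output needs an address reload, the emitted stream is roughly

	    load the input address into its reload reg
	    load the input operand into its reload reg
	    the insn itself
	    load the output address into its reload reg
	    store the output reload reg into the output

	 which matches the ordering listed above.  */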
4868 if (! special)
4869 {
4870#ifdef SECONDARY_INPUT_RELOAD_CLASS
4871 rtx second_reload_reg = 0;
4872 enum insn_code icode;
4873
4874 /* If we have a secondary reload, pick up the secondary register
4875 and icode, if any. If OLDEQUIV and OLD are different or
4876 if this is an in-out reload, recompute whether or not we
4877 still need a secondary register and what the icode should
4878 be. If we still need a secondary register and the class or
4879 icode is different, go back to reloading from OLD if using
4880 OLDEQUIV means that we got the wrong type of register. We
4881 cannot have different class or icode due to an in-out reload
4882 because we don't make such reloads when both the input and
4883 output need secondary reload registers. */
32131a9c
RK
4884
4885 if (reload_secondary_reload[j] >= 0)
4886 {
4887 int secondary_reload = reload_secondary_reload[j];
1554c2c6
RK
4888 rtx real_oldequiv = oldequiv;
4889 rtx real_old = old;
4890
4891 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
4892 and similarly for OLD.
4893 See comments in find_secondary_reload in reload.c. */
4894 if (GET_CODE (oldequiv) == REG
4895 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
4896 && reg_equiv_mem[REGNO (oldequiv)] != 0)
4897 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
4898
4899 if (GET_CODE (old) == REG
4900 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4901 && reg_equiv_mem[REGNO (old)] != 0)
4902 real_old = reg_equiv_mem[REGNO (old)];
4903
32131a9c
RK
4904 second_reload_reg = reload_reg_rtx[secondary_reload];
4905 icode = reload_secondary_icode[j];
4906
d445b551
RK
4907 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
4908 || (reload_in[j] != 0 && reload_out[j] != 0))
4909 {
4910 enum reg_class new_class
4911 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
1554c2c6 4912 mode, real_oldequiv);
32131a9c
RK
4913
4914 if (new_class == NO_REGS)
4915 second_reload_reg = 0;
4916 else
4917 {
4918 enum insn_code new_icode;
4919 enum machine_mode new_mode;
4920
4921 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
4922 REGNO (second_reload_reg)))
1554c2c6 4923 oldequiv = old, real_oldequiv = real_old;
4924 else
4925 {
4926 new_icode = reload_in_optab[(int) mode];
4927 if (new_icode != CODE_FOR_nothing
4928 && ((insn_operand_predicate[(int) new_icode][0]
a8fdc208 4929 && ! ((*insn_operand_predicate[(int) new_icode][0])
32131a9c 4930 (reloadreg, mode)))
4931 || (insn_operand_predicate[(int) new_icode][1]
4932 && ! ((*insn_operand_predicate[(int) new_icode][1])
1554c2c6 4933 (real_oldequiv, mode)))))
4934 new_icode = CODE_FOR_nothing;
4935
4936 if (new_icode == CODE_FOR_nothing)
4937 new_mode = mode;
4938 else
4939 new_mode = insn_operand_mode[new_icode][2];
4940
4941 if (GET_MODE (second_reload_reg) != new_mode)
4942 {
4943 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
4944 new_mode))
1554c2c6 4945 oldequiv = old, real_oldequiv = real_old;
4946 else
4947 second_reload_reg
4948 = gen_rtx (REG, new_mode,
4949 REGNO (second_reload_reg));
4950 }
4951 }
4952 }
4953 }
4954
4955 /* If we still need a secondary reload register, check
4956 to see if it is being used as a scratch or intermediate
4957 register and generate code appropriately. If we need
4958 a scratch register, use REAL_OLDEQUIV since the form of
4959 the insn may depend on the actual address if it is
4960 a MEM. */
32131a9c
RK
4961
4962 if (second_reload_reg)
4963 {
4964 if (icode != CODE_FOR_nothing)
4965 {
d445b551 4966 reload_insn = emit_insn_before (GEN_FCN (icode)
4967 (reloadreg,
4968 real_oldequiv,
4969 second_reload_reg),
4970 where);
4971 if (this_reload_insn == 0)
4972 this_reload_insn = reload_insn;
32131a9c
RK
4973 special = 1;
4974 }
4975 else
4976 {
4977 /* See if we need a scratch register to load the
4978 intermediate register (a tertiary reload). */
4979 enum insn_code tertiary_icode
4980 = reload_secondary_icode[secondary_reload];
4981
4982 if (tertiary_icode != CODE_FOR_nothing)
4983 {
4984 rtx third_reload_reg
4985 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
4986
d445b551
RK
4987 reload_insn
4988 = emit_insn_before ((GEN_FCN (tertiary_icode)
4989 (second_reload_reg,
1554c2c6 4990 real_oldequiv,
4991 third_reload_reg)),
4992 where);
4993 if (this_reload_insn == 0)
4994 this_reload_insn = reload_insn;
32131a9c
RK
4995 }
4996 else
4997 {
d445b551
RK
4998 reload_insn
4999 = gen_input_reload (second_reload_reg,
fe751ebf 5000 oldequiv, where);
d445b551
RK
5001 if (this_reload_insn == 0)
5002 this_reload_insn = reload_insn;
32131a9c
RK
5003 oldequiv = second_reload_reg;
5004 }
5005 }
5006 }
5007 }
5008#endif
5009
5010 if (! special)
d445b551 5011 {
3c3eeea6 5012 reload_insn = gen_input_reload (reloadreg, oldequiv, where);
d445b551
RK
5013 if (this_reload_insn == 0)
5014 this_reload_insn = reload_insn;
5015 }
32131a9c
RK
5016
5017#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5018 /* We may have to make a REG_DEAD note for the secondary reload
5019 register in the insns we just made. Find the last insn that
5020 mentioned the register. */
5021 if (! special && second_reload_reg
5022 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5023 {
5024 rtx prev;
5025
5026 for (prev = where;
5027 prev != PREV_INSN (this_reload_insn);
5028 prev = PREV_INSN (prev))
 5029	  if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
 5030	      && reg_overlap_mentioned_for_reload_p (second_reload_reg,
 5031						     PATTERN (prev)))
5032 {
5033 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5034 second_reload_reg,
5035 REG_NOTES (prev));
5036 break;
5037 }
5038 }
5039#endif
5040 }
5041
5042 /* Update where to put other reload insns. */
5043 if (this_reload_insn)
5044 switch (reload_when_needed[j])
5045 {
5046 case RELOAD_FOR_INPUT:
5047 case RELOAD_OTHER:
5048 if (first_other_reload_insn == first_operand_address_reload_insn)
5049 first_other_reload_insn = this_reload_insn;
5050 break;
5051 case RELOAD_FOR_OPERAND_ADDRESS:
a8efe40d 5052 if (first_operand_address_reload_insn == before_insn)
32131a9c 5053 first_operand_address_reload_insn = this_reload_insn;
a8efe40d 5054 if (first_other_reload_insn == before_insn)
5055 first_other_reload_insn = this_reload_insn;
5056 }
5057
5058 /* reload_inc[j] was formerly processed here. */
5059 }
5060
5061 /* Add a note saying the input reload reg
5062 dies in this insn, if anyone cares. */
5063#ifdef PRESERVE_DEATH_INFO_REGNO_P
5064 if (old != 0
5065 && reload_reg_rtx[j] != old
5066 && reload_reg_rtx[j] != 0
5067 && reload_out[j] == 0
5068 && ! reload_inherited[j]
5069 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
5070 {
5071 register rtx reloadreg = reload_reg_rtx[j];
5072
a8fdc208 5073#if 0
32131a9c
RK
5074 /* We can't abort here because we need to support this for sched.c.
5075 It's not terrible to miss a REG_DEAD note, but we should try
5076 to figure out how to do this correctly. */
5077 /* The code below is incorrect for address-only reloads. */
5078 if (reload_when_needed[j] != RELOAD_OTHER
5079 && reload_when_needed[j] != RELOAD_FOR_INPUT)
5080 abort ();
5081#endif
5082
5083 /* Add a death note to this insn, for an input reload. */
5084
5085 if ((reload_when_needed[j] == RELOAD_OTHER
5086 || reload_when_needed[j] == RELOAD_FOR_INPUT)
5087 && ! dead_or_set_p (insn, reloadreg))
5088 REG_NOTES (insn)
5089 = gen_rtx (EXPR_LIST, REG_DEAD,
5090 reloadreg, REG_NOTES (insn));
5091 }
5092
5093 /* When we inherit a reload, the last marked death of the reload reg
5094 may no longer really be a death. */
5095 if (reload_reg_rtx[j] != 0
5096 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
5097 && reload_inherited[j])
5098 {
5099 /* Handle inheriting an output reload.
5100 Remove the death note from the output reload insn. */
5101 if (reload_spill_index[j] >= 0
5102 && GET_CODE (reload_in[j]) == REG
5103 && spill_reg_store[reload_spill_index[j]] != 0
5104 && find_regno_note (spill_reg_store[reload_spill_index[j]],
5105 REG_DEAD, REGNO (reload_reg_rtx[j])))
5106 remove_death (REGNO (reload_reg_rtx[j]),
5107 spill_reg_store[reload_spill_index[j]]);
5108 /* Likewise for input reloads that were inherited. */
5109 else if (reload_spill_index[j] >= 0
5110 && GET_CODE (reload_in[j]) == REG
5111 && spill_reg_store[reload_spill_index[j]] == 0
5112 && reload_inheritance_insn[j] != 0
a8fdc208 5113 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
5114 REGNO (reload_reg_rtx[j])))
5115 remove_death (REGNO (reload_reg_rtx[j]),
5116 reload_inheritance_insn[j]);
5117 else
5118 {
5119 rtx prev;
5120
5121 /* We got this register from find_equiv_reg.
5122 Search back for its last death note and get rid of it.
5123 But don't search back too far.
5124 Don't go past a place where this reg is set,
5125 since a death note before that remains valid. */
5126 for (prev = PREV_INSN (insn);
5127 prev && GET_CODE (prev) != CODE_LABEL;
5128 prev = PREV_INSN (prev))
5129 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5130 && dead_or_set_p (prev, reload_reg_rtx[j]))
5131 {
5132 if (find_regno_note (prev, REG_DEAD,
5133 REGNO (reload_reg_rtx[j])))
5134 remove_death (REGNO (reload_reg_rtx[j]), prev);
5135 break;
5136 }
5137 }
5138 }
5139
5140 /* We might have used find_equiv_reg above to choose an alternate
5141 place from which to reload. If so, and it died, we need to remove
5142 that death and move it to one of the insns we just made. */
5143
5144 if (oldequiv_reg != 0
5145 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
5146 {
5147 rtx prev, prev1;
5148
5149 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
5150 prev = PREV_INSN (prev))
5151 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5152 && dead_or_set_p (prev, oldequiv_reg))
5153 {
5154 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
5155 {
5156 for (prev1 = this_reload_insn;
5157 prev1; prev1 = PREV_INSN (prev1))
 5158	    if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
 5159		&& reg_overlap_mentioned_for_reload_p (oldequiv_reg,
 5160					       PATTERN (prev1)))
5161 {
5162 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
5163 oldequiv_reg,
5164 REG_NOTES (prev1));
5165 break;
5166 }
5167 remove_death (REGNO (oldequiv_reg), prev);
5168 }
5169 break;
5170 }
5171 }
5172#endif
5173
5174 /* If we are reloading a register that was recently stored in with an
5175 output-reload, see if we can prove there was
5176 actually no need to store the old value in it. */
5177
5178 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
5179 /* This is unsafe if some other reload uses the same reg first. */
5180 && (reload_when_needed[j] == RELOAD_OTHER
5181 || reload_when_needed[j] == RELOAD_FOR_INPUT
5182 || reload_when_needed[j] == RELOAD_FOR_INPUT_RELOAD_ADDRESS)
5183 && GET_CODE (reload_in[j]) == REG
5184#if 0
5185 /* There doesn't seem to be any reason to restrict this to pseudos
5186 and doing so loses in the case where we are copying from a
5187 register of the wrong class. */
5188 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
5189#endif
5190 && spill_reg_store[reload_spill_index[j]] != 0
5191 && dead_or_set_p (insn, reload_in[j])
5192 /* This is unsafe if operand occurs more than once in current
5193 insn. Perhaps some occurrences weren't reloaded. */
5194 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5195 delete_output_reload (insn, j,
5196 spill_reg_store[reload_spill_index[j]]);
5197
5198 /* Input-reloading is done. Now do output-reloading,
5199 storing the value from the reload-register after the main insn
5200 if reload_out[j] is nonzero.
5201
5202 ??? At some point we need to support handling output reloads of
5203 JUMP_INSNs or insns that set cc0. */
5204 old = reload_out[j];
5205 if (old != 0
5206 && reload_reg_rtx[j] != old
5207 && reload_reg_rtx[j] != 0)
5208 {
5209 register rtx reloadreg = reload_reg_rtx[j];
5210 register rtx second_reloadreg = 0;
5211 rtx prev_insn = PREV_INSN (first_output_reload_insn);
5212 rtx note, p;
5213 enum machine_mode mode;
5214 int special = 0;
5215
5216 /* An output operand that dies right away does need a reload,
5217 but need not be copied from it. Show the new location in the
5218 REG_UNUSED note. */
5219 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
5220 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
5221 {
5222 XEXP (note, 0) = reload_reg_rtx[j];
5223 continue;
5224 }
5225 else if (GET_CODE (old) == SCRATCH)
5226 /* If we aren't optimizing, there won't be a REG_UNUSED note,
5227 but we don't want to make an output reload. */
5228 continue;
5229
5230#if 0
5231 /* Strip off of OLD any size-increasing SUBREGs such as
5232 (SUBREG:SI foo:QI 0). */
5233
5234 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
5235 && (GET_MODE_SIZE (GET_MODE (old))
5236 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
5237 old = SUBREG_REG (old);
5238#endif
5239
 5240	  /* If this insn is a JUMP_INSN, we can't support output reloads yet.  */
5241 if (GET_CODE (insn) == JUMP_INSN)
5242 abort ();
5243
5244 /* Determine the mode to reload in.
5245 See comments above (for input reloading). */
5246
5247 mode = GET_MODE (old);
5248 if (mode == VOIDmode)
5249 abort (); /* Should never happen for an output. */
5250
5251 /* A strict-low-part output operand needs to be reloaded
5252 in the mode of the entire value. */
5253 if (reload_strict_low[j])
5254 {
5255 mode = GET_MODE (SUBREG_REG (reload_out[j]));
5256 /* Encapsulate OLD into that mode. */
5257 /* If OLD is a subreg, then strip it, since the subreg will
5258 be altered by this very reload. */
5259 while (GET_CODE (old) == SUBREG && GET_MODE (old) != mode)
5260 old = SUBREG_REG (old);
5261 if (GET_MODE (old) != VOIDmode
5262 && mode != GET_MODE (old))
5263 old = gen_rtx (SUBREG, mode, old, 0);
5264 }
5265
5266 if (GET_MODE (reloadreg) != mode)
5267 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5268
5269#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5270
5271 /* If we need two reload regs, set RELOADREG to the intermediate
5272 one, since it will be stored into OUT. We might need a secondary
5273 register only for an input reload, so check again here. */
5274
1554c2c6 5275 if (reload_secondary_reload[j] >= 0)
32131a9c 5276 {
1554c2c6 5277 rtx real_old = old;
32131a9c 5278
1554c2c6
RK
5279 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
5280 && reg_equiv_mem[REGNO (old)] != 0)
5281 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 5282
1554c2c6
RK
 5283	  if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
5284 mode, real_old)
5285 != NO_REGS))
5286 {
5287 second_reloadreg = reloadreg;
5288 reloadreg = reload_reg_rtx[reload_secondary_reload[j]];
32131a9c 5289
1554c2c6
RK
5290 /* See if RELOADREG is to be used as a scratch register
5291 or as an intermediate register. */
5292 if (reload_secondary_icode[j] != CODE_FOR_nothing)
32131a9c 5293 {
1554c2c6
RK
5294 emit_insn_before ((GEN_FCN (reload_secondary_icode[j])
5295 (real_old, second_reloadreg,
5296 reloadreg)),
5297 first_output_reload_insn);
5298 special = 1;
32131a9c
RK
5299 }
5300 else
5301 {
5302 /* See if we need both a scratch and intermediate reload
5303 register. */
5304 int secondary_reload = reload_secondary_reload[j];
5305 enum insn_code tertiary_icode
5306 = reload_secondary_icode[secondary_reload];
5307 rtx pat;
32131a9c 5308
1554c2c6
RK
5309 if (GET_MODE (reloadreg) != mode)
5310 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5311
5312 if (tertiary_icode != CODE_FOR_nothing)
5313 {
5314 rtx third_reloadreg
5315 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5316 pat = (GEN_FCN (tertiary_icode)
5317 (reloadreg, second_reloadreg, third_reloadreg));
5318 }
5319 else
5320 pat = gen_move_insn (reloadreg, second_reloadreg);
5321
5322 emit_insn_before (pat, first_output_reload_insn);
5323 }
32131a9c
RK
5324 }
5325 }
5326#endif
5327
5328 /* Output the last reload insn. */
5329 if (! special)
5330 {
5331#ifdef SECONDARY_MEMORY_NEEDED
5332 /* If we need a memory location to do the move, do it that way. */
5333 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
5334 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
5335 REGNO_REG_CLASS (REGNO (reloadreg)),
5336 GET_MODE (reloadreg)))
5337 {
5338 /* Get the memory to use and rewrite both registers to
5339 its mode. */
5340 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg));
5341
5342 if (GET_MODE (loc) != GET_MODE (reloadreg))
5343 reloadreg = gen_rtx (REG, GET_MODE (loc),
5344 REGNO (reloadreg));
5345
5346 if (GET_MODE (loc) != GET_MODE (old))
5347 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
5348
5349 emit_insn_before (gen_move_insn (loc, reloadreg),
5350 first_output_reload_insn);
5351 emit_insn_before (gen_move_insn (old, loc),
5352 first_output_reload_insn);
5353 }
5354 else
5355#endif
5356 emit_insn_before (gen_move_insn (old, reloadreg),
5357 first_output_reload_insn);
5358 }
32131a9c
RK
5359
5360#ifdef PRESERVE_DEATH_INFO_REGNO_P
5361 /* If final will look at death notes for this reg,
5362 put one on the last output-reload insn to use it. Similarly
5363 for any secondary register. */
5364 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
5365 for (p = PREV_INSN (first_output_reload_insn);
5366 p != prev_insn; p = PREV_INSN (p))
5367 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
5368 && reg_overlap_mentioned_for_reload_p (reloadreg,
5369 PATTERN (p)))
5370 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
5371 reloadreg, REG_NOTES (p));
5372
5373#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5374 if (! special
5375 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
5376 for (p = PREV_INSN (first_output_reload_insn);
5377 p != prev_insn; p = PREV_INSN (p))
5378 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
5379 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
5380 PATTERN (p)))
5381 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
5382 second_reloadreg, REG_NOTES (p));
5383#endif
5384#endif
5385 /* Look at all insns we emitted, just to be safe. */
5386 for (p = NEXT_INSN (prev_insn); p != first_output_reload_insn;
5387 p = NEXT_INSN (p))
5388 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
5389 {
5390 /* If this output reload doesn't come from a spill reg,
5391 clear any memory of reloaded copies of the pseudo reg.
5392 If this output reload comes from a spill reg,
5393 reg_has_output_reload will make this do nothing. */
5394 note_stores (PATTERN (p), forget_old_reloads_1);
5395
5396 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
5397 store_insn = p;
5398 }
5399
5400 first_output_reload_insn = NEXT_INSN (prev_insn);
5401 }
5402
5403 if (reload_spill_index[j] >= 0)
5404 new_spill_reg_store[reload_spill_index[j]] = store_insn;
5405 }
5406
32131a9c
RK
5407 /* Move death notes from INSN
5408 to output-operand-address and output reload insns. */
5409#ifdef PRESERVE_DEATH_INFO_REGNO_P
5410 {
5411 rtx insn1;
5412 /* Loop over those insns, last ones first. */
5413 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
5414 insn1 = PREV_INSN (insn1))
5415 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
5416 {
5417 rtx source = SET_SRC (PATTERN (insn1));
5418 rtx dest = SET_DEST (PATTERN (insn1));
5419
5420 /* The note we will examine next. */
5421 rtx reg_notes = REG_NOTES (insn);
5422 /* The place that pointed to this note. */
5423 rtx *prev_reg_note = &REG_NOTES (insn);
5424
5425 /* If the note is for something used in the source of this
5426 reload insn, or in the output address, move the note. */
5427 while (reg_notes)
5428 {
5429 rtx next_reg_notes = XEXP (reg_notes, 1);
5430 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
5431 && GET_CODE (XEXP (reg_notes, 0)) == REG
5432 && ((GET_CODE (dest) != REG
5433 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
5434 dest))
5435 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
5436 source)))
5437 {
5438 *prev_reg_note = next_reg_notes;
5439 XEXP (reg_notes, 1) = REG_NOTES (insn1);
5440 REG_NOTES (insn1) = reg_notes;
5441 }
5442 else
5443 prev_reg_note = &XEXP (reg_notes, 1);
5444
5445 reg_notes = next_reg_notes;
5446 }
5447 }
5448 }
5449#endif
5450
5451 /* For all the spill regs newly reloaded in this instruction,
5452 record what they were reloaded from, so subsequent instructions
5453 can inherit the reloads.
5454
5455 Update spill_reg_store for the reloads of this insn.
e9e79d69 5456 Copy the elements that were updated in the loop above. */
32131a9c
RK
5457
5458 for (j = 0; j < n_reloads; j++)
5459 {
5460 register int r = reload_order[j];
5461 register int i = reload_spill_index[r];
5462
5463 /* I is nonneg if this reload used one of the spill regs.
5464 If reload_reg_rtx[r] is 0, this is an optional reload
5465 that we opted to ignore. */
d445b551 5466
32131a9c
RK
5467 if (i >= 0 && reload_reg_rtx[r] != 0)
5468 {
5469 /* First, clear out memory of what used to be in this spill reg.
5470 If consecutive registers are used, clear them all. */
5471 int nr
5472 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
5473 int k;
5474
5475 for (k = 0; k < nr; k++)
5476 {
5477 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
5478 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
5479 }
5480
5481 /* Maybe the spill reg contains a copy of reload_out. */
5482 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
5483 {
5484 register int nregno = REGNO (reload_out[r]);
d445b551
RK
5485
5486 spill_reg_store[i] = new_spill_reg_store[i];
32131a9c 5487 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 5488
32131a9c
RK
5489 for (k = 0; k < nr; k++)
5490 {
5491 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5492 = nregno;
5493 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
5494 }
5495 }
d445b551 5496
32131a9c
RK
5497 /* Maybe the spill reg contains a copy of reload_in. */
5498 else if (reload_out[r] == 0
5499 && reload_in[r] != 0
5500 && (GET_CODE (reload_in[r]) == REG
5501 || GET_CODE (reload_in_reg[r]) == REG))
5502 {
5503 register int nregno;
5504 if (GET_CODE (reload_in[r]) == REG)
5505 nregno = REGNO (reload_in[r]);
5506 else
5507 nregno = REGNO (reload_in_reg[r]);
5508
5509 /* If there are two separate reloads (one in and one out)
5510 for the same (hard or pseudo) reg,
a8fdc208 5511 leave reg_last_reload_reg set
5512 based on the output reload.
5513 Otherwise, set it from this input reload. */
5514 if (!reg_has_output_reload[nregno]
5515 /* But don't do so if another input reload
5516 will clobber this one's value. */
5517 && reload_reg_reaches_end_p (spill_regs[i],
5518 reload_when_needed[r]))
5519 {
5520 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551
RK
5521
5522 /* Unless we inherited this reload, show we haven't
5523 recently done a store. */
5524 if (! reload_inherited[r])
5525 spill_reg_store[i] = 0;
5526
32131a9c
RK
5527 for (k = 0; k < nr; k++)
5528 {
5529 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5530 = nregno;
5531 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
5532 = insn;
5533 }
5534 }
5535 }
5536 }
5537
5538 /* The following if-statement was #if 0'd in 1.34 (or before...).
5539 It's reenabled in 1.35 because supposedly nothing else
5540 deals with this problem. */
5541
5542 /* If a register gets output-reloaded from a non-spill register,
5543 that invalidates any previous reloaded copy of it.
5544 But forget_old_reloads_1 won't get to see it, because
5545 it thinks only about the original insn. So invalidate it here. */
5546 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
5547 {
5548 register int nregno = REGNO (reload_out[r]);
5549 reg_last_reload_reg[nregno] = 0;
5550 }
5551 }
5552}
5553\f
5554/* Emit code before BEFORE_INSN to perform an input reload of IN to RELOADREG.
3c3eeea6 5555 Returns first insn emitted. */
32131a9c
RK
5556
5557rtx
3c3eeea6 5558gen_input_reload (reloadreg, in, before_insn)
5559 rtx reloadreg;
5560 rtx in;
5561 rtx before_insn;
5562{
5563 register rtx prev_insn = PREV_INSN (before_insn);
5564
a8fdc208 5565 /* How to do this reload can get quite tricky. Normally, we are being
5566 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
5567 register that didn't get a hard register. In that case we can just
5568 call emit_move_insn.
5569
5570 We can also be asked to reload a PLUS that adds either two registers or
5571 a register and a constant or MEM. This can occur during frame pointer
 5572	 elimination.  That case is handled by trying to emit a single insn
5573 to perform the add. If it is not valid, we use a two insn sequence.
5574
5575 Finally, we could be called to handle an 'o' constraint by putting
5576 an address into a register. In that case, we first try to do this
5577 with a named pattern of "reload_load_address". If no such pattern
5578 exists, we just emit a SET insn and hope for the best (it will normally
5579 be valid on machines that use 'o').
5580
 5581	 This entire process is made complex by two facts: reload will never
 5582	 process the insns we generate here, so we must ensure that they
 5583	 will fit their constraints; and parts of IN might be being reloaded
 5584	 separately and replaced with spill registers.
5585 Because of this, we are, in some sense, just guessing the right approach
5586 here. The one listed above seems to work.
5587
5588 ??? At some point, this whole thing needs to be rethought. */
5589
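  /* Illustrative only (assumed example): if IN is
	(plus:SI (reg:SI fp) (const_int 8))
     from frame pointer elimination, the code below first tries the one insn
	(set (reg:SI reloadreg) (plus:SI (reg:SI fp) (const_int 8)))
     and, if constrain_operands rejects it, falls back to moving the constant
     into the reload register and then adding the frame pointer register.  */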
5590 if (GET_CODE (in) == PLUS
5591 && GET_CODE (XEXP (in, 0)) == REG
5592 && (GET_CODE (XEXP (in, 1)) == REG
5593 || CONSTANT_P (XEXP (in, 1))
5594 || GET_CODE (XEXP (in, 1)) == MEM))
5595 {
5596 /* We need to compute the sum of what is either a register and a
5597 constant, a register and memory, or a hard register and a pseudo
5598 register and put it into the reload register. The best possible way
5599 of doing this is if the machine has a three-operand ADD insn that
5600 accepts the required operands.
5601
5602 The simplest approach is to try to generate such an insn and see if it
5603 is recognized and matches its constraints. If so, it can be used.
5604
5605 It might be better not to actually emit the insn unless it is valid,
0009eff2 5606 but we need to pass the insn as an operand to `recog' and
b36d7dd7 5607 `insn_extract' and it is simpler to emit and then delete the insn if
0009eff2 5608 not valid than to dummy things up. */
a8fdc208 5609
af929c62 5610 rtx op0, op1, tem, insn;
32131a9c 5611 int code;
a8fdc208 5612
af929c62
RK
5613 op0 = find_replacement (&XEXP (in, 0));
5614 op1 = find_replacement (&XEXP (in, 1));
5615
32131a9c
RK
5616 /* Since constraint checking is strict, commutativity won't be
5617 checked, so we need to do that here to avoid spurious failure
5618 if the add instruction is two-address and the second operand
5619 of the add is the same as the reload reg, which is frequently
5620 the case. If the insn would be A = B + A, rearrange it so
5621 it will be A = A + B as constrain_operands expects. */
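      /* A concrete (illustrative) case: if IN is
	 (plus:SI (reg:SI 2) (reg:SI reloadreg)), the swap below makes the
	 emitted insn compute  reloadreg = reloadreg + reg 2,  which a
	 two-address add pattern can match.  */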
a8fdc208 5622
32131a9c
RK
5623 if (GET_CODE (XEXP (in, 1)) == REG
5624 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
5625 tem = op0, op0 = op1, op1 = tem;
5626
5627 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
5628 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
32131a9c
RK
5629
5630 insn = emit_insn_before (gen_rtx (SET, VOIDmode, reloadreg, in),
5631 before_insn);
5632 code = recog_memoized (insn);
5633
5634 if (code >= 0)
5635 {
5636 insn_extract (insn);
5637 /* We want constrain operands to treat this insn strictly in
5638 its validity determination, i.e., the way it would after reload
5639 has completed. */
5640 if (constrain_operands (code, 1))
5641 return insn;
5642 }
5643
5644 if (PREV_INSN (insn))
5645 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
5646 if (NEXT_INSN (insn))
5647 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
5648
 5649	    /* If that failed, we must use a conservative two-insn sequence:
 5650	       use a move to copy the constant, MEM, or pseudo register into the
 5651	       reload register, since "move" can handle an arbitrary operand,
 5652	       unlike add, which can't in general.  Then add the registers.
32131a9c
RK
5653
5654 If there is another way to do this for a specific machine, a
5655 DEFINE_PEEPHOLE should be specified that recognizes the sequence
5656 we emit below. */
5657
af929c62
RK
5658 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
5659 || (GET_CODE (op1) == REG
5660 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
5661 tem = op0, op0 = op1, op1 = tem;
32131a9c 5662
af929c62
RK
5663 emit_insn_before (gen_move_insn (reloadreg, op0), before_insn);
5664 emit_insn_before (gen_add2_insn (reloadreg, op1), before_insn);
32131a9c
RK
5665 }
5666
0dadecf6
RK
5667#ifdef SECONDARY_MEMORY_NEEDED
5668 /* If we need a memory location to do the move, do it that way. */
5669 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
5670 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
5671 REGNO_REG_CLASS (REGNO (reloadreg)),
5672 GET_MODE (reloadreg)))
5673 {
5674 /* Get the memory to use and rewrite both registers to its mode. */
5675 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg));
5676
5677 if (GET_MODE (loc) != GET_MODE (reloadreg))
5678 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
5679
5680 if (GET_MODE (loc) != GET_MODE (in))
5681 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
5682
0dadecf6 5683 emit_insn_before (gen_move_insn (loc, in), before_insn);
58c8c593 5684 emit_insn_before (gen_move_insn (reloadreg, loc), before_insn);
0dadecf6
RK
5685 }
5686#endif
5687
32131a9c
RK
5688 /* If IN is a simple operand, use gen_move_insn. */
5689 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
3c3eeea6 5690 emit_insn_before (gen_move_insn (reloadreg, in), before_insn);
32131a9c
RK
5691
5692#ifdef HAVE_reload_load_address
5693 else if (HAVE_reload_load_address)
3c3eeea6 5694 emit_insn_before (gen_reload_load_address (reloadreg, in), before_insn);
32131a9c
RK
5695#endif
5696
 5697	  /* Otherwise, just write (set RELOADREG IN) and hope for the best.  */
5698 else
3c3eeea6 5699 emit_insn_before (gen_rtx (SET, VOIDmode, reloadreg, in), before_insn);
32131a9c
RK
5700
5701 /* Return the first insn emitted.
5702 We can not just return PREV_INSN (before_insn), because there may have
5703 been multiple instructions emitted. Also note that gen_move_insn may
5704 emit more than one insn itself, so we can not assume that there is one
5705 insn emitted per emit_insn_before call. */
5706
5707 return NEXT_INSN (prev_insn);
5708}
5709\f
5710/* Delete a previously made output-reload
5711 whose result we now believe is not needed.
5712 First we double-check.
5713
5714 INSN is the insn now being processed.
5715 OUTPUT_RELOAD_INSN is the insn of the output reload.
5716 J is the reload-number for this insn. */
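/* Illustrative scenario (assumed, for exposition only): pseudo 117 was
   output-reloaded from a spill register a few insns earlier, nothing in
   between mentions the pseudo, and it dies in INSN; the earlier store can
   then be deleted, and when the pseudo has a single set and a single death
   it is shown, for debugging purposes, as living in the reload register.  */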
5717
5718static void
5719delete_output_reload (insn, j, output_reload_insn)
5720 rtx insn;
5721 int j;
5722 rtx output_reload_insn;
5723{
5724 register rtx i1;
5725
5726 /* Get the raw pseudo-register referred to. */
5727
5728 rtx reg = reload_in[j];
5729 while (GET_CODE (reg) == SUBREG)
5730 reg = SUBREG_REG (reg);
5731
5732 /* If the pseudo-reg we are reloading is no longer referenced
5733 anywhere between the store into it and here,
5734 and no jumps or labels intervene, then the value can get
5735 here through the reload reg alone.
5736 Otherwise, give up--return. */
5737 for (i1 = NEXT_INSN (output_reload_insn);
5738 i1 != insn; i1 = NEXT_INSN (i1))
5739 {
5740 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
5741 return;
5742 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
5743 && reg_mentioned_p (reg, PATTERN (i1)))
5744 return;
5745 }
5746
5747 /* If this insn will store in the pseudo again,
5748 the previous store can be removed. */
5749 if (reload_out[j] == reload_in[j])
5750 delete_insn (output_reload_insn);
5751
5752 /* See if the pseudo reg has been completely replaced
5753 with reload regs. If so, delete the store insn
5754 and forget we had a stack slot for the pseudo. */
5755 else if (reg_n_deaths[REGNO (reg)] == 1
5756 && reg_basic_block[REGNO (reg)] >= 0
5757 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
5758 {
5759 rtx i2;
5760
5761 /* We know that it was used only between here
5762 and the beginning of the current basic block.
5763 (We also know that the last use before INSN was
5764 the output reload we are thinking of deleting, but never mind that.)
5765 Search that range; see if any ref remains. */
5766 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
5767 {
d445b551
RK
5768 rtx set = single_set (i2);
5769
32131a9c
RK
5770 /* Uses which just store in the pseudo don't count,
5771 since if they are the only uses, they are dead. */
d445b551 5772 if (set != 0 && SET_DEST (set) == reg)
5773 continue;
5774 if (GET_CODE (i2) == CODE_LABEL
5775 || GET_CODE (i2) == JUMP_INSN)
5776 break;
5777 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
5778 && reg_mentioned_p (reg, PATTERN (i2)))
5779 /* Some other ref remains;
5780 we can't do anything. */
5781 return;
5782 }
5783
5784 /* Delete the now-dead stores into this pseudo. */
5785 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
5786 {
d445b551
RK
5787 rtx set = single_set (i2);
5788
5789 if (set != 0 && SET_DEST (set) == reg)
5790 delete_insn (i2);
5791 if (GET_CODE (i2) == CODE_LABEL
5792 || GET_CODE (i2) == JUMP_INSN)
5793 break;
5794 }
5795
5796 /* For the debugging info,
5797 say the pseudo lives in this reload reg. */
5798 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
5799 alter_reg (REGNO (reg), -1);
5800 }
5801}
5802
5803\f
a8fdc208 5804/* Output reload-insns to reload VALUE into RELOADREG.
858a47b1 5805 VALUE is an autoincrement or autodecrement RTX whose operand
5806 is a register or memory location;
5807 so reloading involves incrementing that location.
5808
5809 INC_AMOUNT is the number to increment or decrement by (always positive).
5810 This cannot be deduced from VALUE.
5811
5812 INSN is the insn before which the new insns should be emitted.
5813
5814 The return value is the first of the insns emitted. */
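/* For example (illustrative only): for VALUE == (post_inc:SI (reg:SI 5)) and
   INC_AMOUNT == 4, a copy of register 5 into RELOADREG is emitted first, and
   register 5 is then incremented by 4 before INSN, either directly or by way
   of RELOADREG with a compensating subtract.  */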
5815
5816static rtx
5817inc_for_reload (reloadreg, value, inc_amount, insn)
5818 rtx reloadreg;
5819 rtx value;
5820 int inc_amount;
5821 rtx insn;
5822{
5823 /* REG or MEM to be copied and incremented. */
5824 rtx incloc = XEXP (value, 0);
5825 /* Nonzero if increment after copying. */
5826 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
0009eff2
RK
5827 rtx prev = PREV_INSN (insn);
5828 rtx inc;
5829 rtx add_insn;
5830 int code;
32131a9c
RK
5831
5832 /* No hard register is equivalent to this register after
5833 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
5834 we could inc/dec that register as well (maybe even using it for
5835 the source), but I'm not sure it's worth worrying about. */
5836 if (GET_CODE (incloc) == REG)
5837 reg_last_reload_reg[REGNO (incloc)] = 0;
5838
5839 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
5840 inc_amount = - inc_amount;
5841
fb3821f7 5842 inc = GEN_INT (inc_amount);
0009eff2
RK
5843
5844 /* If this is post-increment, first copy the location to the reload reg. */
5845 if (post)
5846 emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
5847
5848 /* See if we can directly increment INCLOC. Use a method similar to that
5849 in gen_input_reload. */
5850
5851 add_insn = emit_insn_before (gen_rtx (SET, VOIDmode, incloc,
5852 gen_rtx (PLUS, GET_MODE (incloc),
5853 incloc, inc)), insn);
5854
5855 code = recog_memoized (add_insn);
5856 if (code >= 0)
32131a9c 5857 {
0009eff2
RK
5858 insn_extract (add_insn);
5859 if (constrain_operands (code, 1))
32131a9c 5860 {
0009eff2
RK
5861 /* If this is a pre-increment and we have incremented the value
5862 where it lives, copy the incremented value to RELOADREG to
5863 be used as an address. */
5864
5865 if (! post)
5866 emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
5867 return NEXT_INSN (prev);
32131a9c
RK
5868 }
5869 }
0009eff2
RK
5870
5871 if (PREV_INSN (add_insn))
5872 NEXT_INSN (PREV_INSN (add_insn)) = NEXT_INSN (add_insn);
5873 if (NEXT_INSN (add_insn))
5874 PREV_INSN (NEXT_INSN (add_insn)) = PREV_INSN (add_insn);
5875
 5876	  /* If we couldn't do the increment directly, we must increment in RELOADREG.
5877 The way we do this depends on whether this is pre- or post-increment.
5878 For pre-increment, copy INCLOC to the reload register, increment it
5879 there, then save back. */
5880
5881 if (! post)
5882 {
5883 emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
5884 emit_insn_before (gen_add2_insn (reloadreg, inc), insn);
5885 emit_insn_before (gen_move_insn (incloc, reloadreg), insn);
5886 }
5887 else
5888 {
0009eff2
RK
5889 /* Postincrement.
5890 Because this might be a jump insn or a compare, and because RELOADREG
5891 may not be available after the insn in an input reload, we must do
5892 the incrementation before the insn being reloaded for.
5893
5894 We have already copied INCLOC to RELOADREG. Increment the copy in
5895 RELOADREG, save that back, then decrement RELOADREG so it has
5896 the original value. */
5897
5898 emit_insn_before (gen_add2_insn (reloadreg, inc), insn);
5899 emit_insn_before (gen_move_insn (incloc, reloadreg), insn);
fb3821f7 5900 emit_insn_before (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)),
0009eff2 5901 insn);
32131a9c 5902 }
0009eff2
RK
5903
5904 return NEXT_INSN (prev);
32131a9c
RK
5905}
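#if 0
/* Purely illustrative sketch, not part of the compiler: a hypothetical caller
   of inc_for_reload for a 4-byte post-increment.  EXAMPLE_RELOADREG,
   EXAMPLE_VALUE and EXAMPLE_INSN are assumed stand-ins for a reload register
   rtx, a (post_inc ...) rtx and the insn being reloaded for.  */

static rtx
example_inc_reload (example_reloadreg, example_value, example_insn)
     rtx example_reloadreg, example_value, example_insn;
{
  /* Emits, before EXAMPLE_INSN, a copy of the autoincremented location into
     EXAMPLE_RELOADREG followed by the increment itself, and returns the first
     insn emitted.  */
  return inc_for_reload (example_reloadreg, example_value, 4, example_insn);
}
#endif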
5906\f
5907/* Return 1 if we are certain that the constraint-string STRING allows
5908 the hard register REG. Return 0 if we can't be sure of this. */
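/* Example (illustrative): for STRING == "r,r" and a general-purpose hard
   register, every alternative accepts REG, so 1 is returned; for
   STRING == "r,m" the second alternative never sets VALUE (assuming `m' does
   not name a register class containing REG), so 0 is returned.  */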
5909
5910static int
5911constraint_accepts_reg_p (string, reg)
5912 char *string;
5913 rtx reg;
5914{
5915 int value = 0;
5916 int regno = true_regnum (reg);
5917 int c;
5918
5919 /* Initialize for first alternative. */
5920 value = 0;
5921 /* Check that each alternative contains `g' or `r'. */
5922 while (1)
5923 switch (c = *string++)
5924 {
5925 case 0:
5926 /* If an alternative lacks `g' or `r', we lose. */
5927 return value;
5928 case ',':
5929 /* If an alternative lacks `g' or `r', we lose. */
5930 if (value == 0)
5931 return 0;
5932 /* Initialize for next alternative. */
5933 value = 0;
5934 break;
5935 case 'g':
5936 case 'r':
5937 /* Any general reg wins for this alternative. */
5938 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
5939 value = 1;
5940 break;
5941 default:
5942 /* Any reg in specified class wins for this alternative. */
5943 {
0009eff2 5944 enum reg_class class = REG_CLASS_FROM_LETTER (c);
32131a9c 5945
0009eff2 5946 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
5947 value = 1;
5948 }
5949 }
5950}
5951\f
d445b551
RK
5952/* Return the number of places FIND appears within X, but don't count
5953 an occurrence if some SET_DEST is FIND. */
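/* Illustrative example: for X == (set (reg A) (plus (reg A) (reg A))), with
   FIND being that same (reg A) rtx, the SET_DEST occurrence is not counted
   but the two uses inside SET_SRC are, so the result is 2.  */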
32131a9c
RK
5954
5955static int
5956count_occurrences (x, find)
5957 register rtx x, find;
5958{
5959 register int i, j;
5960 register enum rtx_code code;
5961 register char *format_ptr;
5962 int count;
5963
5964 if (x == find)
5965 return 1;
5966 if (x == 0)
5967 return 0;
5968
5969 code = GET_CODE (x);
5970
5971 switch (code)
5972 {
5973 case REG:
5974 case QUEUED:
5975 case CONST_INT:
5976 case CONST_DOUBLE:
5977 case SYMBOL_REF:
5978 case CODE_LABEL:
5979 case PC:
5980 case CC0:
5981 return 0;
d445b551
RK
5982
5983 case SET:
5984 if (SET_DEST (x) == find)
5985 return count_occurrences (SET_SRC (x), find);
5986 break;
32131a9c
RK
5987 }
5988
5989 format_ptr = GET_RTX_FORMAT (code);
5990 count = 0;
5991
5992 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5993 {
5994 switch (*format_ptr++)
5995 {
5996 case 'e':
5997 count += count_occurrences (XEXP (x, i), find);
5998 break;
5999
6000 case 'E':
6001 if (XVEC (x, i) != NULL)
6002 {
6003 for (j = 0; j < XVECLEN (x, i); j++)
6004 count += count_occurrences (XVECEXP (x, i, j), find);
6005 }
6006 break;
6007 }
6008 }
6009 return count;
6010}