/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */

#include <stdio.h>
#include "config.h"
#include "rtl.h"
#include "obstack.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "flags.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "reload.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"

37/* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
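/* As a purely illustrative sketch (the register numbers, the stack slot
   offset and the target are hypothetical, not taken from this file):
   suppose pseudo reg 100 did not get a hard reg and an insn needs its
   operand in a register, say

	(set (reg 100) (plus (reg 100) (const_int 4)))

   Then pseudo 100 is given a stack slot, a reload reg (say hard reg 2)
   is chosen, and the insn is fixed up with an input reload before it
   and an output reload after it:

	(set (reg 2) (mem (plus (reg fp) (const_int -8))))
	(set (reg 2) (plus (reg 2) (const_int 4)))
	(set (mem (plus (reg fp) (const_int -8))) (reg 2))

   The rest of this file is about doing that with as few reload regs
   and as few extra insns as possible.  */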
69\f
70/* During reload_as_needed, element N contains a REG rtx for the hard reg
71 into which pseudo reg N has been reloaded (perhaps for a previous insn). */
72static rtx *reg_last_reload_reg;
73
74/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
75 for an output reload that stores into reg N. */
76static char *reg_has_output_reload;
77
78/* Indicates which hard regs are reload-registers for an output reload
79 in the current insn. */
80static HARD_REG_SET reg_is_output_reload;
81
82/* Element N is the constant value to which pseudo reg N is equivalent,
83 or zero if pseudo reg N is not equivalent to a constant.
84 find_reloads looks at this in order to replace pseudo reg N
85 with the constant it stands for. */
86rtx *reg_equiv_constant;
87
88/* Element N is a memory location to which pseudo reg N is equivalent,
89 prior to any register elimination (such as frame pointer to stack
90 pointer). Depending on whether or not it is a valid address, this value
91 is transferred to either reg_equiv_address or reg_equiv_mem. */
rtx *reg_equiv_memory_loc;

94/* Element N is the address of stack slot to which pseudo reg N is equivalent.
95 This is used when the address is not valid as a memory address
96 (because its displacement is too big for the machine.) */
97rtx *reg_equiv_address;
98
99/* Element N is the memory slot to which pseudo reg N is equivalent,
100 or zero if pseudo reg N is not equivalent to a memory slot. */
101rtx *reg_equiv_mem;
102
103/* Widest width in which each pseudo reg is referred to (via subreg). */
104static int *reg_max_ref_width;
105
106/* Element N is the insn that initialized reg N from its equivalent
107 constant or memory slot. */
108static rtx *reg_equiv_init;
109
110/* During reload_as_needed, element N contains the last pseudo regno
111 reloaded into the Nth reload register. This vector is in parallel
112 with spill_regs. If that pseudo reg occupied more than one register,
113 reg_reloaded_contents points to that pseudo for each spill register in
114 use; all of these must remain set for an inheritance to occur. */
115static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
116
117/* During reload_as_needed, element N contains the insn for which
118 the Nth reload register was last used. This vector is in parallel
119 with spill_regs, and its contents are significant only when
120 reg_reloaded_contents is significant. */
121static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
122
123/* Number of spill-regs so far; number of valid elements of spill_regs. */
124static int n_spills;
125
126/* In parallel with spill_regs, contains REG rtx's for those regs.
127 Holds the last rtx used for any given reg, or 0 if it has never
128 been used for spilling yet. This rtx is reused, provided it has
129 the proper mode. */
130static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
131
132/* In parallel with spill_regs, contains nonzero for a spill reg
133 that was stored after the last time it was used.
134 The precise value is the insn generated to do the store. */
135static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
136
137/* This table is the inverse mapping of spill_regs:
138 indexed by hard reg number,
139 it contains the position of that reg in spill_regs,
140 or -1 for something that is not in spill_regs. */
141static short spill_reg_order[FIRST_PSEUDO_REGISTER];
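/* For example (the register numbers are purely illustrative): if
   spill_regs holds {6, 3} and n_spills is 2, then spill_reg_order[6] == 0,
   spill_reg_order[3] == 1, and every other element is -1.  */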
142
143/* This reg set indicates registers that may not be used for retrying global
144 allocation. The registers that may not be used include all spill registers
145 and the frame pointer (if we are using one). */
146HARD_REG_SET forbidden_regs;
147
148/* This reg set indicates registers that are not good for spill registers.
149 They will not be used to complete groups of spill registers. This includes
150 all fixed registers, registers that may be eliminated, and registers
151 explicitly used in the rtl.
152
153 (spill_reg_order prevents these registers from being used to start a
154 group.) */
155static HARD_REG_SET bad_spill_regs;
156
157/* Describes order of use of registers for reloading
158 of spilled pseudo-registers. `spills' is the number of
159 elements that are actually valid; new ones are added at the end. */
160static short spill_regs[FIRST_PSEUDO_REGISTER];
161
162/* Describes order of preference for putting regs into spill_regs.
163 Contains the numbers of all the hard regs, in order most preferred first.
164 This order is different for each function.
165 It is set up by order_regs_for_reload.
166 Empty elements at the end contain -1. */
167static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
168
169/* 1 for a hard register that appears explicitly in the rtl
170 (for example, function value registers, special registers
171 used by insns, structure value pointer registers). */
172static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
173
174/* Indicates if a register was counted against the need for
175 groups. 0 means it can count against max_nongroup instead. */
176static HARD_REG_SET counted_for_groups;
177
178/* Indicates if a register was counted against the need for
179 non-groups. 0 means it can become part of a new group.
180 During choose_reload_regs, 1 here means don't use this reg
181 as part of a group, even if it seems to be otherwise ok. */
182static HARD_REG_SET counted_for_nongroups;
183
184/* Nonzero if indirect addressing is supported on the machine; this means
185 that spilling (REG n) does not require reloading it into a register in
186 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
187 value indicates the level of indirect addressing supported, e.g., two
188 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
189 a hard register. */
190
191static char spill_indirect_levels;
192
193/* Nonzero if indirect addressing is supported when the innermost MEM is
194 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
195 which these are valid is the same as spill_indirect_levels, above. */
196
197char indirect_symref_ok;
198
199/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
200
201char double_reg_address_ok;
202
203/* Record the stack slot for each spilled hard register. */
204
205static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
206
207/* Width allocated so far for that stack slot. */
208
209static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
210
211/* Indexed by register class and basic block number, nonzero if there is
212 any need for a spill register of that class in that basic block.
213 The pointer is 0 if we did stupid allocation and don't know
214 the structure of basic blocks. */
215
216char *basic_block_needs[N_REG_CLASSES];
217
218/* First uid used by insns created by reload in this function.
219 Used in find_equiv_reg. */
220int reload_first_uid;
221
222/* Flag set by local-alloc or global-alloc if anything is live in
223 a call-clobbered reg across calls. */
224
225int caller_save_needed;
226
227/* Set to 1 while reload_as_needed is operating.
228 Required by some machines to handle any generated moves differently. */
229
230int reload_in_progress = 0;
231
232/* These arrays record the insn_code of insns that may be needed to
233 perform input and output reloads of special objects. They provide a
234 place to pass a scratch register. */
235
236enum insn_code reload_in_optab[NUM_MACHINE_MODES];
237enum insn_code reload_out_optab[NUM_MACHINE_MODES];
238
/* This obstack is used for allocation of rtl during register elimination.
 The allocated storage can be freed once find_reloads has processed the
 insn. */
242
243struct obstack reload_obstack;
244char *reload_firstobj;
245
246#define obstack_chunk_alloc xmalloc
247#define obstack_chunk_free free
248
249/* List of labels that must never be deleted. */
250extern rtx forced_labels;
251\f
252/* This structure is used to record information about register eliminations.
253 Each array entry describes one possible way of eliminating a register
254 in favor of another. If there is more than one way of eliminating a
255 particular register, the most preferred should be specified first. */
256
257static struct elim_table
258{
259 int from; /* Register number to be eliminated. */
260 int to; /* Register number used as replacement. */
261 int initial_offset; /* Initial difference between values. */
262 int can_eliminate; /* Non-zero if this elimination can be done. */
263 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
264 insns made by reload. */
265 int offset; /* Current offset between the two regs. */
 int max_offset; /* Maximum offset between the two regs. */
267 int previous_offset; /* Offset at end of previous insn. */
268 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
269 rtx from_rtx; /* REG rtx for the register to be eliminated.
270 We cannot simply compare the number since
271 we might then spuriously replace a hard
272 register corresponding to a pseudo
273 assigned to the reg to be eliminated. */
274 rtx to_rtx; /* REG rtx for the replacement. */
275} reg_eliminate[] =
276
277/* If a set of eliminable registers was specified, define the table from it.
278 Otherwise, default to the normal case of the frame pointer being
279 replaced by the stack pointer. */
280
281#ifdef ELIMINABLE_REGS
282 ELIMINABLE_REGS;
283#else
284 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
285#endif
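/* For reference, an ELIMINABLE_REGS definition in a machine description
   is just a brace-initialized list of {from, to} pairs, most preferred
   first; an illustrative (hypothetical) definition would look like

	#define ELIMINABLE_REGS				\
	{{ ARG_POINTER_REGNUM, STACK_POINTER_REGNUM},	\
	 { ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM},	\
	 { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}}

   Which of the listed eliminations can actually be done is decided
   below with CAN_ELIMINATE.  */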
286
287#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
288
289/* Record the number of pending eliminations that have an offset not equal
290 to their initial offset. If non-zero, we use a new copy of each
291 replacement result in any insns encountered. */
292static int num_not_at_initial_offset;
293
294/* Count the number of registers that we may be able to eliminate. */
295static int num_eliminable;
296
297/* For each label, we record the offset of each elimination. If we reach
298 a label by more than one path and an offset differs, we cannot do the
299 elimination. This information is indexed by the number of the label.
300 The first table is an array of flags that records whether we have yet
301 encountered a label and the second table is an array of arrays, one
302 entry in the latter array for each elimination. */
303
304static char *offsets_known_at;
305static int (*offsets_at)[NUM_ELIMINABLE_REGS];
306
307/* Number of labels in the current function. */
308
309static int num_labels;
310\f
311void mark_home_live ();
312static void count_possible_groups ();
313static int possible_group_p ();
314static void scan_paradoxical_subregs ();
315static void reload_as_needed ();
316static int modes_equiv_for_class_p ();
317static void alter_reg ();
318static void delete_dead_insn ();
static void spill_failure ();
static int new_spill_reg ();
321static void set_label_offsets ();
322static int eliminate_regs_in_insn ();
323static void mark_not_eliminable ();
324static int spill_hard_reg ();
325static void choose_reload_regs ();
326static void emit_reload_insns ();
327static void delete_output_reload ();
328static void forget_old_reloads_1 ();
329static void order_regs_for_reload ();
330static rtx inc_for_reload ();
331static int constraint_accepts_reg_p ();
332static int count_occurrences ();
333
334extern void remove_death ();
335extern rtx adj_offsettable_operand ();
336extern rtx form_sum ();
337\f
338void
339init_reload ()
340{
341 register int i;
342
343 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
344 Set spill_indirect_levels to the number of levels such addressing is
345 permitted, zero if it is not permitted at all. */
346
347 register rtx tem
348 = gen_rtx (MEM, Pmode,
349 gen_rtx (PLUS, Pmode,
350 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
 GEN_INT (4)));
352 spill_indirect_levels = 0;
353
354 while (memory_address_p (QImode, tem))
355 {
356 spill_indirect_levels++;
357 tem = gen_rtx (MEM, Pmode, tem);
358 }
359
360 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
361
362 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
363 indirect_symref_ok = memory_address_p (QImode, tem);
364
365 /* See if reg+reg is a valid (and offsettable) address. */
366
 tem = gen_rtx (PLUS, Pmode,
 gen_rtx (REG, Pmode, FRAME_POINTER_REGNUM),
 gen_rtx (REG, Pmode, FRAME_POINTER_REGNUM));
370 /* This way, we make sure that reg+reg is an offsettable address. */
371 tem = plus_constant (tem, 4);
372
373 double_reg_address_ok = memory_address_p (QImode, tem);
374
375 /* Initialize obstack for our rtl allocation. */
376 gcc_obstack_init (&reload_obstack);
377 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
378
379#ifdef HAVE_SECONDARY_RELOADS
380
381 /* Initialize the optabs for doing special input and output reloads. */
382
383 for (i = 0; i < NUM_MACHINE_MODES; i++)
384 reload_in_optab[i] = reload_out_optab[i] = CODE_FOR_nothing;
385
386#ifdef HAVE_reload_inqi
387 if (HAVE_reload_inqi)
388 reload_in_optab[(int) QImode] = CODE_FOR_reload_inqi;
389#endif
390#ifdef HAVE_reload_inhi
391 if (HAVE_reload_inhi)
392 reload_in_optab[(int) HImode] = CODE_FOR_reload_inhi;
393#endif
394#ifdef HAVE_reload_insi
395 if (HAVE_reload_insi)
396 reload_in_optab[(int) SImode] = CODE_FOR_reload_insi;
397#endif
398#ifdef HAVE_reload_indi
399 if (HAVE_reload_indi)
400 reload_in_optab[(int) DImode] = CODE_FOR_reload_indi;
401#endif
402#ifdef HAVE_reload_inti
403 if (HAVE_reload_inti)
404 reload_in_optab[(int) TImode] = CODE_FOR_reload_inti;
405#endif
406#ifdef HAVE_reload_insf
407 if (HAVE_reload_insf)
408 reload_in_optab[(int) SFmode] = CODE_FOR_reload_insf;
409#endif
410#ifdef HAVE_reload_indf
411 if (HAVE_reload_indf)
412 reload_in_optab[(int) DFmode] = CODE_FOR_reload_indf;
413#endif
414#ifdef HAVE_reload_inxf
415 if (HAVE_reload_inxf)
416 reload_in_optab[(int) XFmode] = CODE_FOR_reload_inxf;
417#endif
418#ifdef HAVE_reload_intf
419 if (HAVE_reload_intf)
420 reload_in_optab[(int) TFmode] = CODE_FOR_reload_intf;
421#endif
422
423#ifdef HAVE_reload_outqi
424 if (HAVE_reload_outqi)
425 reload_out_optab[(int) QImode] = CODE_FOR_reload_outqi;
426#endif
427#ifdef HAVE_reload_outhi
428 if (HAVE_reload_outhi)
429 reload_out_optab[(int) HImode] = CODE_FOR_reload_outhi;
430#endif
431#ifdef HAVE_reload_outsi
432 if (HAVE_reload_outsi)
433 reload_out_optab[(int) SImode] = CODE_FOR_reload_outsi;
434#endif
435#ifdef HAVE_reload_outdi
436 if (HAVE_reload_outdi)
437 reload_out_optab[(int) DImode] = CODE_FOR_reload_outdi;
438#endif
439#ifdef HAVE_reload_outti
440 if (HAVE_reload_outti)
441 reload_out_optab[(int) TImode] = CODE_FOR_reload_outti;
442#endif
443#ifdef HAVE_reload_outsf
444 if (HAVE_reload_outsf)
445 reload_out_optab[(int) SFmode] = CODE_FOR_reload_outsf;
446#endif
447#ifdef HAVE_reload_outdf
448 if (HAVE_reload_outdf)
449 reload_out_optab[(int) DFmode] = CODE_FOR_reload_outdf;
450#endif
451#ifdef HAVE_reload_outxf
452 if (HAVE_reload_outxf)
453 reload_out_optab[(int) XFmode] = CODE_FOR_reload_outxf;
454#endif
455#ifdef HAVE_reload_outtf
456 if (HAVE_reload_outtf)
457 reload_out_optab[(int) TFmode] = CODE_FOR_reload_outtf;
458#endif
459
460#endif /* HAVE_SECONDARY_RELOADS */
461
462}
463
464/* Main entry point for the reload pass, and only entry point
465 in this file.
466
467 FIRST is the first insn of the function being compiled.
468
469 GLOBAL nonzero means we were called from global_alloc
470 and should attempt to reallocate any pseudoregs that we
471 displace from hard regs we will use for reloads.
472 If GLOBAL is zero, we do not have enough information to do that,
473 so any pseudo reg that is spilled must go to the stack.
474
475 DUMPFILE is the global-reg debugging dump file stream, or 0.
476 If it is nonzero, messages are written to it to describe
477 which registers are seized as reload regs, which pseudo regs
 are spilled from them, and where the pseudo regs are reallocated to.

 Return value is nonzero if reload failed
 and we must not do any more for this function. */
482
483int
484reload (first, global, dumpfile)
485 rtx first;
486 int global;
487 FILE *dumpfile;
488{
489 register int class;
490 register int i;
491 register rtx insn;
492 register struct elim_table *ep;
493
494 int something_changed;
495 int something_needs_reloads;
496 int something_needs_elimination;
497 int new_basic_block_needs;
 enum reg_class caller_save_spill_class = NO_REGS;
 int caller_save_group_size = 1;

 /* Nonzero means we couldn't get enough spill regs. */
 int failure = 0;

504 /* The basic block number currently being processed for INSN. */
505 int this_block;
506
507 /* Make sure even insns with volatile mem refs are recognizable. */
508 init_recog ();
509
510 /* Enable find_equiv_reg to distinguish insns made by reload. */
511 reload_first_uid = get_max_uid ();
512
513 for (i = 0; i < N_REG_CLASSES; i++)
514 basic_block_needs[i] = 0;
515
516#ifdef SECONDARY_MEMORY_NEEDED
517 /* Initialize the secondary memory table. */
518 clear_secondary_mem ();
519#endif
520
521 /* Remember which hard regs appear explicitly
522 before we merge into `regs_ever_live' the ones in which
523 pseudo regs have been allocated. */
524 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
525
526 /* We don't have a stack slot for any spill reg yet. */
527 bzero (spill_stack_slot, sizeof spill_stack_slot);
528 bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);
529
 /* Initialize the save area information for caller-save, in case some
 are needed. */
 init_save_areas ();

534 /* Compute which hard registers are now in use
535 as homes for pseudo registers.
536 This is done here rather than (eg) in global_alloc
537 because this point is reached even if not optimizing. */
538
539 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
540 mark_home_live (i);
541
542 /* Make sure that the last insn in the chain
543 is not something that needs reloading. */
 emit_note (NULL_PTR, NOTE_INSN_DELETED);
545
546 /* Find all the pseudo registers that didn't get hard regs
547 but do have known equivalent constants or memory slots.
548 These include parameters (known equivalent to parameter slots)
549 and cse'd or loop-moved constant memory addresses.
550
551 Record constant equivalents in reg_equiv_constant
552 so they will be substituted by find_reloads.
553 Record memory equivalents in reg_mem_equiv so they can
554 be substituted eventually by altering the REG-rtx's. */
555
556 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
557 bzero (reg_equiv_constant, max_regno * sizeof (rtx));
558 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
559 bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
560 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
561 bzero (reg_equiv_mem, max_regno * sizeof (rtx));
562 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
563 bzero (reg_equiv_init, max_regno * sizeof (rtx));
564 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
565 bzero (reg_equiv_address, max_regno * sizeof (rtx));
566 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
567 bzero (reg_max_ref_width, max_regno * sizeof (int));
568
569 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
570 Also find all paradoxical subregs
571 and find largest such for each pseudo. */
572
573 for (insn = first; insn; insn = NEXT_INSN (insn))
574 {
575 rtx set = single_set (insn);
576
577 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
578 {
 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
 if (note
#ifdef LEGITIMATE_PIC_OPERAND_P
 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
#endif
 )
586 {
587 rtx x = XEXP (note, 0);
588 i = REGNO (SET_DEST (set));
589 if (i > LAST_VIRTUAL_REGISTER)
590 {
591 if (GET_CODE (x) == MEM)
592 reg_equiv_memory_loc[i] = x;
593 else if (CONSTANT_P (x))
594 {
595 if (LEGITIMATE_CONSTANT_P (x))
596 reg_equiv_constant[i] = x;
597 else
598 reg_equiv_memory_loc[i]
 = force_const_mem (GET_MODE (SET_DEST (set)), x);
600 }
601 else
602 continue;
603
604 /* If this register is being made equivalent to a MEM
605 and the MEM is not SET_SRC, the equivalencing insn
606 is one with the MEM as a SET_DEST and it occurs later.
607 So don't mark this insn now. */
608 if (GET_CODE (x) != MEM
609 || rtx_equal_p (SET_SRC (set), x))
610 reg_equiv_init[i] = insn;
611 }
612 }
613 }
614
615 /* If this insn is setting a MEM from a register equivalent to it,
616 this is the equivalencing insn. */
617 else if (set && GET_CODE (SET_DEST (set)) == MEM
618 && GET_CODE (SET_SRC (set)) == REG
619 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
620 && rtx_equal_p (SET_DEST (set),
621 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
622 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
623
624 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
625 scan_paradoxical_subregs (PATTERN (insn));
626 }
627
628 /* Does this function require a frame pointer? */
629
630 frame_pointer_needed = (! flag_omit_frame_pointer
631#ifdef EXIT_IGNORE_STACK
632 /* ?? If EXIT_IGNORE_STACK is set, we will not save
633 and restore sp for alloca. So we can't eliminate
634 the frame pointer in that case. At some point,
635 we should improve this by emitting the
636 sp-adjusting insns for this case. */
637 || (current_function_calls_alloca
638 && EXIT_IGNORE_STACK)
639#endif
640 || FRAME_POINTER_REQUIRED);
641
642 num_eliminable = 0;
643
644 /* Initialize the table of registers to eliminate. The way we do this
645 depends on how the eliminable registers were defined. */
646#ifdef ELIMINABLE_REGS
647 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
648 {
649 ep->can_eliminate = ep->can_eliminate_previous
650 = (CAN_ELIMINATE (ep->from, ep->to)
651 && (ep->from != FRAME_POINTER_REGNUM || ! frame_pointer_needed));
652 }
653#else
654 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
655 = ! frame_pointer_needed;
656#endif
657
658 /* Count the number of eliminable registers and build the FROM and TO
 REG rtx's. Note that code in gen_rtx will cause, e.g.,
660 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
661 We depend on this. */
662 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
663 {
664 num_eliminable += ep->can_eliminate;
665 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
666 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
667 }
668
669 num_labels = max_label_num () - get_first_label_num ();
670
671 /* Allocate the tables used to store offset information at labels. */
672 offsets_known_at = (char *) alloca (num_labels);
673 offsets_at
674 = (int (*)[NUM_ELIMINABLE_REGS])
675 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
676
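 /* Bias both tables by the first label number so that they can be indexed
    directly by the number of a label (see the comment above offsets_at).  */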
677 offsets_known_at -= get_first_label_num ();
678 offsets_at -= get_first_label_num ();
679
680 /* Alter each pseudo-reg rtx to contain its hard reg number.
681 Assign stack slots to the pseudos that lack hard regs or equivalents.
682 Do not touch virtual registers. */
683
684 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
685 alter_reg (i, -1);
686
687 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
688 because the stack size may be a part of the offset computation for
689 register elimination. */
690 assign_stack_local (BLKmode, 0, 0);
691
692 /* If we have some registers we think can be eliminated, scan all insns to
693 see if there is an insn that sets one of these registers to something
694 other than itself plus a constant. If so, the register cannot be
695 eliminated. Doing this scan here eliminates an extra pass through the
696 main reload loop in the most common case where register elimination
697 cannot be done. */
698 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
699 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
700 || GET_CODE (insn) == CALL_INSN)
701 note_stores (PATTERN (insn), mark_not_eliminable);
702
703#ifndef REGISTER_CONSTRAINTS
704 /* If all the pseudo regs have hard regs,
705 except for those that are never referenced,
706 we know that no reloads are needed. */
707 /* But that is not true if there are register constraints, since
708 in that case some pseudos might be in the wrong kind of hard reg. */
709
710 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
711 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
712 break;
713
 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
715 return;
716#endif
717
718 /* Compute the order of preference for hard registers to spill.
719 Store them by decreasing preference in potential_reload_regs. */
720
721 order_regs_for_reload ();
722
723 /* So far, no hard regs have been spilled. */
724 n_spills = 0;
725 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
726 spill_reg_order[i] = -1;
727
728 /* On most machines, we can't use any register explicitly used in the
729 rtl as a spill register. But on some, we have to. Those will have
730 taken care to keep the life of hard regs as short as possible. */
731
732#ifdef SMALL_REGISTER_CLASSES
733 CLEAR_HARD_REG_SET (forbidden_regs);
734#else
735 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
736#endif
737
738 /* Spill any hard regs that we know we can't eliminate. */
739 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
740 if (! ep->can_eliminate)
741 {
742 spill_hard_reg (ep->from, global, dumpfile, 1);
743 regs_ever_live[ep->from] = 1;
744 }
745
746 if (global)
747 for (i = 0; i < N_REG_CLASSES; i++)
748 {
749 basic_block_needs[i] = (char *)alloca (n_basic_blocks);
750 bzero (basic_block_needs[i], n_basic_blocks);
751 }
752
753 /* This loop scans the entire function each go-round
754 and repeats until one repetition spills no additional hard regs. */
755
 /* This flag is set when a pseudo reg is spilled,
757 to require another pass. Note that getting an additional reload
758 reg does not necessarily imply any pseudo reg was spilled;
759 sometimes we find a reload reg that no pseudo reg was allocated in. */
760 something_changed = 1;
761 /* This flag is set if there are any insns that require reloading. */
762 something_needs_reloads = 0;
763 /* This flag is set if there are any insns that require register
764 eliminations. */
765 something_needs_elimination = 0;
766 while (something_changed)
767 {
768 rtx after_call = 0;
769
770 /* For each class, number of reload regs needed in that class.
771 This is the maximum over all insns of the needs in that class
772 of the individual insn. */
773 int max_needs[N_REG_CLASSES];
774 /* For each class, size of group of consecutive regs
775 that is needed for the reloads of this class. */
776 int group_size[N_REG_CLASSES];
777 /* For each class, max number of consecutive groups needed.
778 (Each group contains group_size[CLASS] consecutive registers.) */
779 int max_groups[N_REG_CLASSES];
780 /* For each class, max number needed of regs that don't belong
781 to any of the groups. */
782 int max_nongroups[N_REG_CLASSES];
783 /* For each class, the machine mode which requires consecutive
784 groups of regs of that class.
785 If two different modes ever require groups of one class,
786 they must be the same size and equally restrictive for that class,
787 otherwise we can't handle the complexity. */
788 enum machine_mode group_mode[N_REG_CLASSES];
789 /* Record the insn where each maximum need is first found. */
790 rtx max_needs_insn[N_REG_CLASSES];
791 rtx max_groups_insn[N_REG_CLASSES];
792 rtx max_nongroups_insn[N_REG_CLASSES];
 rtx x;
 int starting_frame_size = get_frame_size ();
795
796 something_changed = 0;
797 bzero (max_needs, sizeof max_needs);
798 bzero (max_groups, sizeof max_groups);
799 bzero (max_nongroups, sizeof max_nongroups);
800 bzero (max_needs_insn, sizeof max_needs_insn);
801 bzero (max_groups_insn, sizeof max_groups_insn);
802 bzero (max_nongroups_insn, sizeof max_nongroups_insn);
803 bzero (group_size, sizeof group_size);
804 for (i = 0; i < N_REG_CLASSES; i++)
805 group_mode[i] = VOIDmode;
806
807 /* Keep track of which basic blocks are needing the reloads. */
808 this_block = 0;
809
810 /* Remember whether any element of basic_block_needs
811 changes from 0 to 1 in this pass. */
812 new_basic_block_needs = 0;
813
814 /* Reset all offsets on eliminable registers to their initial values. */
815#ifdef ELIMINABLE_REGS
816 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
817 {
818 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
819 ep->previous_offset = ep->offset
820 = ep->max_offset = ep->initial_offset;
821 }
822#else
823#ifdef INITIAL_FRAME_POINTER_OFFSET
824 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
825#else
826 if (!FRAME_POINTER_REQUIRED)
827 abort ();
828 reg_eliminate[0].initial_offset = 0;
829#endif
 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
831 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
832#endif
833
834 num_not_at_initial_offset = 0;
835
836 bzero (&offsets_known_at[get_first_label_num ()], num_labels);
837
838 /* Set a known offset for each forced label to be at the initial offset
839 of each elimination. We do this because we assume that all
840 computed jumps occur from a location where each elimination is
841 at its initial offset. */
842
843 for (x = forced_labels; x; x = XEXP (x, 1))
844 if (XEXP (x, 0))
 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
846
847 /* For each pseudo register that has an equivalent location defined,
848 try to eliminate any eliminable registers (such as the frame pointer)
849 assuming initial offsets for the replacement register, which
850 is the normal case.
851
852 If the resulting location is directly addressable, substitute
853 the MEM we just got directly for the old REG.
854
855 If it is not addressable but is a constant or the sum of a hard reg
856 and constant, it is probably not addressable because the constant is
857 out of range, in that case record the address; we will generate
858 hairy code to compute the address in a register each time it is
 needed.
860
861 If the location is not addressable, but does not have one of the
862 above forms, assign a stack slot. We have to do this to avoid the
863 potential of producing lots of reloads if, e.g., a location involves
864 a pseudo that didn't get a hard register and has an equivalent memory
865 location that also involves a pseudo that didn't get a hard register.
866
867 Perhaps at some point we will improve reload_when_needed handling
868 so this problem goes away. But that's very hairy. */
869
870 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
871 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
872 {
 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
874
875 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
876 XEXP (x, 0)))
877 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
878 else if (CONSTANT_P (XEXP (x, 0))
879 || (GET_CODE (XEXP (x, 0)) == PLUS
880 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
881 && (REGNO (XEXP (XEXP (x, 0), 0))
882 < FIRST_PSEUDO_REGISTER)
883 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
884 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
885 else
886 {
887 /* Make a new stack slot. Then indicate that something
 changed so we go back and recompute offsets for
889 eliminable registers because the allocation of memory
890 below might change some offset. reg_equiv_{mem,address}
891 will be set up for this pseudo on the next pass around
892 the loop. */
893 reg_equiv_memory_loc[i] = 0;
894 reg_equiv_init[i] = 0;
895 alter_reg (i, -1);
896 something_changed = 1;
897 }
898 }

 /* If we allocated another pseudo to the stack, redo elimination
901 bookkeeping. */
902 if (something_changed)
903 continue;
904
905 /* If caller-saves needs a group, initialize the group to include
906 the size and mode required for caller-saves. */
907
908 if (caller_save_group_size > 1)
909 {
910 group_mode[(int) caller_save_spill_class] = Pmode;
911 group_size[(int) caller_save_spill_class] = caller_save_group_size;
912 }
913
914 /* Compute the most additional registers needed by any instruction.
915 Collect information separately for each class of regs. */
916
917 for (insn = first; insn; insn = NEXT_INSN (insn))
918 {
919 if (global && this_block + 1 < n_basic_blocks
920 && insn == basic_block_head[this_block+1])
921 ++this_block;
922
923 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
924 might include REG_LABEL), we need to see what effects this
925 has on the known offsets at labels. */
926
927 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
928 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
929 && REG_NOTES (insn) != 0))
930 set_label_offsets (insn, insn, 0);
931
932 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
933 {
934 /* Nonzero means don't use a reload reg that overlaps
935 the place where a function value can be returned. */
936 rtx avoid_return_reg = 0;
937
938 rtx old_body = PATTERN (insn);
939 int old_code = INSN_CODE (insn);
940 rtx old_notes = REG_NOTES (insn);
941 int did_elimination = 0;
942
943 /* Initially, count RELOAD_OTHER reloads.
944 Later, merge in the other kinds. */
945 int insn_needs[N_REG_CLASSES];
946 int insn_groups[N_REG_CLASSES];
947 int insn_total_groups = 0;
948
949 /* Count RELOAD_FOR_INPUT_RELOAD_ADDRESS reloads. */
950 int insn_needs_for_inputs[N_REG_CLASSES];
951 int insn_groups_for_inputs[N_REG_CLASSES];
952 int insn_total_groups_for_inputs = 0;
953
954 /* Count RELOAD_FOR_OUTPUT_RELOAD_ADDRESS reloads. */
955 int insn_needs_for_outputs[N_REG_CLASSES];
956 int insn_groups_for_outputs[N_REG_CLASSES];
957 int insn_total_groups_for_outputs = 0;
958
959 /* Count RELOAD_FOR_OPERAND_ADDRESS reloads. */
960 int insn_needs_for_operands[N_REG_CLASSES];
961 int insn_groups_for_operands[N_REG_CLASSES];
962 int insn_total_groups_for_operands = 0;
963
964#if 0 /* This wouldn't work nowadays, since optimize_bit_field
965 looks for non-strict memory addresses. */
966 /* Optimization: a bit-field instruction whose field
967 happens to be a byte or halfword in memory
968 can be changed to a move instruction. */
969
970 if (GET_CODE (PATTERN (insn)) == SET)
971 {
972 rtx dest = SET_DEST (PATTERN (insn));
973 rtx src = SET_SRC (PATTERN (insn));
974
975 if (GET_CODE (dest) == ZERO_EXTRACT
976 || GET_CODE (dest) == SIGN_EXTRACT)
977 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
978 if (GET_CODE (src) == ZERO_EXTRACT
979 || GET_CODE (src) == SIGN_EXTRACT)
980 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
981 }
982#endif
983
984 /* If needed, eliminate any eliminable registers. */
985 if (num_eliminable)
986 did_elimination = eliminate_regs_in_insn (insn, 0);
987
988#ifdef SMALL_REGISTER_CLASSES
989 /* Set avoid_return_reg if this is an insn
990 that might use the value of a function call. */
991 if (GET_CODE (insn) == CALL_INSN)
992 {
993 if (GET_CODE (PATTERN (insn)) == SET)
994 after_call = SET_DEST (PATTERN (insn));
995 else if (GET_CODE (PATTERN (insn)) == PARALLEL
996 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
997 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
998 else
999 after_call = 0;
1000 }
1001 else if (after_call != 0
1002 && !(GET_CODE (PATTERN (insn)) == SET
1003 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1004 {
1005 if (reg_mentioned_p (after_call, PATTERN (insn)))
1006 avoid_return_reg = after_call;
1007 after_call = 0;
1008 }
1009#endif /* SMALL_REGISTER_CLASSES */
1010
1011 /* Analyze the instruction. */
1012 find_reloads (insn, 0, spill_indirect_levels, global,
1013 spill_reg_order);
1014
1015 /* Remember for later shortcuts which insns had any reloads or
1016 register eliminations.
1017
1018 One might think that it would be worthwhile to mark insns
1019 that need register replacements but not reloads, but this is
1020 not safe because find_reloads may do some manipulation of
1021 the insn (such as swapping commutative operands), which would
1022 be lost when we restore the old pattern after register
1023 replacement. So the actions of find_reloads must be redone in
1024 subsequent passes or in reload_as_needed.
1025
1026 However, it is safe to mark insns that need reloads
1027 but not register replacement. */
1028
1029 PUT_MODE (insn, (did_elimination ? QImode
1030 : n_reloads ? HImode
1031 : VOIDmode));
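 /* The insn's mode field is used here only as a marker for later passes
    (a shortcut, per the comment above): QImode if the insn needed register
    eliminations, HImode if it needed reloads, VOIDmode if it needed
    neither.  */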
1032
1033 /* Discard any register replacements done. */
1034 if (did_elimination)
1035 {
1036 obstack_free (&reload_obstack, reload_firstobj);
1037 PATTERN (insn) = old_body;
1038 INSN_CODE (insn) = old_code;
1039 REG_NOTES (insn) = old_notes;
1040 something_needs_elimination = 1;
1041 }
1042
 /* If this insn has no reloads, we need not do anything except
 in the case of a CALL_INSN when we have caller-saves and
1045 caller-save needs reloads. */
1046
1047 if (n_reloads == 0
1048 && ! (GET_CODE (insn) == CALL_INSN
1049 && caller_save_spill_class != NO_REGS))
1050 continue;
1051
1052 something_needs_reloads = 1;
1053
1054 for (i = 0; i < N_REG_CLASSES; i++)
1055 {
1056 insn_needs[i] = 0, insn_groups[i] = 0;
1057 insn_needs_for_inputs[i] = 0, insn_groups_for_inputs[i] = 0;
1058 insn_needs_for_outputs[i] = 0, insn_groups_for_outputs[i] = 0;
1059 insn_needs_for_operands[i] = 0, insn_groups_for_operands[i] = 0;
1060 }
1061
1062 /* Count each reload once in every class
1063 containing the reload's own class. */
1064
1065 for (i = 0; i < n_reloads; i++)
1066 {
1067 register enum reg_class *p;
 enum reg_class class = reload_reg_class[i];
1069 int size;
1070 enum machine_mode mode;
1071 int *this_groups;
1072 int *this_needs;
1073 int *this_total_groups;
1074
1075 /* Don't count the dummy reloads, for which one of the
1076 regs mentioned in the insn can be used for reloading.
1077 Don't count optional reloads.
1078 Don't count reloads that got combined with others. */
1079 if (reload_reg_rtx[i] != 0
1080 || reload_optional[i] != 0
1081 || (reload_out[i] == 0 && reload_in[i] == 0
1082 && ! reload_secondary_p[i]))
1083 continue;
1084
1085 /* Show that a reload register of this class is needed
1086 in this basic block. We do not use insn_needs and
1087 insn_groups because they are overly conservative for
1088 this purpose. */
1089 if (global && ! basic_block_needs[(int) class][this_block])
1090 {
1091 basic_block_needs[(int) class][this_block] = 1;
1092 new_basic_block_needs = 1;
1093 }
1094
1095 /* Decide which time-of-use to count this reload for. */
1096 switch (reload_when_needed[i])
1097 {
1098 case RELOAD_OTHER:
1099 case RELOAD_FOR_OUTPUT:
1100 case RELOAD_FOR_INPUT:
1101 this_needs = insn_needs;
1102 this_groups = insn_groups;
1103 this_total_groups = &insn_total_groups;
1104 break;
1105
1106 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
1107 this_needs = insn_needs_for_inputs;
1108 this_groups = insn_groups_for_inputs;
1109 this_total_groups = &insn_total_groups_for_inputs;
1110 break;
1111
1112 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
1113 this_needs = insn_needs_for_outputs;
1114 this_groups = insn_groups_for_outputs;
1115 this_total_groups = &insn_total_groups_for_outputs;
1116 break;
1117
1118 case RELOAD_FOR_OPERAND_ADDRESS:
1119 this_needs = insn_needs_for_operands;
1120 this_groups = insn_groups_for_operands;
1121 this_total_groups = &insn_total_groups_for_operands;
1122 break;
1123 }
1124
1125 mode = reload_inmode[i];
1126 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1127 mode = reload_outmode[i];
 size = CLASS_MAX_NREGS (class, mode);
1129 if (size > 1)
1130 {
1131 enum machine_mode other_mode, allocate_mode;
1132
1133 /* Count number of groups needed separately from
1134 number of individual regs needed. */
1135 this_groups[(int) class]++;
1136 p = reg_class_superclasses[(int) class];
1137 while (*p != LIM_REG_CLASSES)
1138 this_groups[(int) *p++]++;
1139 (*this_total_groups)++;
1140
1141 /* Record size and mode of a group of this class. */
1142 /* If more than one size group is needed,
1143 make all groups the largest needed size. */
 if (group_size[(int) class] < size)
 {
 other_mode = group_mode[(int) class];
1147 allocate_mode = mode;
1148
1149 group_size[(int) class] = size;
1150 group_mode[(int) class] = mode;
1151 }
1152 else
1153 {
1154 other_mode = mode;
 allocate_mode = group_mode[(int) class];
1156 }
1157
1158 /* Crash if two dissimilar machine modes both need
1159 groups of consecutive regs of the same class. */
1160
1161 if (other_mode != VOIDmode
1162 && other_mode != allocate_mode
1163 && ! modes_equiv_for_class_p (allocate_mode,
1164 other_mode,
 class))
1166 abort ();
1167 }
1168 else if (size == 1)
1169 {
1170 this_needs[(int) class] += 1;
1171 p = reg_class_superclasses[(int) class];
1172 while (*p != LIM_REG_CLASSES)
1173 this_needs[(int) *p++] += 1;
1174 }
1175 else
1176 abort ();
1177 }
1178
1179 /* All reloads have been counted for this insn;
1180 now merge the various times of use.
1181 This sets insn_needs, etc., to the maximum total number
1182 of registers needed at any point in this insn. */
1183
1184 for (i = 0; i < N_REG_CLASSES; i++)
1185 {
1186 int this_max;
1187 this_max = insn_needs_for_inputs[i];
1188 if (insn_needs_for_outputs[i] > this_max)
1189 this_max = insn_needs_for_outputs[i];
1190 if (insn_needs_for_operands[i] > this_max)
1191 this_max = insn_needs_for_operands[i];
1192 insn_needs[i] += this_max;
1193 this_max = insn_groups_for_inputs[i];
1194 if (insn_groups_for_outputs[i] > this_max)
1195 this_max = insn_groups_for_outputs[i];
1196 if (insn_groups_for_operands[i] > this_max)
1197 this_max = insn_groups_for_operands[i];
1198 insn_groups[i] += this_max;
 }

1201 insn_total_groups += MAX (insn_total_groups_for_inputs,
1202 MAX (insn_total_groups_for_outputs,
1203 insn_total_groups_for_operands));
1204
1205 /* If this is a CALL_INSN and caller-saves will need
1206 a spill register, act as if the spill register is
1207 needed for this insn. However, the spill register
1208 can be used by any reload of this insn, so we only
1209 need do something if no need for that class has
 been recorded.
1211
1212 The assumption that every CALL_INSN will trigger a
1213 caller-save is highly conservative, however, the number
1214 of cases where caller-saves will need a spill register but
1215 a block containing a CALL_INSN won't need a spill register
1216 of that class should be quite rare.
1217
1218 If a group is needed, the size and mode of the group will
 have been set up at the beginning of this loop. */
1220
1221 if (GET_CODE (insn) == CALL_INSN
1222 && caller_save_spill_class != NO_REGS)
1223 {
1224 int *caller_save_needs
1225 = (caller_save_group_size > 1 ? insn_groups : insn_needs);
1226
1227 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1228 {
1229 register enum reg_class *p
1230 = reg_class_superclasses[(int) caller_save_spill_class];
1231
1232 caller_save_needs[(int) caller_save_spill_class]++;
1233
1234 while (*p != LIM_REG_CLASSES)
 caller_save_needs[(int) *p++] += 1;
1236 }
1237
1238 if (caller_save_group_size > 1)
1239 insn_total_groups = MAX (insn_total_groups, 1);
1240
1241
1242 /* Show that this basic block will need a register of
1243 this class. */
1244
1245 if (global
1246 && ! (basic_block_needs[(int) caller_save_spill_class]
1247 [this_block]))
1248 {
1249 basic_block_needs[(int) caller_save_spill_class]
1250 [this_block] = 1;
1251 new_basic_block_needs = 1;
1252 }
1253 }
1254
1255#ifdef SMALL_REGISTER_CLASSES
1256 /* If this insn stores the value of a function call,
1257 and that value is in a register that has been spilled,
1258 and if the insn needs a reload in a class
1259 that might use that register as the reload register,
 then add an extra need in that class.
1261 This makes sure we have a register available that does
1262 not overlap the return value. */
1263 if (avoid_return_reg)
1264 {
1265 int regno = REGNO (avoid_return_reg);
1266 int nregs
1267 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1268 int r;
1269 int inc_groups = 0;
1270 for (r = regno; r < regno + nregs; r++)
1271 if (spill_reg_order[r] >= 0)
1272 for (i = 0; i < N_REG_CLASSES; i++)
1273 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1274 {
1275 if (insn_needs[i] > 0)
1276 insn_needs[i]++;
1277 if (insn_groups[i] > 0
1278 && nregs > 1)
1279 inc_groups = 1;
1280 }
1281 if (inc_groups)
1282 insn_groups[i]++;
1283 }
1284#endif /* SMALL_REGISTER_CLASSES */
1285
1286 /* For each class, collect maximum need of any insn. */
1287
1288 for (i = 0; i < N_REG_CLASSES; i++)
1289 {
1290 if (max_needs[i] < insn_needs[i])
1291 {
1292 max_needs[i] = insn_needs[i];
1293 max_needs_insn[i] = insn;
1294 }
 if (max_groups[i] < insn_groups[i])
1296 {
1297 max_groups[i] = insn_groups[i];
1298 max_groups_insn[i] = insn;
1299 }
1300 if (insn_total_groups > 0)
1301 if (max_nongroups[i] < insn_needs[i])
1302 {
1303 max_nongroups[i] = insn_needs[i];
1304 max_nongroups_insn[i] = insn;
1305 }
1306 }
1307 }
1308 /* Note that there is a continue statement above. */
1309 }
1310
1311 /* If we allocated any new memory locations, make another pass
1312 since it might have changed elimination offsets. */
1313 if (starting_frame_size != get_frame_size ())
1314 something_changed = 1;
1315
 /* If we have caller-saves, set up the save areas and see if caller-save
 will need a spill register. */

 if (caller_save_needed
1320 && ! setup_save_areas (&something_changed)
1321 && caller_save_spill_class == NO_REGS)
 {
1323 /* The class we will need depends on whether the machine
1324 supports the sum of two registers for an address; see
1325 find_address_reloads for details. */
1326
 caller_save_spill_class
1328 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1329 caller_save_group_size
1330 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1331 something_changed = 1;
1332 }
1333
1334 /* Now deduct from the needs for the registers already
1335 available (already spilled). */
1336
1337 CLEAR_HARD_REG_SET (counted_for_groups);
1338 CLEAR_HARD_REG_SET (counted_for_nongroups);
1339
1340 /* First find all regs alone in their class
1341 and count them (if desired) for non-groups.
1342 We would be screwed if a group took the only reg in a class
 for which a non-group reload is needed.
1344 (Note there is still a bug; if a class has 2 regs,
1345 both could be stolen by groups and we would lose the same way.
1346 With luck, no machine will need a nongroup in a 2-reg class.) */
1347
1348 for (i = 0; i < n_spills; i++)
1349 {
1350 register enum reg_class *p;
1351 class = (int) REGNO_REG_CLASS (spill_regs[i]);
1352
1353 if (reg_class_size[class] == 1 && max_nongroups[class] > 0)
1354 {
1355 max_needs[class]--;
1356 p = reg_class_superclasses[class];
1357 while (*p != LIM_REG_CLASSES)
1358 max_needs[(int) *p++]--;
1359
1360 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1361 max_nongroups[class]--;
1362 p = reg_class_superclasses[class];
1363 while (*p != LIM_REG_CLASSES)
1364 {
1365 if (max_nongroups[(int) *p] > 0)
1366 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1367 max_nongroups[(int) *p++]--;
1368 }
1369 }
1370 }
1371
1372 /* Now find all consecutive groups of spilled registers
1373 and mark each group off against the need for such groups.
1374 But don't count them against ordinary need, yet. */
1375
1376 count_possible_groups (group_size, group_mode, max_groups);
1377
1378 /* Now count all spill regs against the individual need,
 This includes those counted above for groups,
1380 but not those previously counted for nongroups.
1381
1382 Those that weren't counted_for_groups can also count against
1383 the not-in-group need. */
1384
1385 for (i = 0; i < n_spills; i++)
1386 {
1387 register enum reg_class *p;
1388 class = (int) REGNO_REG_CLASS (spill_regs[i]);
1389
1390 /* Those counted at the beginning shouldn't be counted twice. */
1391 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
1392 {
1393 max_needs[class]--;
1394 p = reg_class_superclasses[class];
1395 while (*p != LIM_REG_CLASSES)
1396 max_needs[(int) *p++]--;
1397
1398 if (! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i]))
1399 {
1400 if (max_nongroups[class] > 0)
1401 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1402 max_nongroups[class]--;
1403 p = reg_class_superclasses[class];
1404 while (*p != LIM_REG_CLASSES)
1405 {
1406 if (max_nongroups[(int) *p] > 0)
1407 SET_HARD_REG_BIT (counted_for_nongroups,
1408 spill_regs[i]);
1409 max_nongroups[(int) *p++]--;
1410 }
1411 }
1412 }
1413 }
1414
1415 /* See if anything that happened changes which eliminations are valid.
1416 For example, on the Sparc, whether or not the frame pointer can
1417 be eliminated can depend on what registers have been used. We need
1418 not check some conditions again (such as flag_omit_frame_pointer)
1419 since they can't have changed. */
1420
1421 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1422 if ((ep->from == FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1423#ifdef ELIMINABLE_REGS
1424 || ! CAN_ELIMINATE (ep->from, ep->to)
1425#endif
1426 )
1427 ep->can_eliminate = 0;
1428
1429 /* Look for the case where we have discovered that we can't replace
1430 register A with register B and that means that we will now be
1431 trying to replace register A with register C. This means we can
1432 no longer replace register C with register B and we need to disable
1433 such an elimination, if it exists. This occurs often with A == ap,
1434 B == sp, and C == fp. */

 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1437 {
1438 struct elim_table *op;
1439 register int new_to = -1;
1440
1441 if (! ep->can_eliminate && ep->can_eliminate_previous)
1442 {
1443 /* Find the current elimination for ep->from, if there is a
1444 new one. */
1445 for (op = reg_eliminate;
1446 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1447 if (op->from == ep->from && op->can_eliminate)
1448 {
1449 new_to = op->to;
1450 break;
1451 }
1452
1453 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1454 disable it. */
1455 for (op = reg_eliminate;
1456 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1457 if (op->from == new_to && op->to == ep->to)
1458 op->can_eliminate = 0;
1459 }
1460 }
1461
1462 /* See if any registers that we thought we could eliminate the previous
1463 time are no longer eliminable. If so, something has changed and we
1464 must spill the register. Also, recompute the number of eliminable
1465 registers and see if the frame pointer is needed; it is if there is
1466 no elimination of the frame pointer that we can perform. */
1467
1468 frame_pointer_needed = 1;
1469 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1470 {
1471 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM)
1472 frame_pointer_needed = 0;
1473
1474 if (! ep->can_eliminate && ep->can_eliminate_previous)
1475 {
1476 ep->can_eliminate_previous = 0;
1477 spill_hard_reg (ep->from, global, dumpfile, 1);
1478 regs_ever_live[ep->from] = 1;
1479 something_changed = 1;
1480 num_eliminable--;
1481 }
1482 }
1483
1484 /* If all needs are met, we win. */
1485
1486 for (i = 0; i < N_REG_CLASSES; i++)
1487 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1488 break;
1489 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1490 break;
1491
1492 /* Not all needs are met; must spill more hard regs. */
1493
1494 /* If any element of basic_block_needs changed from 0 to 1,
1495 re-spill all the regs already spilled. This may spill
1496 additional pseudos that didn't spill before. */
1497
1498 if (new_basic_block_needs)
1499 for (i = 0; i < n_spills; i++)
1500 something_changed
1501 |= spill_hard_reg (spill_regs[i], global, dumpfile, 0);
1502
1503 /* Now find more reload regs to satisfy the remaining need
1504 Do it by ascending class number, since otherwise a reg
1505 might be spilled for a big class and might fail to count
1506 for a smaller class even though it belongs to that class.
1507
1508 Count spilled regs in `spills', and add entries to
1509 `spill_regs' and `spill_reg_order'.
1510
1511 ??? Note there is a problem here.
1512 When there is a need for a group in a high-numbered class,
1513 and also need for non-group regs that come from a lower class,
1514 the non-group regs are chosen first. If there aren't many regs,
1515 they might leave no room for a group.
1516
1517 This was happening on the 386. To fix it, we added the code
1518 that calls possible_group_p, so that the lower class won't
1519 break up the last possible group.
1520
1521 Really fixing the problem would require changes above
1522 in counting the regs already spilled, and in choose_reload_regs.
1523 It might be hard to avoid introducing bugs there. */
1524
1525 for (class = 0; class < N_REG_CLASSES; class++)
1526 {
1527 /* First get the groups of registers.
1528 If we got single registers first, we might fragment
1529 possible groups. */
1530 while (max_groups[class] > 0)
1531 {
1532 /* If any single spilled regs happen to form groups,
1533 count them now. Maybe we don't really need
1534 to spill another group. */
1535 count_possible_groups (group_size, group_mode, max_groups);
1536
1537 /* Groups of size 2 (the only groups used on most machines)
1538 are treated specially. */
1539 if (group_size[class] == 2)
1540 {
1541 /* First, look for a register that will complete a group. */
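		  /* That is, prefer a register adjacent to one already
		     spilled; e.g. if reg 4 is already a spill reg, spilling
		     reg 5 yields the pair (4,5).  (Register numbers here
		     are only illustrative.)  */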
1542 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1543 {
1544 int j = potential_reload_regs[i];
1545 int other;
1546 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1547 &&
1548 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1549 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1550 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1551 && HARD_REGNO_MODE_OK (other, group_mode[class])
1552 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1553 other)
1554 /* We don't want one part of another group.
1555 We could get "two groups" that overlap! */
1556 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1557 ||
1558 (j < FIRST_PSEUDO_REGISTER - 1
1559 && (other = j + 1, spill_reg_order[other] >= 0)
1560 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1561 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1562 && HARD_REGNO_MODE_OK (j, group_mode[class])
1563 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1564 other)
1565 && ! TEST_HARD_REG_BIT (counted_for_groups,
1566 other))))
1567 {
1568 register enum reg_class *p;
1569
1570 /* We have found one that will complete a group,
1571 so count off one group as provided. */
1572 max_groups[class]--;
1573 p = reg_class_superclasses[class];
1574 while (*p != LIM_REG_CLASSES)
1575 max_groups[(int) *p++]--;
1576
1577 /* Indicate both these regs are part of a group. */
1578 SET_HARD_REG_BIT (counted_for_groups, j);
1579 SET_HARD_REG_BIT (counted_for_groups, other);
1580 break;
1581 }
1582 }
1583 /* We can't complete a group, so start one. */
1584 if (i == FIRST_PSEUDO_REGISTER)
1585 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1586 {
1587 int j = potential_reload_regs[i];
1588 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1589 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1590 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1591 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1592 && HARD_REGNO_MODE_OK (j, group_mode[class])
1593 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1594 j + 1))
1595 break;
1596 }
1597
1598 /* I should be the index in potential_reload_regs
1599 of the new reload reg we have found. */
1600
1601 if (i >= FIRST_PSEUDO_REGISTER)
1602 {
1603 /* There are no groups left to spill. */
1604 spill_failure (max_groups_insn[class]);
1605 failure = 1;
1606 goto failed;
1607 }
1608 else
1609 something_changed
fb3821f7 1610 |= new_spill_reg (i, class, max_needs, NULL_PTR,
5352b11a 1611 global, dumpfile);
1612 }
1613 else
1614 {
1615 /* For groups of more than 2 registers,
1616 look for a sufficient sequence of unspilled registers,
1617 and spill them all at once. */
1618 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1619 {
1620 int j = potential_reload_regs[i];
1621 int k;
1622 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1623 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1624 {
1625 /* Check each reg in the sequence. */
1626 for (k = 0; k < group_size[class]; k++)
1627 if (! (spill_reg_order[j + k] < 0
1628 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1629 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1630 break;
1631 /* We got a full sequence, so spill them all. */
1632 if (k == group_size[class])
1633 {
1634 register enum reg_class *p;
1635 for (k = 0; k < group_size[class]; k++)
1636 {
1637 int idx;
1638 SET_HARD_REG_BIT (counted_for_groups, j + k);
1639 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1640 if (potential_reload_regs[idx] == j + k)
1641 break;
1642 if (i >= FIRST_PSEUDO_REGISTER)
1643 {
1644 /* There are no groups left. */
1645 spill_failure (max_groups_insn[class]);
1646 failure = 1;
1647 goto failed;
1648 }
1649 else
1650 something_changed
1651 |= new_spill_reg (idx, class,
1652 max_needs, NULL_PTR,
5352b11a 1653 global, dumpfile);
1654 }
1655
1656 /* We have found one that will complete a group,
1657 so count off one group as provided. */
1658 max_groups[class]--;
1659 p = reg_class_superclasses[class];
1660 while (*p != LIM_REG_CLASSES)
1661 max_groups[(int) *p++]--;
1662
1663 break;
1664 }
1665 }
1666 }
1667 /* We couldn't find any registers for this reload.
1668 Abort to avoid going into an infinite loop. */
1669 if (i == FIRST_PSEUDO_REGISTER)
1670 abort ();
1671 }
1672 }
1673
1674 /* Now similarly satisfy all need for single registers. */
1675
1676 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1677 {
1678 /* Consider the potential reload regs that aren't
1679 yet in use as reload regs, in order of preference.
1680 Find the most preferred one that's in this class. */
1681
1682 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1683 if (potential_reload_regs[i] >= 0
1684 && TEST_HARD_REG_BIT (reg_class_contents[class],
1685 potential_reload_regs[i])
1686 /* If this reg will not be available for groups,
1687 pick one that does not foreclose possible groups.
1688 This is a kludge, and not very general,
1689 but it should be sufficient to make the 386 work,
1690 and the problem should not occur on machines with
1691 more registers. */
1692 && (max_nongroups[class] == 0
1693 || possible_group_p (potential_reload_regs[i], max_groups)))
1694 break;
1695
1696 /* I should be the index in potential_reload_regs
1697 of the new reload reg we have found. */
1698
1699 if (i >= FIRST_PSEUDO_REGISTER)
1700 {
1701 /* There are no possible registers left to spill. */
1702 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1703 : max_nongroups_insn[class]);
1704 failure = 1;
1705 goto failed;
1706 }
1707 else
1708 something_changed
1709 |= new_spill_reg (i, class, max_needs, max_nongroups,
1710 global, dumpfile);
1711 }
1712 }
1713 }
1714
1715 /* If global-alloc was run, notify it of any register eliminations we have
1716 done. */
1717 if (global)
1718 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1719 if (ep->can_eliminate)
1720 mark_elimination (ep->from, ep->to);
1721
1722 /* From now on, we need to emit any moves without making new pseudos. */
1723 reload_in_progress = 1;
1724
1725 /* Insert code to save and restore call-clobbered hard regs
 1726	   around calls.  Tell what mode to use so that we will process
1727 those insns in reload_as_needed if we have to. */
1728
1729 if (caller_save_needed)
1730 save_call_clobbered_regs (num_eliminable ? QImode
1731 : caller_save_spill_class != NO_REGS ? HImode
1732 : VOIDmode);
1733
1734 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1735 If that insn didn't set the register (i.e., it copied the register to
1736 memory), just delete that insn instead of the equivalencing insn plus
1737 anything now dead. If we call delete_dead_insn on that insn, we may
 1738     delete the insn that actually sets the register if the register dies
1739 there and that is incorrect. */
1740
1741 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1742 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1743 && GET_CODE (reg_equiv_init[i]) != NOTE)
1744 {
1745 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1746 delete_dead_insn (reg_equiv_init[i]);
1747 else
1748 {
1749 PUT_CODE (reg_equiv_init[i], NOTE);
1750 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1751 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1752 }
1753 }
1754
1755 /* Use the reload registers where necessary
1756 by generating move instructions to move the must-be-register
1757 values into or out of the reload registers. */
1758
1759 if (something_needs_reloads || something_needs_elimination
1760 || (caller_save_needed && num_eliminable)
1761 || caller_save_spill_class != NO_REGS)
1762 reload_as_needed (first, global);
1763
1764 reload_in_progress = 0;
1765
1766 /* Come here (with failure set nonzero) if we can't get enough spill regs
1767 and we decide not to abort about it. */
1768 failed:
1769
1770 /* Now eliminate all pseudo regs by modifying them into
1771 their equivalent memory references.
1772 The REG-rtx's for the pseudos are modified in place,
1773 so all insns that used to refer to them now refer to memory.
1774
1775 For a reg that has a reg_equiv_address, all those insns
1776 were changed by reloading so that no insns refer to it any longer;
1777 but the DECL_RTL of a variable decl may refer to it,
1778 and if so this causes the debugging info to mention the variable. */
1779
1780 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1781 {
1782 rtx addr = 0;
ab1fd483 1783 int in_struct = 0;
32131a9c 1784 if (reg_equiv_mem[i])
1785 {
1786 addr = XEXP (reg_equiv_mem[i], 0);
1787 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1788 }
1789 if (reg_equiv_address[i])
1790 addr = reg_equiv_address[i];
1791 if (addr)
1792 {
1793 if (reg_renumber[i] < 0)
1794 {
1795 rtx reg = regno_reg_rtx[i];
1796 XEXP (reg, 0) = addr;
1797 REG_USERVAR_P (reg) = 0;
ab1fd483 1798 MEM_IN_STRUCT_P (reg) = in_struct;
1799 PUT_CODE (reg, MEM);
1800 }
1801 else if (reg_equiv_mem[i])
1802 XEXP (reg_equiv_mem[i], 0) = addr;
1803 }
1804 }
1805
1806#ifdef PRESERVE_DEATH_INFO_REGNO_P
1807 /* Make a pass over all the insns and remove death notes for things that
1808 are no longer registers or no longer die in the insn (e.g., an input
1809 and output pseudo being tied). */
1810
1811 for (insn = first; insn; insn = NEXT_INSN (insn))
1812 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1813 {
1814 rtx note, next;
1815
1816 for (note = REG_NOTES (insn); note; note = next)
1817 {
1818 next = XEXP (note, 1);
1819 if (REG_NOTE_KIND (note) == REG_DEAD
1820 && (GET_CODE (XEXP (note, 0)) != REG
1821 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1822 remove_note (insn, note);
1823 }
1824 }
1825#endif
1826
1827 /* Indicate that we no longer have known memory locations or constants. */
1828 reg_equiv_constant = 0;
1829 reg_equiv_memory_loc = 0;
1830
1831 return failure;
1832}
1833\f
1834/* Nonzero if, after spilling reg REGNO for non-groups,
1835 it will still be possible to find a group if we still need one. */
1836
1837static int
1838possible_group_p (regno, max_groups)
1839 int regno;
1840 int *max_groups;
1841{
1842 int i;
1843 int class = (int) NO_REGS;
1844
1845 for (i = 0; i < (int) N_REG_CLASSES; i++)
1846 if (max_groups[i] > 0)
1847 {
1848 class = i;
1849 break;
1850 }
1851
1852 if (class == (int) NO_REGS)
1853 return 1;
1854
1855 /* Consider each pair of consecutive registers. */
1856 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
1857 {
1858 /* Ignore pairs that include reg REGNO. */
1859 if (i == regno || i + 1 == regno)
1860 continue;
1861
1862 /* Ignore pairs that are outside the class that needs the group.
1863 ??? Here we fail to handle the case where two different classes
1864 independently need groups. But this never happens with our
1865 current machine descriptions. */
1866 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
1867 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
1868 continue;
1869
1870 /* A pair of consecutive regs we can still spill does the trick. */
1871 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
1872 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
1873 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
1874 return 1;
1875
1876 /* A pair of one already spilled and one we can spill does it
1877 provided the one already spilled is not otherwise reserved. */
1878 if (spill_reg_order[i] < 0
1879 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
1880 && spill_reg_order[i + 1] >= 0
1881 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
1882 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
1883 return 1;
1884 if (spill_reg_order[i + 1] < 0
1885 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
1886 && spill_reg_order[i] >= 0
1887 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
1888 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
1889 return 1;
1890 }
1891
1892 return 0;
1893}
1894\f
1895/* Count any groups that can be formed from the registers recently spilled.
1896 This is done class by class, in order of ascending class number. */
1897
1898static void
1899count_possible_groups (group_size, group_mode, max_groups)
1900 int *group_size, *max_groups;
1901 enum machine_mode *group_mode;
1902{
1903 int i;
1904 /* Now find all consecutive groups of spilled registers
1905 and mark each group off against the need for such groups.
1906 But don't count them against ordinary need, yet. */
1907
1908 for (i = 0; i < N_REG_CLASSES; i++)
1909 if (group_size[i] > 1)
1910 {
1911 char regmask[FIRST_PSEUDO_REGISTER];
1912 int j;
1913
1914 bzero (regmask, sizeof regmask);
1915 /* Make a mask of all the regs that are spill regs in class I. */
1916 for (j = 0; j < n_spills; j++)
1917 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
1918 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
1919 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1920 spill_regs[j]))
1921 regmask[spill_regs[j]] = 1;
1922 /* Find each consecutive group of them. */
1923 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
1924 if (regmask[j] && j + group_size[i] <= FIRST_PSEUDO_REGISTER
1925 /* Next line in case group-mode for this class
1926 demands an even-odd pair. */
1927 && HARD_REGNO_MODE_OK (j, group_mode[i]))
1928 {
1929 int k;
1930 for (k = 1; k < group_size[i]; k++)
1931 if (! regmask[j + k])
1932 break;
1933 if (k == group_size[i])
1934 {
1935 /* We found a group. Mark it off against this class's
1936 need for groups, and against each superclass too. */
1937 register enum reg_class *p;
1938 max_groups[i]--;
1939 p = reg_class_superclasses[i];
1940 while (*p != LIM_REG_CLASSES)
1941 max_groups[(int) *p++]--;
a8fdc208 1942 /* Don't count these registers again. */
1943 for (k = 0; k < group_size[i]; k++)
1944 SET_HARD_REG_BIT (counted_for_groups, j + k);
1945 }
1946 /* Skip to the last reg in this group. When j is incremented
1947 above, it will then point to the first reg of the next
1948 possible group. */
1949 j += k - 1;
1950 }
1951 }
1952
1953}
1954\f
1955/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
1956 another mode that needs to be reloaded for the same register class CLASS.
1957 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
1958 ALLOCATE_MODE will never be smaller than OTHER_MODE.
1959
1960 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
1961 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
1962 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
1963 causes unnecessary failures on machines requiring alignment of register
1964 groups when the two modes are different sizes, because the larger mode has
1965 more strict alignment rules than the smaller mode. */
1966
1967static int
1968modes_equiv_for_class_p (allocate_mode, other_mode, class)
1969 enum machine_mode allocate_mode, other_mode;
1970 enum reg_class class;
1971{
1972 register int regno;
1973 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1974 {
1975 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
1976 && HARD_REGNO_MODE_OK (regno, allocate_mode)
1977 && ! HARD_REGNO_MODE_OK (regno, other_mode))
1978 return 0;
1979 }
1980 return 1;
1981}
1982
1983/* Handle the failure to find a register to spill.
1984 INSN should be one of the insns which needed this particular spill reg. */
1985
1986static void
1987spill_failure (insn)
1988 rtx insn;
1989{
1990 if (asm_noperands (PATTERN (insn)) >= 0)
1991 error_for_asm (insn, "`asm' needs too many reloads");
1992 else
1993 abort ();
1994}
1995
1996/* Add a new register to the tables of available spill-registers
1997 (as well as spilling all pseudos allocated to the register).
1998 I is the index of this register in potential_reload_regs.
1999 CLASS is the regclass whose need is being satisfied.
2000 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2001 so that this register can count off against them.
2002 MAX_NONGROUPS is 0 if this register is part of a group.
2003 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2004
2005static int
2006new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2007 int i;
2008 int class;
2009 int *max_needs;
2010 int *max_nongroups;
2011 int global;
2012 FILE *dumpfile;
2013{
2014 register enum reg_class *p;
2015 int val;
2016 int regno = potential_reload_regs[i];
2017
2018 if (i >= FIRST_PSEUDO_REGISTER)
2019 abort (); /* Caller failed to find any register. */
2020
2021 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2022 fatal ("fixed or forbidden register was spilled.\n\
2023This may be due to a compiler bug or to impossible asm statements.");
2024
2025 /* Make reg REGNO an additional reload reg. */
2026
2027 potential_reload_regs[i] = -1;
2028 spill_regs[n_spills] = regno;
2029 spill_reg_order[regno] = n_spills;
2030 if (dumpfile)
2031 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2032
2033 /* Clear off the needs we just satisfied. */
2034
2035 max_needs[class]--;
2036 p = reg_class_superclasses[class];
2037 while (*p != LIM_REG_CLASSES)
2038 max_needs[(int) *p++]--;
2039
2040 if (max_nongroups && max_nongroups[class] > 0)
2041 {
2042 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2043 max_nongroups[class]--;
2044 p = reg_class_superclasses[class];
2045 while (*p != LIM_REG_CLASSES)
2046 max_nongroups[(int) *p++]--;
2047 }
2048
2049 /* Spill every pseudo reg that was allocated to this reg
2050 or to something that overlaps this reg. */
2051
2052 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2053
2054 /* If there are some registers still to eliminate and this register
2055 wasn't ever used before, additional stack space may have to be
2056 allocated to store this register. Thus, we may have changed the offset
2057 between the stack and frame pointers, so mark that something has changed.
2058 (If new pseudos were spilled, thus requiring more space, VAL would have
2059 been set non-zero by the call to spill_hard_reg above since additional
 2060     reloads may be needed in that case.)
2061
2062 One might think that we need only set VAL to 1 if this is a call-used
2063 register. However, the set of registers that must be saved by the
2064 prologue is not identical to the call-used set. For example, the
2065 register used by the call insn for the return PC is a call-used register,
2066 but must be saved by the prologue. */
2067 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2068 val = 1;
2069
2070 regs_ever_live[spill_regs[n_spills]] = 1;
2071 n_spills++;
2072
2073 return val;
2074}
2075\f
 2076/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2077 data that is dead in INSN. */
2078
2079static void
2080delete_dead_insn (insn)
2081 rtx insn;
2082{
2083 rtx prev = prev_real_insn (insn);
2084 rtx prev_dest;
2085
2086 /* If the previous insn sets a register that dies in our insn, delete it
2087 too. */
2088 if (prev && GET_CODE (PATTERN (prev)) == SET
2089 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2090 && reg_mentioned_p (prev_dest, PATTERN (insn))
2091 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2092 delete_dead_insn (prev);
2093
2094 PUT_CODE (insn, NOTE);
2095 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2096 NOTE_SOURCE_FILE (insn) = 0;
2097}
2098
2099/* Modify the home of pseudo-reg I.
2100 The new home is present in reg_renumber[I].
2101
2102 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2103 or it may be -1, meaning there is none or it is not relevant.
2104 This is used so that all pseudos spilled from a given hard reg
2105 can share one stack slot. */
2106
2107static void
2108alter_reg (i, from_reg)
2109 register int i;
2110 int from_reg;
2111{
2112 /* When outputting an inline function, this can happen
2113 for a reg that isn't actually used. */
2114 if (regno_reg_rtx[i] == 0)
2115 return;
2116
2117 /* If the reg got changed to a MEM at rtl-generation time,
2118 ignore it. */
2119 if (GET_CODE (regno_reg_rtx[i]) != REG)
2120 return;
2121
2122 /* Modify the reg-rtx to contain the new hard reg
2123 number or else to contain its pseudo reg number. */
2124 REGNO (regno_reg_rtx[i])
2125 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2126
2127 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2128 allocate a stack slot for it. */
2129
2130 if (reg_renumber[i] < 0
2131 && reg_n_refs[i] > 0
2132 && reg_equiv_constant[i] == 0
2133 && reg_equiv_memory_loc[i] == 0)
2134 {
2135 register rtx x;
2136 int inherent_size = PSEUDO_REGNO_BYTES (i);
2137 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2138 int adjust = 0;
2139
2140 /* Each pseudo reg has an inherent size which comes from its own mode,
2141 and a total size which provides room for paradoxical subregs
2142 which refer to the pseudo reg in wider modes.
2143
2144 We can use a slot already allocated if it provides both
2145 enough inherent space and enough total space.
2146 Otherwise, we allocate a new slot, making sure that it has no less
 2147	 inherent space, and no less total space, than the previous slot.  */
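      /* For instance, on a typical 32-bit target an SImode pseudo that is
	 also referenced through a DImode paradoxical subreg has
	 inherent_size == 4 but total_size == 8.  (Illustrative sizes.)  */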
2148 if (from_reg == -1)
2149 {
2150 /* No known place to spill from => no slot to reuse. */
2151 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2152#if BYTES_BIG_ENDIAN
2153 /* Cancel the big-endian correction done in assign_stack_local.
2154 Get the address of the beginning of the slot.
2155 This is so we can do a big-endian correction unconditionally
2156 below. */
2157 adjust = inherent_size - total_size;
2158#endif
2159 }
2160 /* Reuse a stack slot if possible. */
2161 else if (spill_stack_slot[from_reg] != 0
2162 && spill_stack_slot_width[from_reg] >= total_size
2163 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2164 >= inherent_size))
2165 x = spill_stack_slot[from_reg];
2166 /* Allocate a bigger slot. */
2167 else
2168 {
2169 /* Compute maximum size needed, both for inherent size
2170 and for total size. */
2171 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2172 if (spill_stack_slot[from_reg])
2173 {
2174 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2175 > inherent_size)
2176 mode = GET_MODE (spill_stack_slot[from_reg]);
2177 if (spill_stack_slot_width[from_reg] > total_size)
2178 total_size = spill_stack_slot_width[from_reg];
2179 }
2180 /* Make a slot with that size. */
2181 x = assign_stack_local (mode, total_size, -1);
2182#if BYTES_BIG_ENDIAN
2183 /* Cancel the big-endian correction done in assign_stack_local.
2184 Get the address of the beginning of the slot.
2185 This is so we can do a big-endian correction unconditionally
2186 below. */
2187 adjust = GET_MODE_SIZE (mode) - total_size;
2188#endif
2189 spill_stack_slot[from_reg] = x;
2190 spill_stack_slot_width[from_reg] = total_size;
2191 }
2192
2193#if BYTES_BIG_ENDIAN
2194 /* On a big endian machine, the "address" of the slot
2195 is the address of the low part that fits its inherent mode. */
2196 if (inherent_size < total_size)
2197 adjust += (total_size - inherent_size);
2198#endif /* BYTES_BIG_ENDIAN */
2199
2200 /* If we have any adjustment to make, or if the stack slot is the
2201 wrong mode, make a new stack slot. */
2202 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2203 {
2204 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2205 plus_constant (XEXP (x, 0), adjust));
2206 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2207 }
2208
2209 /* Save the stack slot for later. */
2210 reg_equiv_memory_loc[i] = x;
2211 }
2212}
2213
2214/* Mark the slots in regs_ever_live for the hard regs
2215 used by pseudo-reg number REGNO. */
2216
2217void
2218mark_home_live (regno)
2219 int regno;
2220{
2221 register int i, lim;
2222 i = reg_renumber[regno];
2223 if (i < 0)
2224 return;
2225 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2226 while (i < lim)
2227 regs_ever_live[i++] = 1;
2228}
2229\f
2230/* This function handles the tracking of elimination offsets around branches.
2231
2232 X is a piece of RTL being scanned.
2233
2234 INSN is the insn that it came from, if any.
2235
2236 INITIAL_P is non-zero if we are to set the offset to be the initial
2237 offset and zero if we are setting the offset of the label to be the
2238 current offset. */
2239
2240static void
2241set_label_offsets (x, insn, initial_p)
2242 rtx x;
2243 rtx insn;
2244 int initial_p;
2245{
2246 enum rtx_code code = GET_CODE (x);
2247 rtx tem;
2248 int i;
2249 struct elim_table *p;
2250
2251 switch (code)
2252 {
2253 case LABEL_REF:
2254 if (LABEL_REF_NONLOCAL_P (x))
2255 return;
2256
2257 x = XEXP (x, 0);
2258
2259 /* ... fall through ... */
2260
2261 case CODE_LABEL:
2262 /* If we know nothing about this label, set the desired offsets. Note
2263 that this sets the offset at a label to be the offset before a label
2264 if we don't know anything about the label. This is not correct for
2265 the label after a BARRIER, but is the best guess we can make. If
2266 we guessed wrong, we will suppress an elimination that might have
2267 been possible had we been able to guess correctly. */
2268
2269 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2270 {
2271 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2272 offsets_at[CODE_LABEL_NUMBER (x)][i]
2273 = (initial_p ? reg_eliminate[i].initial_offset
2274 : reg_eliminate[i].offset);
2275 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2276 }
2277
2278 /* Otherwise, if this is the definition of a label and it is
d45cf215 2279 preceded by a BARRIER, set our offsets to the known offset of
2280 that label. */
2281
2282 else if (x == insn
2283 && (tem = prev_nonnote_insn (insn)) != 0
2284 && GET_CODE (tem) == BARRIER)
2285 {
2286 num_not_at_initial_offset = 0;
2287 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2288 {
2289 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2290 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2291 if (reg_eliminate[i].can_eliminate
2292 && (reg_eliminate[i].offset
2293 != reg_eliminate[i].initial_offset))
2294 num_not_at_initial_offset++;
2295 }
2296 }
2297
2298 else
2299 /* If neither of the above cases is true, compare each offset
2300 with those previously recorded and suppress any eliminations
2301 where the offsets disagree. */
a8fdc208 2302
2303 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2304 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2305 != (initial_p ? reg_eliminate[i].initial_offset
2306 : reg_eliminate[i].offset))
2307 reg_eliminate[i].can_eliminate = 0;
2308
2309 return;
2310
2311 case JUMP_INSN:
2312 set_label_offsets (PATTERN (insn), insn, initial_p);
2313
2314 /* ... fall through ... */
2315
2316 case INSN:
2317 case CALL_INSN:
2318 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2319 and hence must have all eliminations at their initial offsets. */
2320 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2321 if (REG_NOTE_KIND (tem) == REG_LABEL)
2322 set_label_offsets (XEXP (tem, 0), insn, 1);
2323 return;
2324
2325 case ADDR_VEC:
2326 case ADDR_DIFF_VEC:
2327 /* Each of the labels in the address vector must be at their initial
 2328	 offsets.  We want the first field for ADDR_VEC and the second
2329 field for ADDR_DIFF_VEC. */
2330
2331 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2332 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2333 insn, initial_p);
2334 return;
2335
2336 case SET:
2337 /* We only care about setting PC. If the source is not RETURN,
2338 IF_THEN_ELSE, or a label, disable any eliminations not at
2339 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2340 isn't one of those possibilities. For branches to a label,
2341 call ourselves recursively.
2342
2343 Note that this can disable elimination unnecessarily when we have
2344 a non-local goto since it will look like a non-constant jump to
2345 someplace in the current function. This isn't a significant
2346 problem since such jumps will normally be when all elimination
2347 pairs are back to their initial offsets. */
2348
2349 if (SET_DEST (x) != pc_rtx)
2350 return;
2351
2352 switch (GET_CODE (SET_SRC (x)))
2353 {
2354 case PC:
2355 case RETURN:
2356 return;
2357
2358 case LABEL_REF:
2359 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2360 return;
2361
2362 case IF_THEN_ELSE:
2363 tem = XEXP (SET_SRC (x), 1);
2364 if (GET_CODE (tem) == LABEL_REF)
2365 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2366 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2367 break;
2368
2369 tem = XEXP (SET_SRC (x), 2);
2370 if (GET_CODE (tem) == LABEL_REF)
2371 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2372 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2373 break;
2374 return;
2375 }
2376
2377 /* If we reach here, all eliminations must be at their initial
2378 offset because we are doing a jump to a variable address. */
2379 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2380 if (p->offset != p->initial_offset)
2381 p->can_eliminate = 0;
2382 }
2383}
2384\f
 2385/* Used for communication between the next two functions to properly share
2386 the vector for an ASM_OPERANDS. */
2387
2388static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2389
a8fdc208 2390/* Scan X and replace any eliminable registers (such as fp) with a
2391 replacement (such as sp), plus an offset.
2392
2393 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2394 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2395 MEM, we are allowed to replace a sum of a register and the constant zero
2396 with the register, which we cannot do outside a MEM. In addition, we need
2397 to record the fact that a register is referenced outside a MEM.
2398
2399 If INSN is nonzero, it is the insn containing X. If we replace a REG
2400 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
 2401   in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
 2402   CLOBBER of the pseudo after INSN so find_equiv_regs will know that
 2403   the REG is being modified.
2403
2404 If we see a modification to a register we know about, take the
2405 appropriate action (see case SET, below).
2406
 2407   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2408 replacements done assuming all offsets are at their initial values. If
2409 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2410 encounter, return the actual location so that find_reloads will do
2411 the proper thing. */
2412
2413rtx
2414eliminate_regs (x, mem_mode, insn)
2415 rtx x;
2416 enum machine_mode mem_mode;
2417 rtx insn;
2418{
2419 enum rtx_code code = GET_CODE (x);
2420 struct elim_table *ep;
2421 int regno;
2422 rtx new;
2423 int i, j;
2424 char *fmt;
2425 int copied = 0;
2426
2427 switch (code)
2428 {
2429 case CONST_INT:
2430 case CONST_DOUBLE:
2431 case CONST:
2432 case SYMBOL_REF:
2433 case CODE_LABEL:
2434 case PC:
2435 case CC0:
2436 case ASM_INPUT:
2437 case ADDR_VEC:
2438 case ADDR_DIFF_VEC:
2439 case RETURN:
2440 return x;
2441
2442 case REG:
2443 regno = REGNO (x);
2444
2445 /* First handle the case where we encounter a bare register that
2446 is eliminable. Replace it with a PLUS. */
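	  /* E.g. with the fp -> sp elimination in effect, a bare (reg fp)
	     is rewritten as (plus (reg sp) (const_int offset)), using the
	     pair's recorded offset.  */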
2447 if (regno < FIRST_PSEUDO_REGISTER)
2448 {
2449 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2450 ep++)
2451 if (ep->from_rtx == x && ep->can_eliminate)
2452 {
2453 if (! mem_mode)
2454 ep->ref_outside_mem = 1;
2455 return plus_constant (ep->to_rtx, ep->previous_offset);
2456 }
2457
2458 }
2459 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2460 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2461 {
2462 /* In this case, find_reloads would attempt to either use an
2463 incorrect address (if something is not at its initial offset)
 2464	     or substitute a replaced address into an insn (which loses
2465 if the offset is changed by some later action). So we simply
2466 return the replaced stack slot (assuming it is changed by
2467 elimination) and ignore the fact that this is actually a
2468 reference to the pseudo. Ensure we make a copy of the
2469 address in case it is shared. */
2470 new = eliminate_regs (reg_equiv_memory_loc[regno],
2471 mem_mode, NULL_RTX);
2472 if (new != reg_equiv_memory_loc[regno])
2473 return copy_rtx (new);
2474 }
2475 return x;
2476
2477 case PLUS:
2478 /* If this is the sum of an eliminable register and a constant, rework
2479 the sum. */
2480 if (GET_CODE (XEXP (x, 0)) == REG
2481 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2482 && CONSTANT_P (XEXP (x, 1)))
2483 {
2484 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2485 ep++)
2486 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2487 {
2488 if (! mem_mode)
2489 ep->ref_outside_mem = 1;
2490
2491 /* The only time we want to replace a PLUS with a REG (this
2492 occurs when the constant operand of the PLUS is the negative
2493 of the offset) is when we are inside a MEM. We won't want
2494 to do so at other times because that would change the
2495 structure of the insn in a way that reload can't handle.
2496 We special-case the commonest situation in
2497 eliminate_regs_in_insn, so just replace a PLUS with a
2498 PLUS here, unless inside a MEM. */
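	      /* So inside a MEM, (plus (reg fp) (const_int -offset))
		 collapses to just (reg sp); elsewhere the PLUS form is
		 kept, e.g. (plus (reg sp) (const_int 0)).  */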
2499 if (mem_mode && GET_CODE (XEXP (x, 1)) == CONST_INT
2500 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2501 return ep->to_rtx;
2502 else
2503 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2504 plus_constant (XEXP (x, 1),
2505 ep->previous_offset));
2506 }
2507
2508 /* If the register is not eliminable, we are done since the other
2509 operand is a constant. */
2510 return x;
2511 }
2512
2513 /* If this is part of an address, we want to bring any constant to the
2514 outermost PLUS. We will do this by doing register replacement in
2515 our operands and seeing if a constant shows up in one of them.
2516
2517 We assume here this is part of an address (or a "load address" insn)
2518 since an eliminable register is not likely to appear in any other
2519 context.
2520
2521 If we have (plus (eliminable) (reg)), we want to produce
 2522	 (plus (plus (replacement) (reg)) (const)).  If this was part of a
2523 normal add insn, (plus (replacement) (reg)) will be pushed as a
2524 reload. This is the desired action. */
2525
2526 {
2527 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2528 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX);
2529
2530 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2531 {
2532 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2533 didn't get a hard register but has a reg_equiv_constant,
2534 we must replace the constant here since it may no longer
2535 be in the position of any operand. */
2536 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2537 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2538 && reg_renumber[REGNO (new1)] < 0
2539 && reg_equiv_constant != 0
2540 && reg_equiv_constant[REGNO (new1)] != 0)
2541 new1 = reg_equiv_constant[REGNO (new1)];
2542 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2543 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2544 && reg_renumber[REGNO (new0)] < 0
2545 && reg_equiv_constant[REGNO (new0)] != 0)
2546 new0 = reg_equiv_constant[REGNO (new0)];
2547
2548 new = form_sum (new0, new1);
2549
2550 /* As above, if we are not inside a MEM we do not want to
2551 turn a PLUS into something else. We might try to do so here
2552 for an addition of 0 if we aren't optimizing. */
2553 if (! mem_mode && GET_CODE (new) != PLUS)
2554 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2555 else
2556 return new;
2557 }
2558 }
2559 return x;
2560
2561 case EXPR_LIST:
2562 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2563 if (XEXP (x, 0))
2564 {
fb3821f7 2565 new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2566 if (new != XEXP (x, 0))
2567 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2568 }
2569
2570 /* ... fall through ... */
2571
2572 case INSN_LIST:
2573 /* Now do eliminations in the rest of the chain. If this was
2574 an EXPR_LIST, this might result in allocating more memory than is
2575 strictly needed, but it simplifies the code. */
2576 if (XEXP (x, 1))
2577 {
fb3821f7 2578 new = eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX);
2579 if (new != XEXP (x, 1))
2580 return gen_rtx (INSN_LIST, GET_MODE (x), XEXP (x, 0), new);
2581 }
2582 return x;
2583
2584 case CALL:
2585 case COMPARE:
2586 case MINUS:
2587 case MULT:
2588 case DIV: case UDIV:
2589 case MOD: case UMOD:
2590 case AND: case IOR: case XOR:
2591 case LSHIFT: case ASHIFT: case ROTATE:
2592 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2593 case NE: case EQ:
2594 case GE: case GT: case GEU: case GTU:
2595 case LE: case LT: case LEU: case LTU:
2596 {
2597 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2598 rtx new1
2599 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX) : 0;
2600
2601 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2602 return gen_rtx (code, GET_MODE (x), new0, new1);
2603 }
2604 return x;
2605
2606 case PRE_INC:
2607 case POST_INC:
2608 case PRE_DEC:
2609 case POST_DEC:
2610 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2611 if (ep->to_rtx == XEXP (x, 0))
2612 {
2613 if (code == PRE_DEC || code == POST_DEC)
2614 ep->offset += GET_MODE_SIZE (mem_mode);
2615 else
2616 ep->offset -= GET_MODE_SIZE (mem_mode);
2617 }
2618
2619 /* Fall through to generic unary operation case. */
2620 case USE:
2621 case STRICT_LOW_PART:
2622 case NEG: case NOT:
2623 case SIGN_EXTEND: case ZERO_EXTEND:
2624 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2625 case FLOAT: case FIX:
2626 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2627 case ABS:
2628 case SQRT:
2629 case FFS:
fb3821f7 2630 new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2631 if (new != XEXP (x, 0))
2632 return gen_rtx (code, GET_MODE (x), new);
2633 return x;
2634
2635 case SUBREG:
2636 /* Similar to above processing, but preserve SUBREG_WORD.
2637 Convert (subreg (mem)) to (mem) if not paradoxical.
2638 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2639 pseudo didn't get a hard reg, we must replace this with the
2640 eliminated version of the memory location because push_reloads
2641 may do the replacement in certain circumstances. */
2642 if (GET_CODE (SUBREG_REG (x)) == REG
2643 && (GET_MODE_SIZE (GET_MODE (x))
2644 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2645 && reg_equiv_memory_loc != 0
2646 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2647 {
2648 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
fb3821f7 2649 mem_mode, NULL_RTX);
2650
2651 /* If we didn't change anything, we must retain the pseudo. */
2652 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2653 new = XEXP (x, 0);
2654 else
2655 /* Otherwise, ensure NEW isn't shared in case we have to reload
2656 it. */
2657 new = copy_rtx (new);
2658 }
2659 else
fb3821f7 2660 new = eliminate_regs (SUBREG_REG (x), mem_mode, NULL_RTX);
2661
2662 if (new != XEXP (x, 0))
2663 {
2664 if (GET_CODE (new) == MEM
2665 && (GET_MODE_SIZE (GET_MODE (x))
2666 <= GET_MODE_SIZE (GET_MODE (new))))
2667 {
2668 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2669 enum machine_mode mode = GET_MODE (x);
2670
2671#if BYTES_BIG_ENDIAN
2672 offset += (MIN (UNITS_PER_WORD,
2673 GET_MODE_SIZE (GET_MODE (new)))
2674 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2675#endif
2676
2677 PUT_MODE (new, mode);
2678 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2679 return new;
2680 }
2681 else
2682 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2683 }
2684
2685 return x;
2686
2687 case CLOBBER:
2688 /* If clobbering a register that is the replacement register for an
d45cf215 2689 elimination we still think can be performed, note that it cannot
2690 be performed. Otherwise, we need not be concerned about it. */
2691 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2692 if (ep->to_rtx == XEXP (x, 0))
2693 ep->can_eliminate = 0;
2694
2695 return x;
2696
2697 case ASM_OPERANDS:
2698 {
2699 rtx *temp_vec;
2700 /* Properly handle sharing input and constraint vectors. */
2701 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2702 {
2703 /* When we come to a new vector not seen before,
2704 scan all its elements; keep the old vector if none
2705 of them changes; otherwise, make a copy. */
2706 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2707 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2708 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2709 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
fb3821f7 2710 mem_mode, NULL_RTX);
2711
2712 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2713 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2714 break;
2715
2716 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2717 new_asm_operands_vec = old_asm_operands_vec;
2718 else
2719 new_asm_operands_vec
2720 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2721 }
2722
2723 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2724 if (new_asm_operands_vec == old_asm_operands_vec)
2725 return x;
2726
2727 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2728 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2729 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2730 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2731 ASM_OPERANDS_SOURCE_FILE (x),
2732 ASM_OPERANDS_SOURCE_LINE (x));
2733 new->volatil = x->volatil;
2734 return new;
2735 }
2736
2737 case SET:
2738 /* Check for setting a register that we know about. */
2739 if (GET_CODE (SET_DEST (x)) == REG)
2740 {
2741 /* See if this is setting the replacement register for an
a8fdc208 2742 elimination.
2743
2744 If DEST is the frame pointer, we do nothing because we assume that
2745 all assignments to the frame pointer are for non-local gotos and
2746 are being done at a time when they are valid and do not disturb
2747 anything else. Some machines want to eliminate a fake argument
2748 pointer with either the frame or stack pointer. Assignments to
2749 the frame pointer must not prevent this elimination. */
2750
2751 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2752 ep++)
2753 if (ep->to_rtx == SET_DEST (x)
2754 && SET_DEST (x) != frame_pointer_rtx)
2755 {
6dc42e49 2756 /* If it is being incremented, adjust the offset. Otherwise,
2757 this elimination can't be done. */
2758 rtx src = SET_SRC (x);
2759
2760 if (GET_CODE (src) == PLUS
2761 && XEXP (src, 0) == SET_DEST (x)
2762 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2763 ep->offset -= INTVAL (XEXP (src, 1));
2764 else
2765 ep->can_eliminate = 0;
2766 }
2767
2768 /* Now check to see we are assigning to a register that can be
2769 eliminated. If so, it must be as part of a PARALLEL, since we
2770 will not have been called if this is a single SET. So indicate
2771 that we can no longer eliminate this reg. */
2772 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2773 ep++)
2774 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
2775 ep->can_eliminate = 0;
2776 }
2777
2778 /* Now avoid the loop below in this common case. */
2779 {
2780 rtx new0 = eliminate_regs (SET_DEST (x), 0, NULL_RTX);
2781 rtx new1 = eliminate_regs (SET_SRC (x), 0, NULL_RTX);
2782
2783 /* If SET_DEST changed from a REG to a MEM and INSN is non-zero,
2784 write a CLOBBER insn. */
2785 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
2786 && insn != 0)
2787 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
2788
2789 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
2790 return gen_rtx (SET, VOIDmode, new0, new1);
2791 }
2792
2793 return x;
2794
2795 case MEM:
2796 /* Our only special processing is to pass the mode of the MEM to our
2797 recursive call and copy the flags. While we are here, handle this
2798 case more efficiently. */
fb3821f7 2799 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), NULL_RTX);
2800 if (new != XEXP (x, 0))
2801 {
2802 new = gen_rtx (MEM, GET_MODE (x), new);
2803 new->volatil = x->volatil;
2804 new->unchanging = x->unchanging;
2805 new->in_struct = x->in_struct;
2806 return new;
2807 }
2808 else
2809 return x;
2810 }
2811
2812 /* Process each of our operands recursively. If any have changed, make a
2813 copy of the rtx. */
2814 fmt = GET_RTX_FORMAT (code);
2815 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2816 {
2817 if (*fmt == 'e')
2818 {
fb3821f7 2819 new = eliminate_regs (XEXP (x, i), mem_mode, NULL_RTX);
2820 if (new != XEXP (x, i) && ! copied)
2821 {
2822 rtx new_x = rtx_alloc (code);
2823 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
2824 + (sizeof (new_x->fld[0])
2825 * GET_RTX_LENGTH (code))));
2826 x = new_x;
2827 copied = 1;
2828 }
2829 XEXP (x, i) = new;
2830 }
2831 else if (*fmt == 'E')
2832 {
2833 int copied_vec = 0;
2834 for (j = 0; j < XVECLEN (x, i); j++)
2835 {
2836 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
2837 if (new != XVECEXP (x, i, j) && ! copied_vec)
2838 {
2839 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2840 &XVECEXP (x, i, 0));
2841 if (! copied)
2842 {
2843 rtx new_x = rtx_alloc (code);
2844 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
2845 + (sizeof (new_x->fld[0])
2846 * GET_RTX_LENGTH (code))));
2847 x = new_x;
2848 copied = 1;
2849 }
2850 XVEC (x, i) = new_v;
2851 copied_vec = 1;
2852 }
2853 XVECEXP (x, i, j) = new;
2854 }
2855 }
2856 }
2857
2858 return x;
2859}
2860\f
2861/* Scan INSN and eliminate all eliminable registers in it.
2862
2863 If REPLACE is nonzero, do the replacement destructively. Also
 2864   delete the insn as dead if it is setting an eliminable register.
2865
2866 If REPLACE is zero, do all our allocations in reload_obstack.
2867
2868 If no eliminations were done and this insn doesn't require any elimination
2869 processing (these are not identical conditions: it might be updating sp,
2870 but not referencing fp; this needs to be seen during reload_as_needed so
2871 that the offset between fp and sp can be taken into consideration), zero
2872 is returned. Otherwise, 1 is returned. */
2873
2874static int
2875eliminate_regs_in_insn (insn, replace)
2876 rtx insn;
2877 int replace;
2878{
2879 rtx old_body = PATTERN (insn);
2880 rtx new_body;
2881 int val = 0;
2882 struct elim_table *ep;
2883
2884 if (! replace)
2885 push_obstacks (&reload_obstack, &reload_obstack);
2886
2887 if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
2888 && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
2889 {
2890 /* Check for setting an eliminable register. */
2891 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2892 if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
2893 {
2894 /* In this case this insn isn't serving a useful purpose. We
2895 will delete it in reload_as_needed once we know that this
2896 elimination is, in fact, being done.
2897
 2898	     If REPLACE isn't set, we can't delete this insn, but needn't
2899 process it since it won't be used unless something changes. */
2900 if (replace)
2901 delete_dead_insn (insn);
2902 val = 1;
2903 goto done;
2904 }
2905
2906 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
2907 in the insn is the negative of the offset in FROM. Substitute
2908 (set (reg) (reg to)) for the insn and change its code.
2909
 2910     We have to do this here, rather than in eliminate_regs, so that we can
2911 change the insn code. */
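  /* For instance, with fp -> sp and a current offset of 16,
     (set (reg X) (plus (reg fp) (const_int -16))) is rewritten below as
     the simple move (set (reg X) (reg sp)).  */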
2912
2913 if (GET_CODE (SET_SRC (old_body)) == PLUS
2914 && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
2915 && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
2916 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2917 ep++)
2918 if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
2919 && ep->can_eliminate
2920 && ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
2921 {
2922 PATTERN (insn) = gen_rtx (SET, VOIDmode,
2923 SET_DEST (old_body), ep->to_rtx);
2924 INSN_CODE (insn) = -1;
2925 val = 1;
2926 goto done;
2927 }
2928 }
2929
2930 old_asm_operands_vec = 0;
2931
2932 /* Replace the body of this insn with a substituted form. If we changed
2933 something, return non-zero. If this is the final call for this
2934 insn (REPLACE is non-zero), do the elimination in REG_NOTES as well.
2935
2936 If we are replacing a body that was a (set X (plus Y Z)), try to
2937 re-recognize the insn. We do this in case we had a simple addition
2938 but now can do this as a load-address. This saves an insn in this
2939 common case. */
2940
fb3821f7 2941 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
2942 if (new_body != old_body)
2943 {
2944 if (GET_CODE (old_body) != SET || GET_CODE (SET_SRC (old_body)) != PLUS
2945 || ! validate_change (insn, &PATTERN (insn), new_body, 0))
2946 PATTERN (insn) = new_body;
2947
2948 if (replace && REG_NOTES (insn))
fb3821f7 2949 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, NULL_RTX);
2950 val = 1;
2951 }
a8fdc208 2952
2953 /* Loop through all elimination pairs. See if any have changed and
2954 recalculate the number not at initial offset.
2955
2956 Compute the maximum offset (minimum offset if the stack does not
2957 grow downward) for each elimination pair.
2958
 2959     We also detect cases where register elimination cannot be done,
2960 namely, if a register would be both changed and referenced outside a MEM
2961 in the resulting insn since such an insn is often undefined and, even if
2962 not, we cannot know what meaning will be given to it. Note that it is
2963 valid to have a register used in an address in an insn that changes it
2964 (presumably with a pre- or post-increment or decrement).
2965
2966 If anything changes, return nonzero. */
2967
2968 num_not_at_initial_offset = 0;
2969 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2970 {
2971 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
2972 ep->can_eliminate = 0;
2973
2974 ep->ref_outside_mem = 0;
2975
2976 if (ep->previous_offset != ep->offset)
2977 val = 1;
2978
2979 ep->previous_offset = ep->offset;
2980 if (ep->can_eliminate && ep->offset != ep->initial_offset)
2981 num_not_at_initial_offset++;
2982
2983#ifdef STACK_GROWS_DOWNWARD
2984 ep->max_offset = MAX (ep->max_offset, ep->offset);
2985#else
2986 ep->max_offset = MIN (ep->max_offset, ep->offset);
2987#endif
2988 }
2989
2990 done:
2991 if (! replace)
2992 pop_obstacks ();
2993
2994 return val;
2995}
2996
2997/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
2998 replacement we currently believe is valid, mark it as not eliminable if X
2999 modifies DEST in any way other than by adding a constant integer to it.
3000
3001 If DEST is the frame pointer, we do nothing because we assume that
3002 all assignments to the frame pointer are nonlocal gotos and are being done
3003 at a time when they are valid and do not disturb anything else.
3004 Some machines want to eliminate a fake argument pointer with either the
3005 frame or stack pointer. Assignments to the frame pointer must not prevent
3006 this elimination.
3007
3008 Called via note_stores from reload before starting its passes to scan
3009 the insns of the function. */
3010
3011static void
3012mark_not_eliminable (dest, x)
3013 rtx dest;
3014 rtx x;
3015{
3016 register int i;
3017
3018 /* A SUBREG of a hard register here is just changing its mode. We should
3019 not see a SUBREG of an eliminable hard register, but check just in
3020 case. */
3021 if (GET_CODE (dest) == SUBREG)
3022 dest = SUBREG_REG (dest);
3023
3024 if (dest == frame_pointer_rtx)
3025 return;
3026
3027 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3028 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3029 && (GET_CODE (x) != SET
3030 || GET_CODE (SET_SRC (x)) != PLUS
3031 || XEXP (SET_SRC (x), 0) != dest
3032 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3033 {
3034 reg_eliminate[i].can_eliminate_previous
3035 = reg_eliminate[i].can_eliminate = 0;
3036 num_eliminable--;
3037 }
3038}
3039\f
3040/* Kick all pseudos out of hard register REGNO.
3041 If GLOBAL is nonzero, try to find someplace else to put them.
3042 If DUMPFILE is nonzero, log actions taken on that file.
3043
3044 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
 3045   because we found we can't eliminate some register.  In that case, no pseudos
3046 are allowed to be in the register, even if they are only in a block that
3047 doesn't require spill registers, unlike the case when we are spilling this
3048 hard reg to produce another spill register.
3049
3050 Return nonzero if any pseudos needed to be kicked out. */
3051
3052static int
3053spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3054 register int regno;
3055 int global;
3056 FILE *dumpfile;
3057 int cant_eliminate;
3058{
3059 int something_changed = 0;
3060 register int i;
3061
3062 SET_HARD_REG_BIT (forbidden_regs, regno);
3063
3064 /* Spill every pseudo reg that was allocated to this reg
3065 or to something that overlaps this reg. */
3066
3067 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3068 if (reg_renumber[i] >= 0
3069 && reg_renumber[i] <= regno
a8fdc208 3070 && (reg_renumber[i]
3071 + HARD_REGNO_NREGS (reg_renumber[i],
3072 PSEUDO_REGNO_MODE (i))
3073 > regno))
3074 {
3075 enum reg_class class = REGNO_REG_CLASS (regno);
3076
3077 /* If this register belongs solely to a basic block which needed no
3078 spilling of any class that this register is contained in,
3079 leave it be, unless we are spilling this register because
3080 it was a hard register that can't be eliminated. */
3081
3082 if (! cant_eliminate
3083 && basic_block_needs[0]
3084 && reg_basic_block[i] >= 0
3085 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3086 {
3087 enum reg_class *p;
3088
3089 for (p = reg_class_superclasses[(int) class];
3090 *p != LIM_REG_CLASSES; p++)
3091 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3092 break;
a8fdc208 3093
3094 if (*p == LIM_REG_CLASSES)
3095 continue;
3096 }
3097
3098 /* Mark it as no longer having a hard register home. */
3099 reg_renumber[i] = -1;
3100 /* We will need to scan everything again. */
3101 something_changed = 1;
3102 if (global)
3103 retry_global_alloc (i, forbidden_regs);
3104
3105 alter_reg (i, regno);
3106 if (dumpfile)
3107 {
3108 if (reg_renumber[i] == -1)
3109 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3110 else
3111 fprintf (dumpfile, " Register %d now in %d.\n\n",
3112 i, reg_renumber[i]);
3113 }
3114 }
3115
3116 return something_changed;
3117}
3118\f
3119/* Find all paradoxical subregs within X and update reg_max_ref_width. */
3120
3121static void
3122scan_paradoxical_subregs (x)
3123 register rtx x;
3124{
3125 register int i;
3126 register char *fmt;
3127 register enum rtx_code code = GET_CODE (x);
3128
3129 switch (code)
3130 {
3131 case CONST_INT:
3132 case CONST:
3133 case SYMBOL_REF:
3134 case LABEL_REF:
3135 case CONST_DOUBLE:
3136 case CC0:
3137 case PC:
3138 case REG:
3139 case USE:
3140 case CLOBBER:
3141 return;
3142
3143 case SUBREG:
3144 if (GET_CODE (SUBREG_REG (x)) == REG
3145 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3146 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3147 = GET_MODE_SIZE (GET_MODE (x));
3148 return;
3149 }
3150
3151 fmt = GET_RTX_FORMAT (code);
3152 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3153 {
3154 if (fmt[i] == 'e')
3155 scan_paradoxical_subregs (XEXP (x, i));
3156 else if (fmt[i] == 'E')
3157 {
3158 register int j;
3159 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3160 scan_paradoxical_subregs (XVECEXP (x, i, j));
3161 }
3162 }
3163}
3164\f
3165struct hard_reg_n_uses { int regno; int uses; };
3166
3167static int
3168hard_reg_use_compare (p1, p2)
3169 struct hard_reg_n_uses *p1, *p2;
3170{
3171 int tem = p1->uses - p2->uses;
3172 if (tem != 0) return tem;
3173 /* If regs are equally good, sort by regno,
3174 so that the results of qsort leave nothing to chance. */
3175 return p1->regno - p2->regno;
3176}
3177
3178/* Choose the order to consider regs for use as reload registers
3179 based on how much trouble would be caused by spilling one.
3180 Store them in order of decreasing preference in potential_reload_regs. */
3181
3182static void
3183order_regs_for_reload ()
3184{
3185 register int i;
3186 register int o = 0;
3187 int large = 0;
3188
3189 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3190
3191 CLEAR_HARD_REG_SET (bad_spill_regs);
3192
3193 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3194 potential_reload_regs[i] = -1;
3195
3196 /* Count number of uses of each hard reg by pseudo regs allocated to it
3197 and then order them by decreasing use. */
3198
3199 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3200 {
3201 hard_reg_n_uses[i].uses = 0;
3202 hard_reg_n_uses[i].regno = i;
3203 }
3204
3205 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3206 {
3207 int regno = reg_renumber[i];
3208 if (regno >= 0)
3209 {
3210 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3211 while (regno < lim)
3212 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3213 }
3214 large += reg_n_refs[i];
3215 }
3216
3217 /* Now fixed registers (which cannot safely be used for reloading)
3218 get a very high use count so they will be considered least desirable.
3219 Registers used explicitly in the rtl code are almost as bad. */
3220
3221 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3222 {
3223 if (fixed_regs[i])
3224 {
3225 hard_reg_n_uses[i].uses += 2 * large + 2;
3226 SET_HARD_REG_BIT (bad_spill_regs, i);
3227 }
3228 else if (regs_explicitly_used[i])
3229 {
3230 hard_reg_n_uses[i].uses += large + 1;
3231 /* ??? We are doing this here because of the potential that
3232 bad code may be generated if a register explicitly used in
3233 an insn was used as a spill register for that insn. But
3234 not using these as spill registers may lose on some machines.
3235 We'll have to see how this works out. */
3236 SET_HARD_REG_BIT (bad_spill_regs, i);
3237 }
3238 }
3239 hard_reg_n_uses[FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3240 SET_HARD_REG_BIT (bad_spill_regs, FRAME_POINTER_REGNUM);
3241
3242#ifdef ELIMINABLE_REGS
3243 /* If registers other than the frame pointer are eliminable, mark them as
3244 poor choices. */
3245 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3246 {
3247 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3248 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3249 }
3250#endif
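  /* Illustrative arithmetic, not in the original comments: LARGE is the
     sum of reg_n_refs over all pseudos, so no hard reg can accumulate
     more than LARGE real uses.  A penalty of LARGE + 1 therefore sorts
     explicitly used registers after every ordinary register, and
     2 * LARGE + 2 sorts fixed, frame-pointer and eliminable registers
     after even those.  */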
3251
3252 /* Prefer registers not so far used, for use in temporary loading.
3253 Among them, if REG_ALLOC_ORDER is defined, use that order.
3254 Otherwise, prefer registers not preserved by calls. */
3255
3256#ifdef REG_ALLOC_ORDER
3257 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3258 {
3259 int regno = reg_alloc_order[i];
3260
3261 if (hard_reg_n_uses[regno].uses == 0)
3262 potential_reload_regs[o++] = regno;
3263 }
3264#else
3265 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3266 {
3267 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3268 potential_reload_regs[o++] = i;
3269 }
3270 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3271 {
3272 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3273 potential_reload_regs[o++] = i;
3274 }
3275#endif
3276
3277 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3278 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3279
3280 /* Now add the regs that are already used,
3281 preferring those used less often. The fixed and otherwise forbidden
3282 registers will be at the end of this list. */
3283
3284 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3285 if (hard_reg_n_uses[i].uses != 0)
3286 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3287}
3288\f
3289/* Reload pseudo-registers into hard regs around each insn as needed.
3290 Additional register load insns are output before the insn that needs it
3291 and perhaps store insns after insns that modify the reloaded pseudo reg.
3292
3293 reg_last_reload_reg and reg_reloaded_contents keep track of
3294 which pseudo-registers are already available in reload registers.
3295 We update these for the reloads that we perform,
3296 as the insns are scanned. */
3297
3298static void
3299reload_as_needed (first, live_known)
3300 rtx first;
3301 int live_known;
3302{
3303 register rtx insn;
3304 register int i;
3305 int this_block = 0;
3306 rtx x;
3307 rtx after_call = 0;
3308
3309 bzero (spill_reg_rtx, sizeof spill_reg_rtx);
3310 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3311 bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
3312 reg_has_output_reload = (char *) alloca (max_regno);
3313 for (i = 0; i < n_spills; i++)
3314 {
3315 reg_reloaded_contents[i] = -1;
3316 reg_reloaded_insn[i] = 0;
3317 }
3318
3319 /* Reset all offsets on eliminable registers to their initial values. */
3320#ifdef ELIMINABLE_REGS
3321 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3322 {
3323 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3324 reg_eliminate[i].initial_offset)
3325 reg_eliminate[i].previous_offset
3326 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3327 }
3328#else
3329 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3330 reg_eliminate[0].previous_offset
3331 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3332#endif
3333
3334 num_not_at_initial_offset = 0;
3335
3336 for (insn = first; insn;)
3337 {
3338 register rtx next = NEXT_INSN (insn);
3339
3340 /* Notice when we move to a new basic block. */
aa2c50d6 3341 if (live_known && this_block + 1 < n_basic_blocks
3342 && insn == basic_block_head[this_block+1])
3343 ++this_block;
3344
3345 /* If we pass a label, copy the offsets from the label information
3346 into the current offsets of each elimination. */
3347 if (GET_CODE (insn) == CODE_LABEL)
3348 {
3349 num_not_at_initial_offset = 0;
3350 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3351 {
3352 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3353 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3354 if (reg_eliminate[i].can_eliminate
3355 && (reg_eliminate[i].offset
3356 != reg_eliminate[i].initial_offset))
3357 num_not_at_initial_offset++;
3358 }
3359 }
3360
3361 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3362 {
3363 rtx avoid_return_reg = 0;
3364
3365#ifdef SMALL_REGISTER_CLASSES
3366 /* Set avoid_return_reg if this is an insn
3367 that might use the value of a function call. */
3368 if (GET_CODE (insn) == CALL_INSN)
3369 {
3370 if (GET_CODE (PATTERN (insn)) == SET)
3371 after_call = SET_DEST (PATTERN (insn));
3372 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3373 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3374 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3375 else
3376 after_call = 0;
3377 }
3378 else if (after_call != 0
3379 && !(GET_CODE (PATTERN (insn)) == SET
3380 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3381 {
3382 if (reg_mentioned_p (after_call, PATTERN (insn)))
3383 avoid_return_reg = after_call;
3384 after_call = 0;
3385 }
3386#endif /* SMALL_REGISTER_CLASSES */
3387
3388 /* If this is a USE or CLOBBER of a MEM, ensure that any
3389 references to eliminable registers have been removed. */
3390
3391 if ((GET_CODE (PATTERN (insn)) == USE
3392 || GET_CODE (PATTERN (insn)) == CLOBBER)
3393 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3394 XEXP (XEXP (PATTERN (insn), 0), 0)
3395 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
fb3821f7 3396 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
2758481d 3397
3398 /* If we need to do register elimination processing, do so.
3399 This might delete the insn, in which case we are done. */
3400 if (num_eliminable && GET_MODE (insn) == QImode)
3401 {
3402 eliminate_regs_in_insn (insn, 1);
3403 if (GET_CODE (insn) == NOTE)
3404 {
3405 insn = next;
3406 continue;
3407 }
3408 }
3409
3410 if (GET_MODE (insn) == VOIDmode)
3411 n_reloads = 0;
3412 /* First find the pseudo regs that must be reloaded for this insn.
3413 This info is returned in the tables reload_... (see reload.h).
3414 Also modify the body of INSN by substituting RELOAD
3415 rtx's for those pseudo regs. */
3416 else
3417 {
3418 bzero (reg_has_output_reload, max_regno);
3419 CLEAR_HARD_REG_SET (reg_is_output_reload);
3420
3421 find_reloads (insn, 1, spill_indirect_levels, live_known,
3422 spill_reg_order);
3423 }
3424
3425 if (n_reloads > 0)
3426 {
3427 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3428 rtx p;
3429 int class;
3430
3431 /* If this block has not had spilling done for a
a8fdc208 3432 particular class, deactivate any optional reloads
3433 of that class lest they try to use a spill-reg which isn't
3434 available here. If we have any non-optionals that need a
3435 spill reg, abort. */
3436
3437 for (class = 0; class < N_REG_CLASSES; class++)
3438 if (basic_block_needs[class] != 0
3439 && basic_block_needs[class][this_block] == 0)
3440 for (i = 0; i < n_reloads; i++)
3441 if (class == (int) reload_reg_class[i])
3442 {
3443 if (reload_optional[i])
3444 {
3445 reload_in[i] = reload_out[i] = 0;
3446 reload_secondary_p[i] = 0;
3447 }
3448 else if (reload_reg_rtx[i] == 0
3449 && (reload_in[i] != 0 || reload_out[i] != 0
3450 || reload_secondary_p[i] != 0))
3451 abort ();
3452 }
3453
3454 /* Now compute which reload regs to reload them into. Perhaps
3455 reusing reload regs from previous insns, or else output
3456 load insns to reload them. Maybe output store insns too.
3457 Record the choices of reload reg in reload_reg_rtx. */
3458 choose_reload_regs (insn, avoid_return_reg);
3459
3460 /* Generate the insns to reload operands into or out of
3461 their reload regs. */
3462 emit_reload_insns (insn);
3463
3464 /* Substitute the chosen reload regs from reload_reg_rtx
3465 into the insn's body (or perhaps into the bodies of other
3466 load and store insns that we just made for reloading
3467 and that we moved the structure into). */
3468 subst_reloads ();
3469
3470 /* If this was an ASM, make sure that all the reload insns
3471 we have generated are valid. If not, give an error
3472 and delete them. */
3473
3474 if (asm_noperands (PATTERN (insn)) >= 0)
3475 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3476 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3477 && (recog_memoized (p) < 0
3478 || (insn_extract (p),
3479 ! constrain_operands (INSN_CODE (p), 1))))
3480 {
3481 error_for_asm (insn,
3482 "`asm' operand requires impossible reload");
3483 PUT_CODE (p, NOTE);
3484 NOTE_SOURCE_FILE (p) = 0;
3485 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3486 }
3487 }
3488 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3489 is no longer validly lying around to save a future reload.
3490 Note that this does not detect pseudos that were reloaded
3491 for this insn in order to be stored in
3492 (obeying register constraints). That is correct; such reload
3493 registers ARE still valid. */
3494 note_stores (PATTERN (insn), forget_old_reloads_1);
3495
3496 /* There may have been CLOBBER insns placed after INSN. So scan
3497 between INSN and NEXT and use them to forget old reloads. */
3498 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3499 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3500 note_stores (PATTERN (x), forget_old_reloads_1);
3501
3502#ifdef AUTO_INC_DEC
3503 /* Likewise for regs altered by auto-increment in this insn.
3504 But note that the reg-notes are not changed by reloading:
3505 they still contain the pseudo-regs, not the spill regs. */
3506 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3507 if (REG_NOTE_KIND (x) == REG_INC)
3508 {
3509 /* See if this pseudo reg was reloaded in this insn.
3510 If so, its last-reload info is still valid
3511 because it is based on this insn's reload. */
3512 for (i = 0; i < n_reloads; i++)
3513 if (reload_out[i] == XEXP (x, 0))
3514 break;
3515
3516 if (i != n_reloads)
3517 forget_old_reloads_1 (XEXP (x, 0));
3518 }
3519#endif
3520 }
3521 /* A reload reg's contents are unknown after a label. */
3522 if (GET_CODE (insn) == CODE_LABEL)
3523 for (i = 0; i < n_spills; i++)
3524 {
3525 reg_reloaded_contents[i] = -1;
3526 reg_reloaded_insn[i] = 0;
3527 }
3528
3529 /* Don't assume a reload reg is still good after a call insn
3530 if it is a call-used reg. */
3531 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == CALL_INSN)
3532 for (i = 0; i < n_spills; i++)
3533 if (call_used_regs[spill_regs[i]])
3534 {
3535 reg_reloaded_contents[i] = -1;
3536 reg_reloaded_insn[i] = 0;
3537 }
3538
3539 /* In case registers overlap, allow certain insns to invalidate
3540 particular hard registers. */
3541
3542#ifdef INSN_CLOBBERS_REGNO_P
3543 for (i = 0 ; i < n_spills ; i++)
3544 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3545 {
3546 reg_reloaded_contents[i] = -1;
3547 reg_reloaded_insn[i] = 0;
3548 }
3549#endif
3550
3551 insn = next;
3552
3553#ifdef USE_C_ALLOCA
3554 alloca (0);
3555#endif
3556 }
3557}
3558
3559/* Discard all record of any value reloaded from X,
3560 or reloaded in X from someplace else;
3561 unless X is an output reload reg of the current insn.
3562
3563 X may be a hard reg (the reload reg)
3564 or it may be a pseudo reg that was reloaded from. */
3565
3566static void
3567forget_old_reloads_1 (x)
3568 rtx x;
3569{
3570 register int regno;
3571 int nr;
3572
3573 if (GET_CODE (x) != REG)
3574 return;
3575
3576 regno = REGNO (x);
3577
3578 if (regno >= FIRST_PSEUDO_REGISTER)
3579 nr = 1;
3580 else
3581 {
3582 int i;
3583 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3584 /* Storing into a spilled-reg invalidates its contents.
3585 This can happen if a block-local pseudo is allocated to that reg
3586 and it wasn't spilled because this block's total need is 0.
3587 Then some insn might have an optional reload and use this reg. */
3588 for (i = 0; i < nr; i++)
3589 if (spill_reg_order[regno + i] >= 0
3590 /* But don't do this if the reg actually serves as an output
3591 reload reg in the current instruction. */
3592 && (n_reloads == 0
3593 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3594 {
3595 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3596 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3597 }
3598 }
3599
3600 /* Since value of X has changed,
3601 forget any value previously copied from it. */
3602
3603 while (nr-- > 0)
3604 /* But don't forget a copy if this is the output reload
3605 that establishes the copy's validity. */
3606 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3607 reg_last_reload_reg[regno + nr] = 0;
3608}
3609\f
3610/* For each reload, the mode of the reload register. */
3611static enum machine_mode reload_mode[MAX_RELOADS];
3612
3613/* For each reload, the largest number of registers it will require. */
3614static int reload_nregs[MAX_RELOADS];
3615
3616/* Comparison function for qsort to decide which of two reloads
3617 should be handled first. *P1 and *P2 are the reload numbers. */
3618
3619static int
3620reload_reg_class_lower (p1, p2)
3621 short *p1, *p2;
3622{
3623 register int r1 = *p1, r2 = *p2;
3624 register int t;
a8fdc208 3625
3626 /* Consider required reloads before optional ones. */
3627 t = reload_optional[r1] - reload_optional[r2];
3628 if (t != 0)
3629 return t;
3630
3631 /* Count all solitary classes before non-solitary ones. */
3632 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3633 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3634 if (t != 0)
3635 return t;
3636
3637 /* Aside from solitaires, consider all multi-reg groups first. */
3638 t = reload_nregs[r2] - reload_nregs[r1];
3639 if (t != 0)
3640 return t;
3641
3642 /* Consider reloads in order of increasing reg-class number. */
3643 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3644 if (t != 0)
3645 return t;
3646
3647 /* If reloads are equally urgent, sort by reload number,
3648 so that the results of qsort leave nothing to chance. */
3649 return r1 - r2;
3650}
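/* Illustrative example, not part of the original source: given one
   required reload in a single-register class, one required reload that
   needs a two-register group, and one optional reload, sorting with the
   comparator above places the solitary-class reload first, the group
   reload second, and the optional reload last, whatever their original
   reload numbers were.  */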
3651\f
3652/* The following HARD_REG_SETs indicate when each hard register is
3653 used for a reload of various parts of the current insn. */
3654
3655/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
3656static HARD_REG_SET reload_reg_used;
3657/* If reg is in use for a RELOAD_FOR_INPUT_RELOAD_ADDRESS reload. */
3658static HARD_REG_SET reload_reg_used_in_input_addr;
3659/* If reg is in use for a RELOAD_FOR_OUTPUT_RELOAD_ADDRESS reload. */
3660static HARD_REG_SET reload_reg_used_in_output_addr;
3661/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
3662static HARD_REG_SET reload_reg_used_in_op_addr;
3663/* If reg is in use for a RELOAD_FOR_INPUT reload. */
3664static HARD_REG_SET reload_reg_used_in_input;
3665/* If reg is in use for a RELOAD_FOR_OUTPUT reload. */
3666static HARD_REG_SET reload_reg_used_in_output;
3667
3668/* If reg is in use as a reload reg for any sort of reload. */
3669static HARD_REG_SET reload_reg_used_at_all;
3670
3671/* Mark reg REGNO as in use for a reload of the sort spec'd by WHEN_NEEDED.
3672 MODE is used to indicate how many consecutive regs are actually used. */
3673
3674static void
3675mark_reload_reg_in_use (regno, when_needed, mode)
3676 int regno;
3677 enum reload_when_needed when_needed;
3678 enum machine_mode mode;
3679{
3680 int nregs = HARD_REGNO_NREGS (regno, mode);
3681 int i;
3682
3683 for (i = regno; i < nregs + regno; i++)
3684 {
3685 switch (when_needed)
3686 {
3687 case RELOAD_OTHER:
3688 SET_HARD_REG_BIT (reload_reg_used, i);
3689 break;
3690
3691 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3692 SET_HARD_REG_BIT (reload_reg_used_in_input_addr, i);
3693 break;
3694
3695 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3696 SET_HARD_REG_BIT (reload_reg_used_in_output_addr, i);
3697 break;
3698
3699 case RELOAD_FOR_OPERAND_ADDRESS:
3700 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
3701 break;
3702
3703 case RELOAD_FOR_INPUT:
3704 SET_HARD_REG_BIT (reload_reg_used_in_input, i);
3705 break;
3706
3707 case RELOAD_FOR_OUTPUT:
3708 SET_HARD_REG_BIT (reload_reg_used_in_output, i);
3709 break;
3710 }
3711
3712 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
3713 }
3714}
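/* Illustrative sketch, not part of the original source: marking a
   multi-word reload.  On a target where HARD_REGNO_NREGS (3, DImode)
   is 2, the call below records hard regs 3 and 4 both in
   reload_reg_used_in_input and in reload_reg_used_at_all.  */
#if 0
  mark_reload_reg_in_use (3, RELOAD_FOR_INPUT, DImode);
#endif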
3715
3716/* 1 if reg REGNO is free as a reload reg for a reload of the sort
3717 specified by WHEN_NEEDED. */
3718
3719static int
3720reload_reg_free_p (regno, when_needed)
3721 int regno;
3722 enum reload_when_needed when_needed;
3723{
3724 /* In use for a RELOAD_OTHER means it's not available for anything. */
3725 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
3726 return 0;
3727 switch (when_needed)
3728 {
3729 case RELOAD_OTHER:
3730 /* In use for anything means not available for a RELOAD_OTHER. */
3731 return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);
3732
3733 /* The other kinds of use can sometimes share a register. */
3734 case RELOAD_FOR_INPUT:
3735 return (! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno)
3736 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3737 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno));
3738 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3739 return (! TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno)
3740 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno));
3741 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3742 return (! TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno)
3743 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3744 case RELOAD_FOR_OPERAND_ADDRESS:
3745 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3746 && ! TEST_HARD_REG_BIT (reload_reg_used_in_input, regno)
3747 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3748 case RELOAD_FOR_OUTPUT:
3749 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3750 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno)
3751 && ! TEST_HARD_REG_BIT (reload_reg_used_in_output, regno));
3752 }
3753 abort ();
3754}
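/* Illustrative note, not in the original comments: the tests above let
   reloads for non-overlapping parts of an insn share one register.  For
   instance, a RELOAD_FOR_INPUT and a RELOAD_FOR_OUTPUT reload may use
   the same hard reg, since the input is consumed before the output is
   produced, but neither may share with a RELOAD_FOR_OPERAND_ADDRESS
   reload, whose register is live across both.  */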
3755
3756/* Return 1 if the value in reload reg REGNO, as used by a reload
3757 needed for the part of the insn specified by WHEN_NEEDED,
3758 is not in use for a reload in any prior part of the insn.
3759
3760 We can assume that the reload reg was already tested for availability
3761 at the time it is needed, and we should not check this again,
3762 in case the reg has already been marked in use. */
3763
3764static int
3765reload_reg_free_before_p (regno, when_needed)
3766 int regno;
3767 enum reload_when_needed when_needed;
3768{
3769 switch (when_needed)
3770 {
3771 case RELOAD_OTHER:
3772 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
3773 its use starts from the beginning, so nothing can use it earlier. */
3774 return 1;
3775
3776 /* If this use is for part of the insn,
3777 check the reg is not in use for any prior part. */
3778 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3779 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
3780 return 0;
3781 case RELOAD_FOR_OUTPUT:
3782 if (TEST_HARD_REG_BIT (reload_reg_used_in_input, regno))
3783 return 0;
3784 case RELOAD_FOR_OPERAND_ADDRESS:
3785 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr, regno))
3786 return 0;
3787 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3788 case RELOAD_FOR_INPUT:
3789 return 1;
3790 }
3791 abort ();
3792}
3793
3794/* Return 1 if the value in reload reg REGNO, as used by a reload
3795 needed for the part of the insn specified by WHEN_NEEDED,
3796 is still available in REGNO at the end of the insn.
3797
3798 We can assume that the reload reg was already tested for availability
3799 at the time it is needed, and we should not check this again,
3800 in case the reg has already been marked in use. */
3801
3802static int
3803reload_reg_reaches_end_p (regno, when_needed)
3804 int regno;
3805 enum reload_when_needed when_needed;
3806{
3807 switch (when_needed)
3808 {
3809 case RELOAD_OTHER:
3810 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
3811 its value must reach the end. */
3812 return 1;
3813
3814 /* If this use is for part of the insn,
3815 its value reaches if no subsequent part uses the same register. */
3816 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
3817 case RELOAD_FOR_INPUT:
3818 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
3819 || TEST_HARD_REG_BIT (reload_reg_used_in_output, regno))
3820 return 0;
3821 case RELOAD_FOR_OPERAND_ADDRESS:
3822 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr, regno))
3823 return 0;
3824 case RELOAD_FOR_OUTPUT:
3825 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
3826 return 1;
3827 }
3828 abort ();
3829}
3830\f
3831/* Vector of reload-numbers showing the order in which the reloads should
3832 be processed. */
3833short reload_order[MAX_RELOADS];
3834
3835/* Indexed by reload number, 1 if incoming value
3836 inherited from previous insns. */
3837char reload_inherited[MAX_RELOADS];
3838
3839/* For an inherited reload, this is the insn the reload was inherited from,
3840 if we know it. Otherwise, this is 0. */
3841rtx reload_inheritance_insn[MAX_RELOADS];
3842
3843/* If non-zero, this is a place to get the value of the reload,
3844 rather than using reload_in. */
3845rtx reload_override_in[MAX_RELOADS];
3846
3847/* For each reload, the index in spill_regs of the spill register used,
3848 or -1 if we did not need one of the spill registers for this reload. */
3849int reload_spill_index[MAX_RELOADS];
3850
3851/* Index of last register assigned as a spill register. We allocate in
3852 a round-robin fashion. */
3853
3854 static int last_spill_reg = 0;
3855
3856/* Find a spill register to use as a reload register for reload R.
3857 LAST_RELOAD is non-zero if this is the last reload for the insn being
3858 processed.
3859
3860 Set reload_reg_rtx[R] to the register allocated.
3861
3862 If NOERROR is nonzero, we return 1 if successful,
3863 or 0 if we couldn't find a spill reg and we didn't change anything. */
3864
3865static int
3866allocate_reload_reg (r, insn, last_reload, noerror)
3867 int r;
3868 rtx insn;
3869 int last_reload;
3870 int noerror;
3871{
3872 int i;
3873 int pass;
3874 int count;
3875 rtx new;
3876 int regno;
3877
3878 /* If we put this reload ahead, thinking it is a group,
3879 then insist on finding a group. Otherwise we can grab a
a8fdc208 3880 reg that some other reload needs.
3881 (That can happen when we have a 68000 DATA_OR_FP_REG
3882 which is a group of data regs or one fp reg.)
3883 We need not be so restrictive if there are no more reloads
3884 for this insn.
3885
3886 ??? Really it would be nicer to have smarter handling
3887 for that kind of reg class, where a problem like this is normal.
3888 Perhaps those classes should be avoided for reloading
3889 by use of more alternatives. */
3890
3891 int force_group = reload_nregs[r] > 1 && ! last_reload;
3892
3893 /* If we want a single register and haven't yet found one,
3894 take any reg in the right class and not in use.
3895 If we want a consecutive group, here is where we look for it.
3896
3897 We use two passes so we can first look for reload regs to
3898 reuse, which are already in use for other reloads in this insn,
3899 and only then use additional registers.
3900 I think that maximizing reuse is needed to make sure we don't
3901 run out of reload regs. Suppose we have three reloads, and
3902 reloads A and B can share regs. These need two regs.
3903 Suppose A and B are given different regs.
3904 That leaves none for C. */
3905 for (pass = 0; pass < 2; pass++)
3906 {
3907 /* I is the index in spill_regs.
3908 We advance it round-robin between insns to use all spill regs
3909 equally, so that inherited reloads have a chance
3910 of leapfrogging each other. */
3911
3912 for (count = 0, i = last_spill_reg; count < n_spills; count++)
3913 {
3914 int class = (int) reload_reg_class[r];
3915
3916 i = (i + 1) % n_spills;
3917
3918 if (reload_reg_free_p (spill_regs[i], reload_when_needed[r])
3919 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
3920 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
3921 /* Look first for regs to share, then for unshared. */
3922 && (pass || TEST_HARD_REG_BIT (reload_reg_used_at_all,
3923 spill_regs[i])))
3924 {
3925 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
3926 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
3927 (on 68000) got us two FP regs. If NR is 1,
3928 we would reject both of them. */
3929 if (force_group)
3930 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
3931 /* If we need only one reg, we have already won. */
3932 if (nr == 1)
3933 {
3934 /* But reject a single reg if we demand a group. */
3935 if (force_group)
3936 continue;
3937 break;
3938 }
3939 /* Otherwise check that as many consecutive regs as we need
3940 are available here.
3941 Also, don't use for a group registers that are
3942 needed for nongroups. */
3943 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
3944 while (nr > 1)
3945 {
3946 regno = spill_regs[i] + nr - 1;
3947 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
3948 && spill_reg_order[regno] >= 0
3949 && reload_reg_free_p (regno, reload_when_needed[r])
3950 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
3951 regno)))
3952 break;
3953 nr--;
3954 }
3955 if (nr == 1)
3956 break;
3957 }
3958 }
3959
3960 /* If we found something on pass 1, omit pass 2. */
3961 if (count < n_spills)
3962 break;
3963 }
3964
3965 /* We should have found a spill register by now. */
3966 if (count == n_spills)
3967 {
3968 if (noerror)
3969 return 0;
3970 abort ();
3971 }
3972
3973 last_spill_reg = i;
3974
3975 /* Mark as in use for this insn the reload regs we use for this. */
3976 mark_reload_reg_in_use (spill_regs[i], reload_when_needed[r],
3977 reload_mode[r]);
3978
3979 new = spill_reg_rtx[i];
3980
3981 if (new == 0 || GET_MODE (new) != reload_mode[r])
3982 spill_reg_rtx[i] = new = gen_rtx (REG, reload_mode[r], spill_regs[i]);
3983
3984 reload_reg_rtx[r] = new;
3985 reload_spill_index[r] = i;
3986 regno = true_regnum (new);
3987
3988 /* Detect when the reload reg can't hold the reload mode.
3989 This used to be one `if', but the Sequent compiler can't handle that. */
3990 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
3991 {
3992 enum machine_mode test_mode = VOIDmode;
3993 if (reload_in[r])
3994 test_mode = GET_MODE (reload_in[r]);
3995 /* If reload_in[r] has VOIDmode, it means we will load it
3996 in whatever mode the reload reg has: to wit, reload_mode[r].
3997 We have already tested that for validity. */
3998 /* Aside from that, we need to test that the expressions
3999 to reload from or into have modes which are valid for this
4000 reload register. Otherwise the reload insns would be invalid. */
4001 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4002 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4003 if (! (reload_out[r] != 0
4004 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4005 /* The reg is OK. */
4006 return 1;
4007 }
4008
4009 /* The reg is not OK. */
4010 if (noerror)
4011 return 0;
4012
4013 if (asm_noperands (PATTERN (insn)) < 0)
4014 /* It's the compiler's fault. */
4015 abort ();
4016
4017 /* It's the user's fault; the operand's mode and constraint
4018 don't match. Disable this reload so we don't crash in final. */
4019 error_for_asm (insn,
4020 "`asm' operand constraint incompatible with operand size");
4021 reload_in[r] = 0;
4022 reload_out[r] = 0;
4023 reload_reg_rtx[r] = 0;
4024 reload_optional[r] = 1;
4025 reload_secondary_p[r] = 1;
4026
4027 return 1;
4028}
4029\f
4030/* Assign hard reg targets for the pseudo-registers we must reload
4031 into hard regs for this insn.
4032 Also output the instructions to copy them in and out of the hard regs.
4033
4034 For machines with register classes, we are responsible for
4035 finding a reload reg in the proper class. */
4036
4037static void
4038choose_reload_regs (insn, avoid_return_reg)
4039 rtx insn;
4040 /* This argument is currently ignored. */
4041 rtx avoid_return_reg;
4042{
4043 register int i, j;
4044 int max_group_size = 1;
4045 enum reg_class group_class = NO_REGS;
4046 int inheritance;
4047
4048 rtx save_reload_reg_rtx[MAX_RELOADS];
4049 char save_reload_inherited[MAX_RELOADS];
4050 rtx save_reload_inheritance_insn[MAX_RELOADS];
4051 rtx save_reload_override_in[MAX_RELOADS];
4052 int save_reload_spill_index[MAX_RELOADS];
4053 HARD_REG_SET save_reload_reg_used;
4054 HARD_REG_SET save_reload_reg_used_in_input_addr;
4055 HARD_REG_SET save_reload_reg_used_in_output_addr;
4056 HARD_REG_SET save_reload_reg_used_in_op_addr;
4057 HARD_REG_SET save_reload_reg_used_in_input;
4058 HARD_REG_SET save_reload_reg_used_in_output;
4059 HARD_REG_SET save_reload_reg_used_at_all;
4060
4061 bzero (reload_inherited, MAX_RELOADS);
4062 bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4063 bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));
4064
4065 CLEAR_HARD_REG_SET (reload_reg_used);
4066 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4067 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr);
4068 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr);
4069 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4070 CLEAR_HARD_REG_SET (reload_reg_used_in_output);
4071 CLEAR_HARD_REG_SET (reload_reg_used_in_input);
4072
4073 /* Distinguish output-only and input-only reloads
4074 because they can overlap with other things. */
4075 for (j = 0; j < n_reloads; j++)
4076 if (reload_when_needed[j] == RELOAD_OTHER
4077 && ! reload_needed_for_multiple[j])
4078 {
4079 if (reload_in[j] == 0)
4080 {
4081 /* But earlyclobber operands must stay as RELOAD_OTHER. */
4082 for (i = 0; i < n_earlyclobbers; i++)
4083 if (rtx_equal_p (reload_out[j], reload_earlyclobbers[i]))
4084 break;
4085 if (i == n_earlyclobbers)
4086 reload_when_needed[j] = RELOAD_FOR_OUTPUT;
4087 }
4088 if (reload_out[j] == 0)
4089 reload_when_needed[j] = RELOAD_FOR_INPUT;
4090
4091 if (reload_secondary_reload[j] >= 0
4092 && ! reload_needed_for_multiple[reload_secondary_reload[j]])
4093 reload_when_needed[reload_secondary_reload[j]]
4094 = reload_when_needed[j];
4095 }
4096
4097#ifdef SMALL_REGISTER_CLASSES
4098 /* Don't bother with avoiding the return reg
4099 if we have no mandatory reload that could use it. */
4100 if (avoid_return_reg)
4101 {
4102 int do_avoid = 0;
4103 int regno = REGNO (avoid_return_reg);
4104 int nregs
4105 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4106 int r;
4107
4108 for (r = regno; r < regno + nregs; r++)
4109 if (spill_reg_order[r] >= 0)
4110 for (j = 0; j < n_reloads; j++)
4111 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4112 && (reload_in[j] != 0 || reload_out[j] != 0
4113 || reload_secondary_p[j])
4114 &&
4115 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4116 do_avoid = 1;
4117 if (!do_avoid)
4118 avoid_return_reg = 0;
4119 }
4120#endif /* SMALL_REGISTER_CLASSES */
4121
4122#if 0 /* Not needed, now that we can always retry without inheritance. */
4123 /* See if we have more mandatory reloads than spill regs.
4124 If so, then we cannot risk optimizations that could prevent
a8fdc208 4125 reloads from sharing one spill register.
4126
4127 Since we will try finding a better register than reload_reg_rtx
4128 unless it is equal to reload_in or reload_out, count such reloads. */
4129
4130 {
4131 int tem = 0;
4132#ifdef SMALL_REGISTER_CLASSES
4133 int tem = (avoid_return_reg != 0);
a8fdc208 4134#endif
4135 for (j = 0; j < n_reloads; j++)
4136 if (! reload_optional[j]
4137 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4138 && (reload_reg_rtx[j] == 0
4139 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4140 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4141 tem++;
4142 if (tem > n_spills)
4143 must_reuse = 1;
4144 }
4145#endif
4146
4147#ifdef SMALL_REGISTER_CLASSES
4148 /* Don't use the subroutine call return reg for a reload
4149 if we are supposed to avoid it. */
4150 if (avoid_return_reg)
4151 {
4152 int regno = REGNO (avoid_return_reg);
4153 int nregs
4154 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4155 int r;
4156
4157 for (r = regno; r < regno + nregs; r++)
4158 if (spill_reg_order[r] >= 0)
4159 SET_HARD_REG_BIT (reload_reg_used, r);
4160 }
4161#endif /* SMALL_REGISTER_CLASSES */
4162
4163 /* In order to be certain of getting the registers we need,
4164 we must sort the reloads into order of increasing register class.
4165 Then our grabbing of reload registers will parallel the process
a8fdc208 4166 that provided the reload registers.
4167
4168 Also note whether any of the reloads wants a consecutive group of regs.
4169 If so, record the maximum size of the group desired and what
4170 register class contains all the groups needed by this insn. */
4171
4172 for (j = 0; j < n_reloads; j++)
4173 {
4174 reload_order[j] = j;
4175 reload_spill_index[j] = -1;
4176
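      /* Pick the widest mode this reload must handle: for a strict-low-part
	 output reload, the mode of the SUBREG's inner reg; otherwise the
	 output mode if it is wider (or if there is no input mode), else
	 the input mode.  */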
4177 reload_mode[j]
4178 = (reload_strict_low[j] && reload_out[j]
4179 ? GET_MODE (SUBREG_REG (reload_out[j]))
4180 : (reload_inmode[j] == VOIDmode
4181 || (GET_MODE_SIZE (reload_outmode[j])
4182 > GET_MODE_SIZE (reload_inmode[j])))
4183 ? reload_outmode[j] : reload_inmode[j]);
4184
4185 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4186
4187 if (reload_nregs[j] > 1)
4188 {
4189 max_group_size = MAX (reload_nregs[j], max_group_size);
4190 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4191 }
4192
4193 /* If we have already decided to use a certain register,
4194 don't use it in another way. */
4195 if (reload_reg_rtx[j])
4196 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]),
4197 reload_when_needed[j], reload_mode[j]);
4198 }
4199
4200 if (n_reloads > 1)
4201 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
4202
4203 bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
4204 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4205 bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
4206 sizeof reload_inheritance_insn);
4207 bcopy (reload_override_in, save_reload_override_in,
4208 sizeof reload_override_in);
4209 bcopy (reload_spill_index, save_reload_spill_index,
4210 sizeof reload_spill_index);
4211 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4212 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
4213 COPY_HARD_REG_SET (save_reload_reg_used_in_output,
4214 reload_reg_used_in_output);
4215 COPY_HARD_REG_SET (save_reload_reg_used_in_input,
4216 reload_reg_used_in_input);
4217 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr,
4218 reload_reg_used_in_input_addr);
4219 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr,
4220 reload_reg_used_in_output_addr);
4221 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4222 reload_reg_used_in_op_addr);
4223
4224 /* Try first with inheritance, then turning it off. */
4225
4226 for (inheritance = 1; inheritance >= 0; inheritance--)
4227 {
4228 /* Process the reloads in order of preference just found.
4229 Beyond this point, subregs can be found in reload_reg_rtx.
4230
4231 This used to look for an existing reloaded home for all
4232 of the reloads, and only then perform any new reloads.
4233 But that could lose if the reloads were done out of reg-class order
4234 because a later reload with a looser constraint might have an old
4235 home in a register needed by an earlier reload with a tighter constraint.
4236
4237 To solve this, we make two passes over the reloads, in the order
4238 described above. In the first pass we try to inherit a reload
4239 from a previous insn. If there is a later reload that needs a
4240 class that is a proper subset of the class being processed, we must
4241 also allocate a spill register during the first pass.
4242
4243 Then make a second pass over the reloads to allocate any reloads
4244 that haven't been given registers yet. */
4245
4246 for (j = 0; j < n_reloads; j++)
4247 {
4248 register int r = reload_order[j];
4249
4250 /* Ignore reloads that got marked inoperative. */
4251 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4252 continue;
4253
4254 /* If find_reloads chose to use reload_in or reload_out as a reload
4255 register, we don't need to choose one. Otherwise, try even if it found
4256 one since we might save an insn if we find the value lying around. */
4257 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
4258 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
4259 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
4260 continue;
4261
4262#if 0 /* No longer needed for correct operation.
4263 It might give better code, or might not; worth an experiment? */
4264 /* If this is an optional reload, we can't inherit from earlier insns
4265 until we are sure that any non-optional reloads have been allocated.
4266 The following code takes advantage of the fact that optional reloads
4267 are at the end of reload_order. */
4268 if (reload_optional[r] != 0)
4269 for (i = 0; i < j; i++)
4270 if ((reload_out[reload_order[i]] != 0
4271 || reload_in[reload_order[i]] != 0
4272 || reload_secondary_p[reload_order[i]])
4273 && ! reload_optional[reload_order[i]]
4274 && reload_reg_rtx[reload_order[i]] == 0)
4275 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
4276#endif
4277
4278 /* First see if this pseudo is already available as reloaded
4279 for a previous insn. We cannot try to inherit for reloads
4280 that are smaller than the maximum number of registers needed
4281 for groups unless the register we would allocate cannot be used
4282 for the groups.
4283
4284 We could check here to see if this is a secondary reload for
4285 an object that is already in a register of the desired class.
4286 This would avoid the need for the secondary reload register.
4287 But this is complex because we can't easily determine what
4288 objects might want to be loaded via this reload. So let a register
4289 be allocated here. In `emit_reload_insns' we suppress one of the
4290 loads in the case described above. */
4291
4292 if (inheritance)
4293 {
4294 register int regno = -1;
4295
4296 if (reload_in[r] == 0)
4297 ;
4298 else if (GET_CODE (reload_in[r]) == REG)
4299 regno = REGNO (reload_in[r]);
4300 else if (GET_CODE (reload_in_reg[r]) == REG)
4301 regno = REGNO (reload_in_reg[r]);
4302#if 0
4303 /* This won't work, since REGNO can be a pseudo reg number.
4304 Also, it takes much more hair to keep track of all the things
4305 that can invalidate an inherited reload of part of a pseudoreg. */
4306 else if (GET_CODE (reload_in[r]) == SUBREG
4307 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
4308 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
4309#endif
4310
4311 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
4312 {
4313 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
4314
4315 if (reg_reloaded_contents[i] == regno
4316 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4317 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4318 spill_regs[i])
4319 && (reload_nregs[r] == max_group_size
4320 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
4321 spill_regs[i]))
4322 && reload_reg_free_p (spill_regs[i], reload_when_needed[r])
4323 && reload_reg_free_before_p (spill_regs[i],
4324 reload_when_needed[r]))
4325 {
4326 /* If a group is needed, verify that all the subsequent
4327 registers still have their values intact. */
4328 int nr
4329 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4330 int k;
4331
4332 for (k = 1; k < nr; k++)
4333 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
4334 != regno)
4335 break;
4336
4337 if (k == nr)
4338 {
4339 /* Mark the register as in use for this part of
4340 the insn. */
4341 mark_reload_reg_in_use (spill_regs[i],
4342 reload_when_needed[r],
4343 reload_mode[r]);
4344 reload_reg_rtx[r] = reg_last_reload_reg[regno];
4345 reload_inherited[r] = 1;
4346 reload_inheritance_insn[r] = reg_reloaded_insn[i];
4347 reload_spill_index[r] = i;
4348 }
4349 }
4350 }
4351 }
4352
4353 /* Here's another way to see if the value is already lying around. */
4354 if (inheritance
4355 && reload_in[r] != 0
4356 && ! reload_inherited[r]
4357 && reload_out[r] == 0
4358 && (CONSTANT_P (reload_in[r])
4359 || GET_CODE (reload_in[r]) == PLUS
4360 || GET_CODE (reload_in[r]) == REG
4361 || GET_CODE (reload_in[r]) == MEM)
4362 && (reload_nregs[r] == max_group_size
4363 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
4364 {
4365 register rtx equiv
4366 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
fb3821f7 4367 -1, NULL_PTR, 0, reload_mode[r]);
4368 int regno;
4369
4370 if (equiv != 0)
4371 {
4372 if (GET_CODE (equiv) == REG)
4373 regno = REGNO (equiv);
4374 else if (GET_CODE (equiv) == SUBREG)
4375 {
4376 regno = REGNO (SUBREG_REG (equiv));
4377 if (regno < FIRST_PSEUDO_REGISTER)
4378 regno += SUBREG_WORD (equiv);
4379 }
4380 else
4381 abort ();
4382 }
4383
4384 /* If we found a spill reg, reject it unless it is free
4385 and of the desired class. */
4386 if (equiv != 0
4387 && ((spill_reg_order[regno] >= 0
4388 && ! reload_reg_free_before_p (regno,
4389 reload_when_needed[r]))
4390 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4391 regno)))
4392 equiv = 0;
4393
4394 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
4395 equiv = 0;
4396
4397 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4398 equiv = 0;
4399
4400 /* We found a register that contains the value we need.
4401 If this register is the same as an `earlyclobber' operand
4402 of the current insn, just mark it as a place to reload from
4403 since we can't use it as the reload register itself. */
4404
4405 if (equiv != 0)
4406 for (i = 0; i < n_earlyclobbers; i++)
4407 if (reg_overlap_mentioned_for_reload_p (equiv,
4408 reload_earlyclobbers[i]))
4409 {
4410 reload_override_in[r] = equiv;
4411 equiv = 0;
4412 break;
4413 }
4414
4415 /* JRV: If the equiv register we have found is explicitly
4416 clobbered in the current insn, mark but don't use, as above. */
4417
4418 if (equiv != 0 && regno_clobbered_p (regno, insn))
4419 {
4420 reload_override_in[r] = equiv;
4421 equiv = 0;
4422 }
4423
4424 /* If we found an equivalent reg, say no code need be generated
4425 to load it, and use it as our reload reg. */
4426 if (equiv != 0 && regno != FRAME_POINTER_REGNUM)
4427 {
4428 reload_reg_rtx[r] = equiv;
4429 reload_inherited[r] = 1;
4430 /* If it is a spill reg,
4431 mark the spill reg as in use for this insn. */
4432 i = spill_reg_order[regno];
4433 if (i >= 0)
4434 mark_reload_reg_in_use (regno, reload_when_needed[r],
4435 reload_mode[r]);
4436 }
4437 }
4438
4439 /* If we found a register to use already, or if this is an optional
4440 reload, we are done. */
4441 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
4442 continue;
4443
4444#if 0 /* No longer needed for correct operation. Might or might not
4445 give better code on the average. Want to experiment? */
4446
4447 /* See if there is a later reload that has a class different from our
4448 class that intersects our class or that requires less register
4449 than our reload. If so, we must allocate a register to this
4450 reload now, since that reload might inherit a previous reload
4451 and take the only available register in our class. Don't do this
4452 for optional reloads since they will force all previous reloads
4453 to be allocated. Also don't do this for reloads that have been
4454 turned off. */
4455
4456 for (i = j + 1; i < n_reloads; i++)
4457 {
4458 int s = reload_order[i];
4459
4460 if ((reload_in[s] == 0 && reload_out[s] == 0
4461 && ! reload_secondary_p[s])
4462 || reload_optional[s])
4463 continue;
4464
4465 if ((reload_reg_class[s] != reload_reg_class[r]
4466 && reg_classes_intersect_p (reload_reg_class[r],
4467 reload_reg_class[s]))
4468 || reload_nregs[s] < reload_nregs[r])
4469 break;
4470 }
4471
4472 if (i == n_reloads)
4473 continue;
4474
4475 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
4476#endif
4477 }
4478
4479 /* Now allocate reload registers for anything non-optional that
4480 didn't get one yet. */
4481 for (j = 0; j < n_reloads; j++)
4482 {
4483 register int r = reload_order[j];
4484
4485 /* Ignore reloads that got marked inoperative. */
4486 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4487 continue;
4488
4489 /* Skip reloads that already have a register allocated or are
4490 optional. */
4491 if (reload_reg_rtx[r] != 0 || reload_optional[r])
4492 continue;
4493
4494 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
4495 break;
4496 }
4497
4498 /* If that loop got all the way, we have won. */
4499 if (j == n_reloads)
4500 break;
4501
4502 fail:
4503 /* Loop around and try without any inheritance. */
4504 /* First undo everything done by the failed attempt
4505 to allocate with inheritance. */
4506 bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
4507 bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
4508 bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
4509 sizeof reload_inheritance_insn);
4510 bcopy (save_reload_override_in, reload_override_in,
4511 sizeof reload_override_in);
4512 bcopy (save_reload_spill_index, reload_spill_index,
4513 sizeof reload_spill_index);
4514 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
4515 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
4516 COPY_HARD_REG_SET (reload_reg_used_in_input,
4517 save_reload_reg_used_in_input);
4518 COPY_HARD_REG_SET (reload_reg_used_in_output,
4519 save_reload_reg_used_in_output);
4520 COPY_HARD_REG_SET (reload_reg_used_in_input_addr,
4521 save_reload_reg_used_in_input_addr);
4522 COPY_HARD_REG_SET (reload_reg_used_in_output_addr,
4523 save_reload_reg_used_in_output_addr);
4524 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
4525 save_reload_reg_used_in_op_addr);
4526 }
4527
4528 /* If we thought we could inherit a reload, because it seemed that
4529 nothing else wanted the same reload register earlier in the insn,
4530 verify that assumption, now that all reloads have been assigned. */
4531
4532 for (j = 0; j < n_reloads; j++)
4533 {
4534 register int r = reload_order[j];
4535
4536 if (reload_inherited[r] && reload_reg_rtx[r] != 0
4537 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
4538 reload_when_needed[r]))
4539 reload_inherited[r] = 0;
4540
4541 /* If we found a better place to reload from,
4542 validate it in the same fashion, if it is a reload reg. */
4543 if (reload_override_in[r]
4544 && (GET_CODE (reload_override_in[r]) == REG
4545 || GET_CODE (reload_override_in[r]) == SUBREG))
4546 {
4547 int regno = true_regnum (reload_override_in[r]);
4548 if (spill_reg_order[regno] >= 0
4549 && ! reload_reg_free_before_p (regno, reload_when_needed[r]))
4550 reload_override_in[r] = 0;
4551 }
4552 }
4553
4554 /* Now that reload_override_in is known valid,
4555 actually override reload_in. */
4556 for (j = 0; j < n_reloads; j++)
4557 if (reload_override_in[j])
4558 reload_in[j] = reload_override_in[j];
4559
4560 /* If this reload won't be done because it has been cancelled or is
4561 optional and not inherited, clear reload_reg_rtx so other
4562 routines (such as subst_reloads) don't get confused. */
4563 for (j = 0; j < n_reloads; j++)
4564 if ((reload_optional[j] && ! reload_inherited[j])
4565 || (reload_in[j] == 0 && reload_out[j] == 0
4566 && ! reload_secondary_p[j]))
4567 reload_reg_rtx[j] = 0;
4568
4569 /* Record which pseudos and which spill regs have output reloads. */
4570 for (j = 0; j < n_reloads; j++)
4571 {
4572 register int r = reload_order[j];
4573
4574 i = reload_spill_index[r];
4575
4576 /* I is nonneg if this reload used one of the spill regs.
4577 If reload_reg_rtx[r] is 0, this is an optional reload
4578 that we opted to ignore. */
4579 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
4580 && reload_reg_rtx[r] != 0)
4581 {
4582 register int nregno = REGNO (reload_out[r]);
4583 int nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
4584
4585 while (--nr >= 0)
4586 {
4587 reg_has_output_reload[nregno + nr] = 1;
4588 if (i >= 0)
4589 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
4590 }
4591
4592 if (reload_when_needed[r] != RELOAD_OTHER
4593 && reload_when_needed[r] != RELOAD_FOR_OUTPUT)
4594 abort ();
4595 }
4596 }
4597}
4598\f
4599/* Output insns to reload values in and out of the chosen reload regs. */
4600
4601static void
4602emit_reload_insns (insn)
4603 rtx insn;
4604{
4605 register int j;
4606 rtx following_insn = NEXT_INSN (insn);
a8efe40d 4607 rtx before_insn = insn;
4608 rtx first_output_reload_insn = NEXT_INSN (insn);
4609 rtx first_other_reload_insn = insn;
4610 rtx first_operand_address_reload_insn = insn;
4611 int special;
4612 /* Values to be put in spill_reg_store are put here first. */
4613 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
4614
d45cf215 4615 /* If this is a CALL_INSN preceded by USE insns, any reload insns
4616 must go in front of the first USE insn, not in front of INSN. */
4617
4618 if (GET_CODE (insn) == CALL_INSN && GET_CODE (PREV_INSN (insn)) == INSN
4619 && GET_CODE (PATTERN (PREV_INSN (insn))) == USE)
4620 while (GET_CODE (PREV_INSN (before_insn)) == INSN
4621 && GET_CODE (PATTERN (PREV_INSN (before_insn))) == USE)
4622 first_other_reload_insn = first_operand_address_reload_insn
4623 = before_insn = PREV_INSN (before_insn);
4624
4625 /* Now output the instructions to copy the data into and out of the
4626 reload registers. Do these in the order that the reloads were reported,
4627 since reloads of base and index registers precede reloads of operands
4628 and the operands may need the base and index registers reloaded. */
4629
4630 for (j = 0; j < n_reloads; j++)
4631 {
4632 register rtx old;
4633 rtx oldequiv_reg = 0;
4634 rtx this_reload_insn = 0;
4635 rtx store_insn = 0;
4636
4637 old = reload_in[j];
4638 if (old != 0 && ! reload_inherited[j]
4639 && ! rtx_equal_p (reload_reg_rtx[j], old)
4640 && reload_reg_rtx[j] != 0)
4641 {
4642 register rtx reloadreg = reload_reg_rtx[j];
4643 rtx oldequiv = 0;
4644 enum machine_mode mode;
4645 rtx where;
d445b551 4646 rtx reload_insn;
4647
4648 /* Determine the mode to reload in.
4649 This is very tricky because we have three to choose from.
4650 There is the mode the insn operand wants (reload_inmode[J]).
4651 There is the mode of the reload register RELOADREG.
4652 There is the intrinsic mode of the operand, which we could find
4653 by stripping some SUBREGs.
4654 It turns out that RELOADREG's mode is irrelevant:
4655 we can change that arbitrarily.
4656
4657 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
4658 then the reload reg may not support QImode moves, so use SImode.
4659 If foo is in memory due to spilling a pseudo reg, this is safe,
4660 because the QImode value is in the least significant part of a
4661 slot big enough for a SImode. If foo is some other sort of
4662 memory reference, then it is impossible to reload this case,
4663 so previous passes had better make sure this never happens.
4664
4665 Then consider a one-word union which has SImode and one of its
4666 members is a float, being fetched as (SUBREG:SF union:SI).
4667 We must fetch that as SFmode because we could be loading into
4668 a float-only register. In this case OLD's mode is correct.
4669
4670 Consider an immediate integer: it has VOIDmode. Here we need
4671 to get a mode from something else.
4672
4673 In some cases, there is a fourth mode, the operand's
4674 containing mode. If the insn specifies a containing mode for
4675 this operand, it overrides all others.
4676
4677 I am not sure whether the algorithm here is always right,
4678 but it does the right things in those cases. */
4679
4680 mode = GET_MODE (old);
4681 if (mode == VOIDmode)
4682 mode = reload_inmode[j];
4683 if (reload_strict_low[j])
4684 mode = GET_MODE (SUBREG_REG (reload_in[j]));
4685
4686#ifdef SECONDARY_INPUT_RELOAD_CLASS
4687 /* If we need a secondary register for this operation, see if
4688 the value is already in a register in that class. Don't
4689 do this if the secondary register will be used as a scratch
4690 register. */
4691
4692 if (reload_secondary_reload[j] >= 0
4693 && reload_secondary_icode[j] == CODE_FOR_nothing)
4694 oldequiv
4695 = find_equiv_reg (old, insn,
4696 reload_reg_class[reload_secondary_reload[j]],
fb3821f7 4697 -1, NULL_PTR, 0, mode);
4698#endif
4699
4700 /* If reloading from memory, see if there is a register
4701 that already holds the same value. If so, reload from there.
4702 We can pass 0 as the reload_reg_p argument because
4703 any other reload has either already been emitted,
4704 in which case find_equiv_reg will see the reload-insn,
4705 or has yet to be emitted, in which case it doesn't matter
4706 because we will use this equiv reg right away. */
4707
4708 if (oldequiv == 0
4709 && (GET_CODE (old) == MEM
4710 || (GET_CODE (old) == REG
4711 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4712 && reg_renumber[REGNO (old)] < 0)))
4713 oldequiv = find_equiv_reg (old, insn, GENERAL_REGS,
fb3821f7 4714 -1, NULL_PTR, 0, mode);
4715
4716 if (oldequiv)
4717 {
4718 int regno = true_regnum (oldequiv);
4719
4720 /* If OLDEQUIV is a spill register, don't use it for this
4721 if any other reload needs it at an earlier stage of this insn
a8fdc208 4722 or at this stage. */
4723 if (spill_reg_order[regno] >= 0
4724 && (! reload_reg_free_p (regno, reload_when_needed[j])
4725 || ! reload_reg_free_before_p (regno,
4726 reload_when_needed[j])))
4727 oldequiv = 0;
4728
4729 /* If OLDEQUIV is not a spill register,
4730 don't use it if any other reload wants it. */
4731 if (spill_reg_order[regno] < 0)
4732 {
4733 int k;
4734 for (k = 0; k < n_reloads; k++)
4735 if (reload_reg_rtx[k] != 0 && k != j
4736 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
4737 oldequiv))
4738 {
4739 oldequiv = 0;
4740 break;
4741 }
4742 }
4743 }
4744
4745 if (oldequiv == 0)
4746 oldequiv = old;
4747 else if (GET_CODE (oldequiv) == REG)
4748 oldequiv_reg = oldequiv;
4749 else if (GET_CODE (oldequiv) == SUBREG)
4750 oldequiv_reg = SUBREG_REG (oldequiv);
4751
4752 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
4753 then load RELOADREG from OLDEQUIV. */
4754
4755 if (GET_MODE (reloadreg) != mode)
4756 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
4757 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
4758 oldequiv = SUBREG_REG (oldequiv);
4759 if (GET_MODE (oldequiv) != VOIDmode
4760 && mode != GET_MODE (oldequiv))
4761 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
4762
4763 /* Decide where to put reload insn for this reload. */
4764 switch (reload_when_needed[j])
4765 {
4766 case RELOAD_FOR_INPUT:
4767 case RELOAD_OTHER:
4768 where = first_operand_address_reload_insn;
4769 break;
4770 case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
4771 where = first_other_reload_insn;
4772 break;
4773 case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
4774 where = first_output_reload_insn;
4775 break;
4776 case RELOAD_FOR_OPERAND_ADDRESS:
a8efe40d 4777 where = before_insn;
32131a9c
RK
4778 }
4779
4780 special = 0;
4781
4782 /* Auto-increment addresses must be reloaded in a special way. */
4783 if (GET_CODE (oldequiv) == POST_INC
4784 || GET_CODE (oldequiv) == POST_DEC
4785 || GET_CODE (oldequiv) == PRE_INC
4786 || GET_CODE (oldequiv) == PRE_DEC)
4787 {
4788 	     /* We are not going to bother supporting the case where an
4789 		incremented register can't be copied directly from
4790 OLDEQUIV since this seems highly unlikely. */
4791 if (reload_secondary_reload[j] >= 0)
4792 abort ();
4793 /* Prevent normal processing of this reload. */
4794 special = 1;
4795 /* Output a special code sequence for this case. */
4796 this_reload_insn
4797 = inc_for_reload (reloadreg, oldequiv, reload_inc[j], where);
4798 }
4799
4800 /* If we are reloading a pseudo-register that was set by the previous
4801 insn, see if we can get rid of that pseudo-register entirely
4802 by redirecting the previous insn into our reload register. */
4803
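	 /* For illustration (the register numbers here are hypothetical):
	    if the previous insn is (set (reg:SI 70) (plus:SI ...)), pseudo
	    70 is being reloaded here, and it dies in this insn, the code
	    below rewrites that previous insn to set the reload register
	    directly, so no separate input-reload move is needed (SPECIAL
	    suppresses the normal copy).  */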
4804 else if (optimize && GET_CODE (old) == REG
4805 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4806 && dead_or_set_p (insn, old)
4807 /* This is unsafe if some other reload
4808 uses the same reg first. */
4809 && (reload_when_needed[j] == RELOAD_OTHER
4810 || reload_when_needed[j] == RELOAD_FOR_INPUT
4811 || reload_when_needed[j] == RELOAD_FOR_INPUT_RELOAD_ADDRESS))
4812 {
4813 rtx temp = PREV_INSN (insn);
4814 while (temp && GET_CODE (temp) == NOTE)
4815 temp = PREV_INSN (temp);
4816 if (temp
4817 && GET_CODE (temp) == INSN
4818 && GET_CODE (PATTERN (temp)) == SET
4819 && SET_DEST (PATTERN (temp)) == old
4820 /* Make sure we can access insn_operand_constraint. */
4821 && asm_noperands (PATTERN (temp)) < 0
4822 /* This is unsafe if prev insn rejects our reload reg. */
4823 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
4824 reloadreg)
4825 /* This is unsafe if operand occurs more than once in current
4826 insn. Perhaps some occurrences aren't reloaded. */
4827 && count_occurrences (PATTERN (insn), old) == 1
4828 /* Don't risk splitting a matching pair of operands. */
4829 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
4830 {
4831 /* Store into the reload register instead of the pseudo. */
4832 SET_DEST (PATTERN (temp)) = reloadreg;
4833 /* If these are the only uses of the pseudo reg,
4834 pretend for GDB it lives in the reload reg we used. */
4835 if (reg_n_deaths[REGNO (old)] == 1
4836 && reg_n_sets[REGNO (old)] == 1)
4837 {
4838 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
4839 alter_reg (REGNO (old), -1);
4840 }
4841 special = 1;
4842 }
4843 }
4844
4845 /* We can't do that, so output an insn to load RELOADREG.
4846 Keep them in the following order:
4847 all reloads for input reload addresses,
4848 all reloads for ordinary input operands,
4849 all reloads for addresses of non-reloaded operands,
4850 the insn being reloaded,
4851 all reloads for addresses of output reloads,
4852 the output reloads. */
4853 if (! special)
4854 {
4855#ifdef SECONDARY_INPUT_RELOAD_CLASS
4856 rtx second_reload_reg = 0;
4857 enum insn_code icode;
4858
4859 /* If we have a secondary reload, pick up the secondary register
d445b551
RK
4860 and icode, if any. If OLDEQUIV and OLD are different or
4861 if this is an in-out reload, recompute whether or not we
4862 still need a secondary register and what the icode should
4863 be. If we still need a secondary register and the class or
4864 icode is different, go back to reloading from OLD if using
4865 OLDEQUIV means that we got the wrong type of register. We
4866 cannot have different class or icode due to an in-out reload
4867 because we don't make such reloads when both the input and
4868 output need secondary reload registers. */
32131a9c
RK
4869
4870 if (reload_secondary_reload[j] >= 0)
4871 {
4872 int secondary_reload = reload_secondary_reload[j];
1554c2c6
RK
4873 rtx real_oldequiv = oldequiv;
4874 rtx real_old = old;
4875
4876 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
4877 and similarly for OLD.
4878 See comments in find_secondary_reload in reload.c. */
4879 if (GET_CODE (oldequiv) == REG
4880 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
4881 && reg_equiv_mem[REGNO (oldequiv)] != 0)
4882 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
4883
4884 if (GET_CODE (old) == REG
4885 && REGNO (old) >= FIRST_PSEUDO_REGISTER
4886 && reg_equiv_mem[REGNO (old)] != 0)
4887 real_old = reg_equiv_mem[REGNO (old)];
4888
32131a9c
RK
4889 second_reload_reg = reload_reg_rtx[secondary_reload];
4890 icode = reload_secondary_icode[j];
4891
d445b551
RK
4892 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
4893 || (reload_in[j] != 0 && reload_out[j] != 0))
32131a9c
RK
4894 {
4895 enum reg_class new_class
4896 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
1554c2c6 4897 mode, real_oldequiv);
32131a9c
RK
4898
4899 if (new_class == NO_REGS)
4900 second_reload_reg = 0;
4901 else
4902 {
4903 enum insn_code new_icode;
4904 enum machine_mode new_mode;
4905
4906 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
4907 REGNO (second_reload_reg)))
1554c2c6 4908 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
4909 else
4910 {
4911 new_icode = reload_in_optab[(int) mode];
4912 if (new_icode != CODE_FOR_nothing
4913 && ((insn_operand_predicate[(int) new_icode][0]
a8fdc208 4914 && ! ((*insn_operand_predicate[(int) new_icode][0])
32131a9c 4915 (reloadreg, mode)))
a8fdc208
RS
4916 || (insn_operand_predicate[(int) new_icode][1]
4917 && ! ((*insn_operand_predicate[(int) new_icode][1])
1554c2c6 4918 (real_oldequiv, mode)))))
32131a9c
RK
4919 new_icode = CODE_FOR_nothing;
4920
4921 if (new_icode == CODE_FOR_nothing)
4922 new_mode = mode;
4923 else
4924 new_mode = insn_operand_mode[new_icode][2];
4925
4926 if (GET_MODE (second_reload_reg) != new_mode)
4927 {
4928 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
4929 new_mode))
1554c2c6 4930 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
4931 else
4932 second_reload_reg
4933 			      = gen_rtx (REG, new_mode,
4934 REGNO (second_reload_reg));
4935 }
4936 }
4937 }
4938 }
4939
4940 /* If we still need a secondary reload register, check
4941 to see if it is being used as a scratch or intermediate
1554c2c6
RK
4942 register and generate code appropriately. If we need
4943 a scratch register, use REAL_OLDEQUIV since the form of
4944 the insn may depend on the actual address if it is
4945 a MEM. */
32131a9c
RK
4946
4947 if (second_reload_reg)
4948 {
4949 if (icode != CODE_FOR_nothing)
4950 {
d445b551 4951 reload_insn = emit_insn_before (GEN_FCN (icode)
1554c2c6
RK
4952 (reloadreg,
4953 real_oldequiv,
d445b551
RK
4954 second_reload_reg),
4955 where);
4956 if (this_reload_insn == 0)
4957 this_reload_insn = reload_insn;
32131a9c
RK
4958 special = 1;
4959 }
4960 else
4961 {
4962 /* See if we need a scratch register to load the
4963 intermediate register (a tertiary reload). */
4964 enum insn_code tertiary_icode
4965 = reload_secondary_icode[secondary_reload];
4966
4967 if (tertiary_icode != CODE_FOR_nothing)
4968 {
4969 rtx third_reload_reg
4970 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
4971
d445b551
RK
4972 reload_insn
4973 = emit_insn_before ((GEN_FCN (tertiary_icode)
4974 (second_reload_reg,
1554c2c6 4975 real_oldequiv,
d445b551
RK
4976 third_reload_reg)),
4977 where);
4978 if (this_reload_insn == 0)
4979 this_reload_insn = reload_insn;
32131a9c
RK
4980 }
4981 else
4982 {
d445b551
RK
4983 reload_insn
4984 = gen_input_reload (second_reload_reg,
fe751ebf 4985 oldequiv, where);
d445b551
RK
4986 if (this_reload_insn == 0)
4987 this_reload_insn = reload_insn;
32131a9c
RK
4988 oldequiv = second_reload_reg;
4989 }
4990 }
4991 }
4992 }
4993#endif
4994
4995 if (! special)
d445b551 4996 {
3c3eeea6 4997 reload_insn = gen_input_reload (reloadreg, oldequiv, where);
d445b551
RK
4998 if (this_reload_insn == 0)
4999 this_reload_insn = reload_insn;
5000 }
32131a9c
RK
5001
5002#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5003 /* We may have to make a REG_DEAD note for the secondary reload
5004 register in the insns we just made. Find the last insn that
5005 mentioned the register. */
5006 if (! special && second_reload_reg
5007 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5008 {
5009 rtx prev;
5010
5011 for (prev = where;
5012 prev != PREV_INSN (this_reload_insn);
5013 prev = PREV_INSN (prev))
5014 	    if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
bfa30b22
RK
5015 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5016 PATTERN (prev)))
32131a9c
RK
5017 {
5018 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5019 second_reload_reg,
5020 REG_NOTES (prev));
5021 break;
5022 }
5023 }
5024#endif
5025 }
5026
5027 /* Update where to put other reload insns. */
5028 if (this_reload_insn)
5029 switch (reload_when_needed[j])
5030 {
5031 case RELOAD_FOR_INPUT:
5032 case RELOAD_OTHER:
5033 if (first_other_reload_insn == first_operand_address_reload_insn)
5034 first_other_reload_insn = this_reload_insn;
5035 break;
5036 case RELOAD_FOR_OPERAND_ADDRESS:
a8efe40d 5037 if (first_operand_address_reload_insn == before_insn)
32131a9c 5038 first_operand_address_reload_insn = this_reload_insn;
a8efe40d 5039 if (first_other_reload_insn == before_insn)
32131a9c
RK
5040 first_other_reload_insn = this_reload_insn;
5041 }
5042
5043 /* reload_inc[j] was formerly processed here. */
5044 }
5045
5046 /* Add a note saying the input reload reg
5047 dies in this insn, if anyone cares. */
5048#ifdef PRESERVE_DEATH_INFO_REGNO_P
5049 if (old != 0
5050 && reload_reg_rtx[j] != old
5051 && reload_reg_rtx[j] != 0
5052 && reload_out[j] == 0
5053 && ! reload_inherited[j]
5054 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
5055 {
5056 register rtx reloadreg = reload_reg_rtx[j];
5057
a8fdc208 5058#if 0
32131a9c
RK
5059 /* We can't abort here because we need to support this for sched.c.
5060 It's not terrible to miss a REG_DEAD note, but we should try
5061 to figure out how to do this correctly. */
5062 /* The code below is incorrect for address-only reloads. */
5063 if (reload_when_needed[j] != RELOAD_OTHER
5064 && reload_when_needed[j] != RELOAD_FOR_INPUT)
5065 abort ();
5066#endif
5067
5068 /* Add a death note to this insn, for an input reload. */
5069
5070 if ((reload_when_needed[j] == RELOAD_OTHER
5071 || reload_when_needed[j] == RELOAD_FOR_INPUT)
5072 && ! dead_or_set_p (insn, reloadreg))
5073 REG_NOTES (insn)
5074 = gen_rtx (EXPR_LIST, REG_DEAD,
5075 reloadreg, REG_NOTES (insn));
5076 }
5077
5078 /* When we inherit a reload, the last marked death of the reload reg
5079 may no longer really be a death. */
5080 if (reload_reg_rtx[j] != 0
5081 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
5082 && reload_inherited[j])
5083 {
5084 /* Handle inheriting an output reload.
5085 Remove the death note from the output reload insn. */
5086 if (reload_spill_index[j] >= 0
5087 && GET_CODE (reload_in[j]) == REG
5088 && spill_reg_store[reload_spill_index[j]] != 0
5089 && find_regno_note (spill_reg_store[reload_spill_index[j]],
5090 REG_DEAD, REGNO (reload_reg_rtx[j])))
5091 remove_death (REGNO (reload_reg_rtx[j]),
5092 spill_reg_store[reload_spill_index[j]]);
5093 /* Likewise for input reloads that were inherited. */
5094 else if (reload_spill_index[j] >= 0
5095 && GET_CODE (reload_in[j]) == REG
5096 && spill_reg_store[reload_spill_index[j]] == 0
5097 && reload_inheritance_insn[j] != 0
a8fdc208 5098 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
32131a9c
RK
5099 REGNO (reload_reg_rtx[j])))
5100 remove_death (REGNO (reload_reg_rtx[j]),
5101 reload_inheritance_insn[j]);
5102 else
5103 {
5104 rtx prev;
5105
5106 /* We got this register from find_equiv_reg.
5107 Search back for its last death note and get rid of it.
5108 But don't search back too far.
5109 Don't go past a place where this reg is set,
5110 since a death note before that remains valid. */
5111 for (prev = PREV_INSN (insn);
5112 prev && GET_CODE (prev) != CODE_LABEL;
5113 prev = PREV_INSN (prev))
5114 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5115 && dead_or_set_p (prev, reload_reg_rtx[j]))
5116 {
5117 if (find_regno_note (prev, REG_DEAD,
5118 REGNO (reload_reg_rtx[j])))
5119 remove_death (REGNO (reload_reg_rtx[j]), prev);
5120 break;
5121 }
5122 }
5123 }
5124
5125 /* We might have used find_equiv_reg above to choose an alternate
5126 place from which to reload. If so, and it died, we need to remove
5127 that death and move it to one of the insns we just made. */
5128
5129 if (oldequiv_reg != 0
5130 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
5131 {
5132 rtx prev, prev1;
5133
5134 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
5135 prev = PREV_INSN (prev))
5136 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5137 && dead_or_set_p (prev, oldequiv_reg))
5138 {
5139 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
5140 {
5141 for (prev1 = this_reload_insn;
5142 prev1; prev1 = PREV_INSN (prev1))
5143 	      if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
bfa30b22
RK
5144 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
5145 PATTERN (prev1)))
32131a9c
RK
5146 {
5147 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
5148 oldequiv_reg,
5149 REG_NOTES (prev1));
5150 break;
5151 }
5152 remove_death (REGNO (oldequiv_reg), prev);
5153 }
5154 break;
5155 }
5156 }
5157#endif
5158
5159 /* If we are reloading a register that was recently stored in with an
5160 output-reload, see if we can prove there was
5161 actually no need to store the old value in it. */
5162
5163 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
5164 /* This is unsafe if some other reload uses the same reg first. */
5165 && (reload_when_needed[j] == RELOAD_OTHER
5166 || reload_when_needed[j] == RELOAD_FOR_INPUT
5167 || reload_when_needed[j] == RELOAD_FOR_INPUT_RELOAD_ADDRESS)
5168 && GET_CODE (reload_in[j]) == REG
5169#if 0
5170 /* There doesn't seem to be any reason to restrict this to pseudos
5171 and doing so loses in the case where we are copying from a
5172 register of the wrong class. */
5173 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
5174#endif
5175 && spill_reg_store[reload_spill_index[j]] != 0
5176 && dead_or_set_p (insn, reload_in[j])
5177 /* This is unsafe if operand occurs more than once in current
5178 insn. Perhaps some occurrences weren't reloaded. */
5179 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5180 delete_output_reload (insn, j,
5181 spill_reg_store[reload_spill_index[j]]);
5182
5183 /* Input-reloading is done. Now do output-reloading,
5184 storing the value from the reload-register after the main insn
5185 if reload_out[j] is nonzero.
5186
5187 ??? At some point we need to support handling output reloads of
5188 JUMP_INSNs or insns that set cc0. */
5189 old = reload_out[j];
5190 if (old != 0
5191 && reload_reg_rtx[j] != old
5192 && reload_reg_rtx[j] != 0)
5193 {
5194 register rtx reloadreg = reload_reg_rtx[j];
5195 register rtx second_reloadreg = 0;
5196 rtx prev_insn = PREV_INSN (first_output_reload_insn);
5197 rtx note, p;
5198 enum machine_mode mode;
5199 int special = 0;
5200
5201 /* An output operand that dies right away does need a reload,
5202 but need not be copied from it. Show the new location in the
5203 REG_UNUSED note. */
5204 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
5205 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
5206 {
5207 XEXP (note, 0) = reload_reg_rtx[j];
5208 continue;
5209 }
5210 else if (GET_CODE (old) == SCRATCH)
5211 /* If we aren't optimizing, there won't be a REG_UNUSED note,
5212 but we don't want to make an output reload. */
5213 continue;
5214
5215#if 0
5216 /* Strip off of OLD any size-increasing SUBREGs such as
5217 (SUBREG:SI foo:QI 0). */
5218
5219 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
5220 && (GET_MODE_SIZE (GET_MODE (old))
5221 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
5222 old = SUBREG_REG (old);
5223#endif
5224
5225       /* If INSN is a JUMP_INSN, we can't support output reloads yet. */
5226 if (GET_CODE (insn) == JUMP_INSN)
5227 abort ();
5228
5229 /* Determine the mode to reload in.
5230 See comments above (for input reloading). */
5231
5232 mode = GET_MODE (old);
5233 if (mode == VOIDmode)
5234 abort (); /* Should never happen for an output. */
5235
5236 /* A strict-low-part output operand needs to be reloaded
5237 in the mode of the entire value. */
5238 if (reload_strict_low[j])
5239 {
5240 mode = GET_MODE (SUBREG_REG (reload_out[j]));
5241 /* Encapsulate OLD into that mode. */
5242 /* If OLD is a subreg, then strip it, since the subreg will
5243 be altered by this very reload. */
5244 while (GET_CODE (old) == SUBREG && GET_MODE (old) != mode)
5245 old = SUBREG_REG (old);
5246 if (GET_MODE (old) != VOIDmode
5247 && mode != GET_MODE (old))
5248 old = gen_rtx (SUBREG, mode, old, 0);
5249 }
5250
5251 if (GET_MODE (reloadreg) != mode)
5252 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5253
5254#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5255
5256 /* If we need two reload regs, set RELOADREG to the intermediate
5257 one, since it will be stored into OUT. We might need a secondary
5258 register only for an input reload, so check again here. */
5259
1554c2c6 5260 if (reload_secondary_reload[j] >= 0)
32131a9c 5261 {
1554c2c6 5262 rtx real_old = old;
32131a9c 5263
1554c2c6
RK
5264 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
5265 && reg_equiv_mem[REGNO (old)] != 0)
5266 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 5267
1554c2c6
RK
5268 	  if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
5269 mode, real_old)
5270 != NO_REGS))
5271 {
5272 second_reloadreg = reloadreg;
5273 reloadreg = reload_reg_rtx[reload_secondary_reload[j]];
32131a9c 5274
1554c2c6
RK
5275 /* See if RELOADREG is to be used as a scratch register
5276 or as an intermediate register. */
5277 if (reload_secondary_icode[j] != CODE_FOR_nothing)
32131a9c 5278 {
1554c2c6
RK
5279 emit_insn_before ((GEN_FCN (reload_secondary_icode[j])
5280 (real_old, second_reloadreg,
5281 reloadreg)),
5282 first_output_reload_insn);
5283 special = 1;
32131a9c
RK
5284 }
5285 else
1554c2c6
RK
5286 {
5287 /* See if we need both a scratch and intermediate reload
5288 register. */
5289 int secondary_reload = reload_secondary_reload[j];
5290 enum insn_code tertiary_icode
5291 = reload_secondary_icode[secondary_reload];
5292 rtx pat;
32131a9c 5293
1554c2c6
RK
5294 if (GET_MODE (reloadreg) != mode)
5295 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5296
5297 if (tertiary_icode != CODE_FOR_nothing)
5298 {
5299 rtx third_reloadreg
5300 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5301 pat = (GEN_FCN (tertiary_icode)
5302 (reloadreg, second_reloadreg, third_reloadreg));
5303 }
5304 else
5305 pat = gen_move_insn (reloadreg, second_reloadreg);
5306
5307 emit_insn_before (pat, first_output_reload_insn);
5308 }
32131a9c
RK
5309 }
5310 }
5311#endif
5312
5313 /* Output the last reload insn. */
5314 if (! special)
0dadecf6
RK
5315 {
5316#ifdef SECONDARY_MEMORY_NEEDED
5317 /* If we need a memory location to do the move, do it that way. */
5318 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
5319 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
5320 REGNO_REG_CLASS (REGNO (reloadreg)),
5321 GET_MODE (reloadreg)))
5322 {
5323 /* Get the memory to use and rewrite both registers to
5324 its mode. */
5325 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg));
5326
5327 if (GET_MODE (loc) != GET_MODE (reloadreg))
5328 reloadreg = gen_rtx (REG, GET_MODE (loc),
5329 REGNO (reloadreg));
5330
5331 if (GET_MODE (loc) != GET_MODE (old))
5332 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
5333
5334 emit_insn_before (gen_move_insn (loc, reloadreg),
5335 first_output_reload_insn);
5336 emit_insn_before (gen_move_insn (old, loc),
5337 first_output_reload_insn);
5338 }
5339 else
5340#endif
5341 emit_insn_before (gen_move_insn (old, reloadreg),
5342 first_output_reload_insn);
5343 }
32131a9c
RK
5344
5345#ifdef PRESERVE_DEATH_INFO_REGNO_P
5346 /* If final will look at death notes for this reg,
5347 put one on the last output-reload insn to use it. Similarly
5348 for any secondary register. */
5349 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
5350 for (p = PREV_INSN (first_output_reload_insn);
5351 p != prev_insn; p = PREV_INSN (p))
5352 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
5353 && reg_overlap_mentioned_for_reload_p (reloadreg,
5354 PATTERN (p)))
32131a9c
RK
5355 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
5356 reloadreg, REG_NOTES (p));
5357
5358#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5359 if (! special
5360 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
5361 for (p = PREV_INSN (first_output_reload_insn);
5362 p != prev_insn; p = PREV_INSN (p))
5363 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
5364 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
5365 PATTERN (p)))
32131a9c
RK
5366 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
5367 second_reloadreg, REG_NOTES (p));
5368#endif
5369#endif
5370 /* Look at all insns we emitted, just to be safe. */
5371 for (p = NEXT_INSN (prev_insn); p != first_output_reload_insn;
5372 p = NEXT_INSN (p))
5373 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
5374 {
5375 /* If this output reload doesn't come from a spill reg,
5376 clear any memory of reloaded copies of the pseudo reg.
5377 If this output reload comes from a spill reg,
5378 reg_has_output_reload will make this do nothing. */
5379 note_stores (PATTERN (p), forget_old_reloads_1);
5380
5381 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
5382 store_insn = p;
5383 }
5384
5385 first_output_reload_insn = NEXT_INSN (prev_insn);
5386 }
5387
5388 if (reload_spill_index[j] >= 0)
5389 new_spill_reg_store[reload_spill_index[j]] = store_insn;
5390 }
5391
32131a9c
RK
5392 /* Move death notes from INSN
5393 to output-operand-address and output reload insns. */
5394#ifdef PRESERVE_DEATH_INFO_REGNO_P
5395 {
5396 rtx insn1;
5397 /* Loop over those insns, last ones first. */
5398 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
5399 insn1 = PREV_INSN (insn1))
5400 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
5401 {
5402 rtx source = SET_SRC (PATTERN (insn1));
5403 rtx dest = SET_DEST (PATTERN (insn1));
5404
5405 /* The note we will examine next. */
5406 rtx reg_notes = REG_NOTES (insn);
5407 /* The place that pointed to this note. */
5408 rtx *prev_reg_note = &REG_NOTES (insn);
5409
5410 /* If the note is for something used in the source of this
5411 reload insn, or in the output address, move the note. */
5412 while (reg_notes)
5413 {
5414 rtx next_reg_notes = XEXP (reg_notes, 1);
5415 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
5416 && GET_CODE (XEXP (reg_notes, 0)) == REG
5417 && ((GET_CODE (dest) != REG
bfa30b22
RK
5418 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
5419 dest))
5420 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
5421 source)))
32131a9c
RK
5422 {
5423 *prev_reg_note = next_reg_notes;
5424 XEXP (reg_notes, 1) = REG_NOTES (insn1);
5425 REG_NOTES (insn1) = reg_notes;
5426 }
5427 else
5428 prev_reg_note = &XEXP (reg_notes, 1);
5429
5430 reg_notes = next_reg_notes;
5431 }
5432 }
5433 }
5434#endif
5435
5436 /* For all the spill regs newly reloaded in this instruction,
5437 record what they were reloaded from, so subsequent instructions
d445b551
RK
5438 can inherit the reloads.
5439
5440 Update spill_reg_store for the reloads of this insn.
e9e79d69 5441 Copy the elements that were updated in the loop above. */
32131a9c
RK
5442
5443 for (j = 0; j < n_reloads; j++)
5444 {
5445 register int r = reload_order[j];
5446 register int i = reload_spill_index[r];
5447
5448 /* I is nonneg if this reload used one of the spill regs.
5449 If reload_reg_rtx[r] is 0, this is an optional reload
5450 that we opted to ignore. */
d445b551 5451
32131a9c
RK
5452 if (i >= 0 && reload_reg_rtx[r] != 0)
5453 {
5454 /* First, clear out memory of what used to be in this spill reg.
5455 If consecutive registers are used, clear them all. */
5456 int nr
5457 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
5458 int k;
5459
5460 for (k = 0; k < nr; k++)
5461 {
5462 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
5463 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
5464 }
5465
5466 /* Maybe the spill reg contains a copy of reload_out. */
5467 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
5468 {
5469 register int nregno = REGNO (reload_out[r]);
d445b551
RK
5470
5471 spill_reg_store[i] = new_spill_reg_store[i];
32131a9c 5472 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551 5473
32131a9c
RK
5474 for (k = 0; k < nr; k++)
5475 {
5476 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5477 = nregno;
5478 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
5479 }
5480 }
d445b551 5481
32131a9c
RK
5482 /* Maybe the spill reg contains a copy of reload_in. */
5483 else if (reload_out[r] == 0
5484 && reload_in[r] != 0
5485 && (GET_CODE (reload_in[r]) == REG
5486 || GET_CODE (reload_in_reg[r]) == REG))
5487 {
5488 register int nregno;
5489 if (GET_CODE (reload_in[r]) == REG)
5490 nregno = REGNO (reload_in[r]);
5491 else
5492 nregno = REGNO (reload_in_reg[r]);
5493
5494 /* If there are two separate reloads (one in and one out)
5495 for the same (hard or pseudo) reg,
a8fdc208 5496 leave reg_last_reload_reg set
32131a9c
RK
5497 based on the output reload.
5498 Otherwise, set it from this input reload. */
5499 if (!reg_has_output_reload[nregno]
5500 /* But don't do so if another input reload
5501 will clobber this one's value. */
5502 && reload_reg_reaches_end_p (spill_regs[i],
5503 reload_when_needed[r]))
5504 {
5505 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
d445b551
RK
5506
5507 /* Unless we inherited this reload, show we haven't
5508 recently done a store. */
5509 if (! reload_inherited[r])
5510 spill_reg_store[i] = 0;
5511
32131a9c
RK
5512 for (k = 0; k < nr; k++)
5513 {
5514 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5515 = nregno;
5516 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
5517 = insn;
5518 }
5519 }
5520 }
5521 }
5522
5523 /* The following if-statement was #if 0'd in 1.34 (or before...).
5524 It's reenabled in 1.35 because supposedly nothing else
5525 deals with this problem. */
5526
5527 /* If a register gets output-reloaded from a non-spill register,
5528 that invalidates any previous reloaded copy of it.
5529 But forget_old_reloads_1 won't get to see it, because
5530 it thinks only about the original insn. So invalidate it here. */
5531 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
5532 {
5533 register int nregno = REGNO (reload_out[r]);
5534 reg_last_reload_reg[nregno] = 0;
5535 }
5536 }
5537}
5538\f
5539/* Emit code before BEFORE_INSN to perform an input reload of IN to RELOADREG.
3c3eeea6 5540 Returns first insn emitted. */
32131a9c
RK
5541
5542rtx
3c3eeea6 5543gen_input_reload (reloadreg, in, before_insn)
32131a9c
RK
5544 rtx reloadreg;
5545 rtx in;
5546 rtx before_insn;
5547{
5548 register rtx prev_insn = PREV_INSN (before_insn);
5549
a8fdc208 5550 /* How to do this reload can get quite tricky. Normally, we are being
32131a9c
RK
5551 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
5552 register that didn't get a hard register. In that case we can just
5553 call emit_move_insn.
5554
5555 We can also be asked to reload a PLUS that adds either two registers or
5556 a register and a constant or MEM. This can occur during frame pointer
5557      elimination. That case is handled by trying to emit a single insn
5558      to perform the add. If it is not valid, we use a two-insn sequence.
5559
5560 Finally, we could be called to handle an 'o' constraint by putting
5561 an address into a register. In that case, we first try to do this
5562 with a named pattern of "reload_load_address". If no such pattern
5563 exists, we just emit a SET insn and hope for the best (it will normally
5564 be valid on machines that use 'o').
5565
5566      This entire process is made complex by two facts: reload will never
5567      process the insns we generate here, so we must ensure that they
5568      satisfy their constraints; and parts of IN might be being reloaded
5569      separately and replaced with spill registers.
5570 Because of this, we are, in some sense, just guessing the right approach
5571 here. The one listed above seems to work.
5572
5573 ??? At some point, this whole thing needs to be rethought. */
5574
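  /* For illustration (the registers and offset here are hypothetical):
     when IN is (plus:SI (reg:SI fp) (const_int 8)), as can happen after
     frame pointer elimination, we first try the single insn

	(set (reg:SI reloadreg) (plus:SI (reg:SI fp) (const_int 8)))

     and keep it if recog_memoized and constrain_operands accept it.
     Otherwise the fallback further down moves the constant into the
     reload register and then adds the frame pointer with gen_add2_insn,
     giving roughly

	(set (reg:SI reloadreg) (const_int 8))
	(set (reg:SI reloadreg) (plus:SI (reg:SI reloadreg) (reg:SI fp)))  */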
5575 if (GET_CODE (in) == PLUS
5576 && GET_CODE (XEXP (in, 0)) == REG
5577 && (GET_CODE (XEXP (in, 1)) == REG
5578 || CONSTANT_P (XEXP (in, 1))
5579 || GET_CODE (XEXP (in, 1)) == MEM))
5580 {
5581 /* We need to compute the sum of what is either a register and a
5582 constant, a register and memory, or a hard register and a pseudo
5583 register and put it into the reload register. The best possible way
5584 of doing this is if the machine has a three-operand ADD insn that
5585 accepts the required operands.
5586
5587 The simplest approach is to try to generate such an insn and see if it
5588 is recognized and matches its constraints. If so, it can be used.
5589
5590 It might be better not to actually emit the insn unless it is valid,
0009eff2 5591 but we need to pass the insn as an operand to `recog' and
b36d7dd7 5592 `insn_extract' and it is simpler to emit and then delete the insn if
0009eff2 5593 not valid than to dummy things up. */
a8fdc208 5594
af929c62 5595 rtx op0, op1, tem, insn;
32131a9c 5596 int code;
a8fdc208 5597
af929c62
RK
5598 op0 = find_replacement (&XEXP (in, 0));
5599 op1 = find_replacement (&XEXP (in, 1));
5600
32131a9c
RK
5601 /* Since constraint checking is strict, commutativity won't be
5602 checked, so we need to do that here to avoid spurious failure
5603 if the add instruction is two-address and the second operand
5604 of the add is the same as the reload reg, which is frequently
5605 the case. If the insn would be A = B + A, rearrange it so
5606 it will be A = A + B as constrain_operands expects. */
a8fdc208 5607
32131a9c
RK
5608 if (GET_CODE (XEXP (in, 1)) == REG
5609 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
af929c62
RK
5610 tem = op0, op0 = op1, op1 = tem;
5611
5612 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
5613 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
32131a9c
RK
5614
5615 insn = emit_insn_before (gen_rtx (SET, VOIDmode, reloadreg, in),
5616 before_insn);
5617 code = recog_memoized (insn);
5618
5619 if (code >= 0)
5620 {
5621 insn_extract (insn);
5622       /* We want constrain_operands to treat this insn strictly in
5623 its validity determination, i.e., the way it would after reload
5624 has completed. */
5625 if (constrain_operands (code, 1))
5626 return insn;
5627 }
5628
5629 if (PREV_INSN (insn))
5630 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
5631 if (NEXT_INSN (insn))
5632 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
5633
5634 /* If that failed, we must use a conservative two-insn sequence.
5635 	 Use a move to copy the constant, MEM, or pseudo register to the reload
af929c62
RK
5636 register since "move" will be able to handle an arbitrary operand,
5637 unlike add which can't, in general. Then add the registers.
32131a9c
RK
5638
5639 If there is another way to do this for a specific machine, a
5640 DEFINE_PEEPHOLE should be specified that recognizes the sequence
5641 we emit below. */
5642
af929c62
RK
5643 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
5644 || (GET_CODE (op1) == REG
5645 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
5646 tem = op0, op0 = op1, op1 = tem;
32131a9c 5647
af929c62
RK
5648 emit_insn_before (gen_move_insn (reloadreg, op0), before_insn);
5649 emit_insn_before (gen_add2_insn (reloadreg, op1), before_insn);
32131a9c
RK
5650 }
5651
0dadecf6
RK
5652#ifdef SECONDARY_MEMORY_NEEDED
5653 /* If we need a memory location to do the move, do it that way. */
5654 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
5655 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
5656 REGNO_REG_CLASS (REGNO (reloadreg)),
5657 GET_MODE (reloadreg)))
5658 {
5659 /* Get the memory to use and rewrite both registers to its mode. */
5660 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg));
5661
5662 if (GET_MODE (loc) != GET_MODE (reloadreg))
5663 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
5664
5665 if (GET_MODE (loc) != GET_MODE (in))
5666 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
5667
5668 emit_insn_before (gen_move_insn (reloadreg, loc), before_insn);
5669 emit_insn_before (gen_move_insn (loc, in), before_insn);
5670 }
5671#endif
5672
32131a9c
RK
5673 /* If IN is a simple operand, use gen_move_insn. */
5674 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
3c3eeea6 5675 emit_insn_before (gen_move_insn (reloadreg, in), before_insn);
32131a9c
RK
5676
5677#ifdef HAVE_reload_load_address
5678 else if (HAVE_reload_load_address)
3c3eeea6 5679 emit_insn_before (gen_reload_load_address (reloadreg, in), before_insn);
32131a9c
RK
5680#endif
5681
5682   /* Otherwise, just write (set RELOADREG IN) and hope for the best. */
5683 else
3c3eeea6 5684 emit_insn_before (gen_rtx (SET, VOIDmode, reloadreg, in), before_insn);
32131a9c
RK
5685
5686 /* Return the first insn emitted.
5687 We can not just return PREV_INSN (before_insn), because there may have
5688 been multiple instructions emitted. Also note that gen_move_insn may
5689 emit more than one insn itself, so we can not assume that there is one
5690 insn emitted per emit_insn_before call. */
5691
5692 return NEXT_INSN (prev_insn);
5693}
5694\f
5695/* Delete a previously made output-reload
5696 whose result we now believe is not needed.
5697 First we double-check.
5698
5699 INSN is the insn now being processed.
5700 OUTPUT_RELOAD_INSN is the insn of the output reload.
5701 J is the reload-number for this insn. */
5702
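/* For illustration (hypothetical registers): suppose pseudo 70 lives in a
   stack slot, spill reg 4 was stored into it by an earlier output reload,
   and the current insn inherits that value from spill reg 4 as an input
   reload.  If pseudo 70 is not referenced between the store and here and
   dies in the current insn, the stored value is never needed, so the store
   can be deleted; and if that store was the pseudo's only remaining use,
   its stack slot can be dropped as well.  */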
5703static void
5704delete_output_reload (insn, j, output_reload_insn)
5705 rtx insn;
5706 int j;
5707 rtx output_reload_insn;
5708{
5709 register rtx i1;
5710
5711 /* Get the raw pseudo-register referred to. */
5712
5713 rtx reg = reload_in[j];
5714 while (GET_CODE (reg) == SUBREG)
5715 reg = SUBREG_REG (reg);
5716
5717 /* If the pseudo-reg we are reloading is no longer referenced
5718 anywhere between the store into it and here,
5719 and no jumps or labels intervene, then the value can get
5720 here through the reload reg alone.
5721 Otherwise, give up--return. */
5722 for (i1 = NEXT_INSN (output_reload_insn);
5723 i1 != insn; i1 = NEXT_INSN (i1))
5724 {
5725 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
5726 return;
5727 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
5728 && reg_mentioned_p (reg, PATTERN (i1)))
5729 return;
5730 }
5731
5732 /* If this insn will store in the pseudo again,
5733 the previous store can be removed. */
5734 if (reload_out[j] == reload_in[j])
5735 delete_insn (output_reload_insn);
5736
5737 /* See if the pseudo reg has been completely replaced
5738 with reload regs. If so, delete the store insn
5739 and forget we had a stack slot for the pseudo. */
5740 else if (reg_n_deaths[REGNO (reg)] == 1
5741 && reg_basic_block[REGNO (reg)] >= 0
5742 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
5743 {
5744 rtx i2;
5745
5746 /* We know that it was used only between here
5747 and the beginning of the current basic block.
5748 (We also know that the last use before INSN was
5749 the output reload we are thinking of deleting, but never mind that.)
5750 Search that range; see if any ref remains. */
5751 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
5752 {
d445b551
RK
5753 rtx set = single_set (i2);
5754
32131a9c
RK
5755 /* Uses which just store in the pseudo don't count,
5756 since if they are the only uses, they are dead. */
d445b551 5757 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
5758 continue;
5759 if (GET_CODE (i2) == CODE_LABEL
5760 || GET_CODE (i2) == JUMP_INSN)
5761 break;
5762 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
5763 && reg_mentioned_p (reg, PATTERN (i2)))
5764 /* Some other ref remains;
5765 we can't do anything. */
5766 return;
5767 }
5768
5769 /* Delete the now-dead stores into this pseudo. */
5770 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
5771 {
d445b551
RK
5772 rtx set = single_set (i2);
5773
5774 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
5775 delete_insn (i2);
5776 if (GET_CODE (i2) == CODE_LABEL
5777 || GET_CODE (i2) == JUMP_INSN)
5778 break;
5779 }
5780
5781 /* For the debugging info,
5782 say the pseudo lives in this reload reg. */
5783 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
5784 alter_reg (REGNO (reg), -1);
5785 }
5786}
5787
5788\f
a8fdc208 5789/* Output reload-insns to reload VALUE into RELOADREG.
32131a9c
RK
5790    VALUE is an autoincrement or autodecrement RTX whose operand
5791 is a register or memory location;
5792 so reloading involves incrementing that location.
5793
5794 INC_AMOUNT is the number to increment or decrement by (always positive).
5795 This cannot be deduced from VALUE.
5796
5797 INSN is the insn before which the new insns should be emitted.
5798
5799 The return value is the first of the insns emitted. */
5800
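/* For illustration (hypothetical operands): to reload
   (post_inc:SI (reg:SI 5)) with INC_AMOUNT 4, we first copy reg 5 into
   RELOADREG and then try incrementing reg 5 in place with the single insn

      (set (reg:SI 5) (plus:SI (reg:SI 5) (const_int 4)))

   If that insn is not recognized or fails its constraints, the fallback
   adds 4 to RELOADREG, copies RELOADREG back into reg 5, and then
   subtracts 4 from RELOADREG again so that it still holds the original
   (pre-increment) value to be used as the address.  */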
5801static rtx
5802inc_for_reload (reloadreg, value, inc_amount, insn)
5803 rtx reloadreg;
5804 rtx value;
5805 int inc_amount;
5806 rtx insn;
5807{
5808 /* REG or MEM to be copied and incremented. */
5809 rtx incloc = XEXP (value, 0);
5810 /* Nonzero if increment after copying. */
5811 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
0009eff2
RK
5812 rtx prev = PREV_INSN (insn);
5813 rtx inc;
5814 rtx add_insn;
5815 int code;
32131a9c
RK
5816
5817 /* No hard register is equivalent to this register after
5818 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
5819 we could inc/dec that register as well (maybe even using it for
5820 the source), but I'm not sure it's worth worrying about. */
5821 if (GET_CODE (incloc) == REG)
5822 reg_last_reload_reg[REGNO (incloc)] = 0;
5823
5824 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
5825 inc_amount = - inc_amount;
5826
fb3821f7 5827 inc = GEN_INT (inc_amount);
0009eff2
RK
5828
5829 /* If this is post-increment, first copy the location to the reload reg. */
5830 if (post)
5831 emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
5832
5833 /* See if we can directly increment INCLOC. Use a method similar to that
5834 in gen_input_reload. */
5835
5836 add_insn = emit_insn_before (gen_rtx (SET, VOIDmode, incloc,
5837 gen_rtx (PLUS, GET_MODE (incloc),
5838 incloc, inc)), insn);
5839
5840 code = recog_memoized (add_insn);
5841 if (code >= 0)
32131a9c 5842 {
0009eff2
RK
5843 insn_extract (add_insn);
5844 if (constrain_operands (code, 1))
32131a9c 5845 {
0009eff2
RK
5846 /* If this is a pre-increment and we have incremented the value
5847 where it lives, copy the incremented value to RELOADREG to
5848 be used as an address. */
5849
5850 if (! post)
5851 emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
5852 return NEXT_INSN (prev);
32131a9c
RK
5853 }
5854 }
0009eff2
RK
5855
5856 if (PREV_INSN (add_insn))
5857 NEXT_INSN (PREV_INSN (add_insn)) = NEXT_INSN (add_insn);
5858 if (NEXT_INSN (add_insn))
5859 PREV_INSN (NEXT_INSN (add_insn)) = PREV_INSN (add_insn);
5860
5861   /* If we couldn't do the increment directly, we must increment in RELOADREG.
5862 The way we do this depends on whether this is pre- or post-increment.
5863 For pre-increment, copy INCLOC to the reload register, increment it
5864 there, then save back. */
5865
5866 if (! post)
5867 {
5868 emit_insn_before (gen_move_insn (reloadreg, incloc), insn);
5869 emit_insn_before (gen_add2_insn (reloadreg, inc), insn);
5870 emit_insn_before (gen_move_insn (incloc, reloadreg), insn);
5871 }
32131a9c
RK
5872 else
5873 {
0009eff2
RK
5874 /* Postincrement.
5875 Because this might be a jump insn or a compare, and because RELOADREG
5876 may not be available after the insn in an input reload, we must do
5877 the incrementation before the insn being reloaded for.
5878
5879 We have already copied INCLOC to RELOADREG. Increment the copy in
5880 RELOADREG, save that back, then decrement RELOADREG so it has
5881 the original value. */
5882
5883 emit_insn_before (gen_add2_insn (reloadreg, inc), insn);
5884 emit_insn_before (gen_move_insn (incloc, reloadreg), insn);
fb3821f7 5885 emit_insn_before (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)),
0009eff2 5886 insn);
32131a9c 5887 }
0009eff2
RK
5888
5889 return NEXT_INSN (prev);
32131a9c
RK
5890}
5891\f
5892/* Return 1 if we are certain that the constraint-string STRING allows
5893 the hard register REG. Return 0 if we can't be sure of this. */
5894
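/* For example, assuming REG is an ordinary general register: the strings
   "r" and "g,r" yield 1, because every alternative accepts a general
   register, while a string such as "r,m" yields 0 on a typical machine
   description, because the memory-only alternative gives us no certainty
   about the register.  */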
5895static int
5896constraint_accepts_reg_p (string, reg)
5897 char *string;
5898 rtx reg;
5899{
5900 int value = 0;
5901 int regno = true_regnum (reg);
5902 int c;
5903
5904 /* Initialize for first alternative. */
5905 value = 0;
5906 /* Check that each alternative contains `g' or `r'. */
5907 while (1)
5908 switch (c = *string++)
5909 {
5910 case 0:
5911 /* If an alternative lacks `g' or `r', we lose. */
5912 return value;
5913 case ',':
5914 /* If an alternative lacks `g' or `r', we lose. */
5915 if (value == 0)
5916 return 0;
5917 /* Initialize for next alternative. */
5918 value = 0;
5919 break;
5920 case 'g':
5921 case 'r':
5922 /* Any general reg wins for this alternative. */
5923 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
5924 value = 1;
5925 break;
5926 default:
5927 /* Any reg in specified class wins for this alternative. */
5928 {
0009eff2 5929 enum reg_class class = REG_CLASS_FROM_LETTER (c);
32131a9c 5930
0009eff2 5931 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
32131a9c
RK
5932 value = 1;
5933 }
5934 }
5935}
5936\f
d445b551
RK
5937/* Return the number of places FIND appears within X, but don't count
5938 an occurrence if some SET_DEST is FIND. */
32131a9c
RK
5939
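/* For example, if X is (set (reg:SI 70) (plus:SI (reg:SI 70) (reg:SI 71)))
   and FIND is the rtx for pseudo 70, the SET_DEST occurrence is not counted
   and the result is 1 (the use inside the PLUS).  Note that the match is by
   rtx identity (X == FIND), not by rtx_equal_p.  */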
5940static int
5941count_occurrences (x, find)
5942 register rtx x, find;
5943{
5944 register int i, j;
5945 register enum rtx_code code;
5946 register char *format_ptr;
5947 int count;
5948
5949 if (x == find)
5950 return 1;
5951 if (x == 0)
5952 return 0;
5953
5954 code = GET_CODE (x);
5955
5956 switch (code)
5957 {
5958 case REG:
5959 case QUEUED:
5960 case CONST_INT:
5961 case CONST_DOUBLE:
5962 case SYMBOL_REF:
5963 case CODE_LABEL:
5964 case PC:
5965 case CC0:
5966 return 0;
d445b551
RK
5967
5968 case SET:
5969 if (SET_DEST (x) == find)
5970 return count_occurrences (SET_SRC (x), find);
5971 break;
32131a9c
RK
5972 }
5973
5974 format_ptr = GET_RTX_FORMAT (code);
5975 count = 0;
5976
5977 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5978 {
5979 switch (*format_ptr++)
5980 {
5981 case 'e':
5982 count += count_occurrences (XEXP (x, i), find);
5983 break;
5984
5985 case 'E':
5986 if (XVEC (x, i) != NULL)
5987 {
5988 for (j = 0; j < XVECLEN (x, i); j++)
5989 count += count_occurrences (XVECEXP (x, i, j), find);
5990 }
5991 break;
5992 }
5993 }
5994 return count;
5995}