]> gcc.gnu.org Git - gcc.git/blame - gcc/reload1.c
reload.c (find_equiv_reg): If need_stable_sp is set, check if stack pointer is change...
[gcc.git] / gcc / reload1.c
CommitLineData
32131a9c 1/* Reload pseudo regs into hard regs for insns that require hard regs.
f5963e61 2 Copyright (C) 1987, 88, 89, 92-97, 1998 Free Software Foundation, Inc.
32131a9c
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
e99215a3
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
32131a9c
RK
20
21
22#include "config.h"
670ee920 23#include "system.h"
cab634f2
KG
24
25#include "machmode.h"
26#include "hard-reg-set.h"
32131a9c
RK
27#include "rtl.h"
28#include "obstack.h"
29#include "insn-config.h"
30#include "insn-flags.h"
31#include "insn-codes.h"
32#include "flags.h"
33#include "expr.h"
34#include "regs.h"
32131a9c
RK
35#include "reload.h"
36#include "recog.h"
37#include "basic-block.h"
38#include "output.h"
a9c366bf 39#include "real.h"
10f0ad3d 40#include "toplev.h"
32131a9c
RK
41
42/* This file contains the reload pass of the compiler, which is
43 run after register allocation has been done. It checks that
44 each insn is valid (operands required to be in registers really
45 are in registers of the proper class) and fixes up invalid ones
46 by copying values temporarily into registers for the insns
47 that need them.
48
49 The results of register allocation are described by the vector
50 reg_renumber; the insns still contain pseudo regs, but reg_renumber
51 can be used to find which hard reg, if any, a pseudo reg is in.
52
53 The technique we always use is to free up a few hard regs that are
54 called ``reload regs'', and for each place where a pseudo reg
55 must be in a hard reg, copy it temporarily into one of the reload regs.
56
57 All the pseudos that were formerly allocated to the hard regs that
58 are now in use as reload regs must be ``spilled''. This means
59 that they go to other hard regs, or to stack slots if no other
60 available hard regs can be found. Spilling can invalidate more
61 insns, requiring additional need for reloads, so we must keep checking
62 until the process stabilizes.
63
64 For machines with different classes of registers, we must keep track
65 of the register class needed for each reload, and make sure that
66 we allocate enough reload registers of each class.
67
68 The file reload.c contains the code that checks one insn for
69 validity and reports the reloads that it needs. This file
70 is in charge of scanning the entire rtl code, accumulating the
71 reload needs, spilling, assigning reload registers to use for
72 fixing up each insn, and generating the new insns to copy values
73 into the reload registers. */
546b63fb
RK
74
75
76#ifndef REGISTER_MOVE_COST
77#define REGISTER_MOVE_COST(x, y) 2
78#endif
32131a9c
RK
79\f
80/* During reload_as_needed, element N contains a REG rtx for the hard reg
0f41302f 81 into which reg N has been reloaded (perhaps for a previous insn). */
32131a9c
RK
82static rtx *reg_last_reload_reg;
83
84/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
85 for an output reload that stores into reg N. */
86static char *reg_has_output_reload;
87
88/* Indicates which hard regs are reload-registers for an output reload
89 in the current insn. */
90static HARD_REG_SET reg_is_output_reload;
91
92/* Element N is the constant value to which pseudo reg N is equivalent,
93 or zero if pseudo reg N is not equivalent to a constant.
94 find_reloads looks at this in order to replace pseudo reg N
95 with the constant it stands for. */
96rtx *reg_equiv_constant;
97
98/* Element N is a memory location to which pseudo reg N is equivalent,
99 prior to any register elimination (such as frame pointer to stack
100 pointer). Depending on whether or not it is a valid address, this value
101 is transferred to either reg_equiv_address or reg_equiv_mem. */
4803a34a 102rtx *reg_equiv_memory_loc;
32131a9c
RK
103
104/* Element N is the address of stack slot to which pseudo reg N is equivalent.
105 This is used when the address is not valid as a memory address
106 (because its displacement is too big for the machine.) */
107rtx *reg_equiv_address;
108
109/* Element N is the memory slot to which pseudo reg N is equivalent,
110 or zero if pseudo reg N is not equivalent to a memory slot. */
111rtx *reg_equiv_mem;
112
113/* Widest width in which each pseudo reg is referred to (via subreg). */
114static int *reg_max_ref_width;
115
116/* Element N is the insn that initialized reg N from its equivalent
117 constant or memory slot. */
118static rtx *reg_equiv_init;
119
e6e52be0
R
120/* During reload_as_needed, element N contains the last pseudo regno reloaded
121 into hard register N. If that pseudo reg occupied more than one register,
32131a9c
RK
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
125
126/* During reload_as_needed, element N contains the insn for which
e6e52be0
R
127 hard register N was last used. Its contents are significant only
128 when reg_reloaded_valid is set for this register. */
32131a9c
RK
129static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
130
e6e52be0
R
131/* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid */
132static HARD_REG_SET reg_reloaded_valid;
133/* Indicate if the register was dead at the end of the reload.
134 This is only valid if reg_reloaded_contents is set and valid. */
135static HARD_REG_SET reg_reloaded_dead;
136
32131a9c
RK
137/* Number of spill-regs so far; number of valid elements of spill_regs. */
138static int n_spills;
139
140/* In parallel with spill_regs, contains REG rtx's for those regs.
141 Holds the last rtx used for any given reg, or 0 if it has never
142 been used for spilling yet. This rtx is reused, provided it has
143 the proper mode. */
144static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
145
146/* In parallel with spill_regs, contains nonzero for a spill reg
147 that was stored after the last time it was used.
148 The precise value is the insn generated to do the store. */
149static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
150
151/* This table is the inverse mapping of spill_regs:
152 indexed by hard reg number,
153 it contains the position of that reg in spill_regs,
154 or -1 for something that is not in spill_regs. */
155static short spill_reg_order[FIRST_PSEUDO_REGISTER];
156
157/* This reg set indicates registers that may not be used for retrying global
158 allocation. The registers that may not be used include all spill registers
159 and the frame pointer (if we are using one). */
160HARD_REG_SET forbidden_regs;
161
162/* This reg set indicates registers that are not good for spill registers.
163 They will not be used to complete groups of spill registers. This includes
546b63fb 164 all fixed registers, registers that may be eliminated, and, if
e9a25f70 165 SMALL_REGISTER_CLASSES is zero, registers explicitly used in the rtl.
32131a9c
RK
166
167 (spill_reg_order prevents these registers from being used to start a
168 group.) */
169static HARD_REG_SET bad_spill_regs;
170
171/* Describes order of use of registers for reloading
172 of spilled pseudo-registers. `spills' is the number of
173 elements that are actually valid; new ones are added at the end. */
174static short spill_regs[FIRST_PSEUDO_REGISTER];
175
8b4f9969
JW
176/* This reg set indicates those registers that have been used a spill
177 registers. This information is used in reorg.c, to help figure out
178 what registers are live at any point. It is assumed that all spill_regs
179 are dead at every CODE_LABEL. */
180
181HARD_REG_SET used_spill_regs;
182
4079cd63
JW
183/* Index of last register assigned as a spill register. We allocate in
184 a round-robin fashion. */
185
186static int last_spill_reg;
187
32131a9c
RK
188/* Describes order of preference for putting regs into spill_regs.
189 Contains the numbers of all the hard regs, in order most preferred first.
190 This order is different for each function.
191 It is set up by order_regs_for_reload.
192 Empty elements at the end contain -1. */
193static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
194
195/* 1 for a hard register that appears explicitly in the rtl
196 (for example, function value registers, special registers
197 used by insns, structure value pointer registers). */
198static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
199
200/* Indicates if a register was counted against the need for
201 groups. 0 means it can count against max_nongroup instead. */
202static HARD_REG_SET counted_for_groups;
203
204/* Indicates if a register was counted against the need for
205 non-groups. 0 means it can become part of a new group.
206 During choose_reload_regs, 1 here means don't use this reg
207 as part of a group, even if it seems to be otherwise ok. */
208static HARD_REG_SET counted_for_nongroups;
209
210/* Nonzero if indirect addressing is supported on the machine; this means
211 that spilling (REG n) does not require reloading it into a register in
212 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
213 value indicates the level of indirect addressing supported, e.g., two
214 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
215 a hard register. */
216
217static char spill_indirect_levels;
218
219/* Nonzero if indirect addressing is supported when the innermost MEM is
220 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
221 which these are valid is the same as spill_indirect_levels, above. */
222
223char indirect_symref_ok;
224
225/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
226
227char double_reg_address_ok;
228
229/* Record the stack slot for each spilled hard register. */
230
231static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
232
233/* Width allocated so far for that stack slot. */
234
235static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
236
237/* Indexed by register class and basic block number, nonzero if there is
238 any need for a spill register of that class in that basic block.
239 The pointer is 0 if we did stupid allocation and don't know
240 the structure of basic blocks. */
241
242char *basic_block_needs[N_REG_CLASSES];
243
244/* First uid used by insns created by reload in this function.
245 Used in find_equiv_reg. */
246int reload_first_uid;
247
248/* Flag set by local-alloc or global-alloc if anything is live in
249 a call-clobbered reg across calls. */
250
251int caller_save_needed;
252
7402683f
ILT
253/* The register class to use for a base register when reloading an
254 address. This is normally BASE_REG_CLASS, but it may be different
255 when using SMALL_REGISTER_CLASSES and passing parameters in
256 registers. */
257enum reg_class reload_address_base_reg_class;
258
259/* The register class to use for an index register when reloading an
260 address. This is normally INDEX_REG_CLASS, but it may be different
261 when using SMALL_REGISTER_CLASSES and passing parameters in
262 registers. */
263enum reg_class reload_address_index_reg_class;
264
32131a9c
RK
265/* Set to 1 while reload_as_needed is operating.
266 Required by some machines to handle any generated moves differently. */
267
268int reload_in_progress = 0;
269
270/* These arrays record the insn_code of insns that may be needed to
271 perform input and output reloads of special objects. They provide a
272 place to pass a scratch register. */
273
274enum insn_code reload_in_optab[NUM_MACHINE_MODES];
275enum insn_code reload_out_optab[NUM_MACHINE_MODES];
276
d45cf215 277/* This obstack is used for allocation of rtl during register elimination.
32131a9c
RK
278 The allocated storage can be freed once find_reloads has processed the
279 insn. */
280
281struct obstack reload_obstack;
282char *reload_firstobj;
283
284#define obstack_chunk_alloc xmalloc
285#define obstack_chunk_free free
286
32131a9c
RK
287/* List of labels that must never be deleted. */
288extern rtx forced_labels;
2c5d9e37
RK
289
290/* Allocation number table from global register allocation. */
291extern int *reg_allocno;
32131a9c
RK
292\f
293/* This structure is used to record information about register eliminations.
294 Each array entry describes one possible way of eliminating a register
295 in favor of another. If there is more than one way of eliminating a
296 particular register, the most preferred should be specified first. */
297
298static struct elim_table
299{
0f41302f
MS
300 int from; /* Register number to be eliminated. */
301 int to; /* Register number used as replacement. */
302 int initial_offset; /* Initial difference between values. */
303 int can_eliminate; /* Non-zero if this elimination can be done. */
32131a9c 304 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
0f41302f
MS
305 insns made by reload. */
306 int offset; /* Current offset between the two regs. */
307 int max_offset; /* Maximum offset between the two regs. */
308 int previous_offset; /* Offset at end of previous insn. */
309 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
32131a9c
RK
310 rtx from_rtx; /* REG rtx for the register to be eliminated.
311 We cannot simply compare the number since
312 we might then spuriously replace a hard
313 register corresponding to a pseudo
0f41302f
MS
314 assigned to the reg to be eliminated. */
315 rtx to_rtx; /* REG rtx for the replacement. */
32131a9c
RK
316} reg_eliminate[] =
317
318/* If a set of eliminable registers was specified, define the table from it.
319 Otherwise, default to the normal case of the frame pointer being
320 replaced by the stack pointer. */
321
322#ifdef ELIMINABLE_REGS
323 ELIMINABLE_REGS;
324#else
325 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
326#endif
327
328#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
329
330/* Record the number of pending eliminations that have an offset not equal
331 to their initial offset. If non-zero, we use a new copy of each
332 replacement result in any insns encountered. */
333static int num_not_at_initial_offset;
334
335/* Count the number of registers that we may be able to eliminate. */
336static int num_eliminable;
337
338/* For each label, we record the offset of each elimination. If we reach
339 a label by more than one path and an offset differs, we cannot do the
340 elimination. This information is indexed by the number of the label.
341 The first table is an array of flags that records whether we have yet
342 encountered a label and the second table is an array of arrays, one
343 entry in the latter array for each elimination. */
344
345static char *offsets_known_at;
346static int (*offsets_at)[NUM_ELIMINABLE_REGS];
347
348/* Number of labels in the current function. */
349
350static int num_labels;
546b63fb
RK
351
352struct hard_reg_n_uses { int regno; int uses; };
32131a9c 353\f
546b63fb
RK
354static int possible_group_p PROTO((int, int *));
355static void count_possible_groups PROTO((int *, enum machine_mode *,
066aca28 356 int *, int));
546b63fb
RK
357static int modes_equiv_for_class_p PROTO((enum machine_mode,
358 enum machine_mode,
359 enum reg_class));
360static void spill_failure PROTO((rtx));
361static int new_spill_reg PROTO((int, int, int *, int *, int,
362 FILE *));
363static void delete_dead_insn PROTO((rtx));
364static void alter_reg PROTO((int, int));
c307c237 365static void mark_scratch_live PROTO((rtx));
546b63fb
RK
366static void set_label_offsets PROTO((rtx, rtx, int));
367static int eliminate_regs_in_insn PROTO((rtx, int));
368static void mark_not_eliminable PROTO((rtx, rtx));
369static int spill_hard_reg PROTO((int, int, FILE *, int));
370static void scan_paradoxical_subregs PROTO((rtx));
788a0818 371static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
2c5d9e37 372static void order_regs_for_reload PROTO((int));
788a0818 373static int compare_spill_regs PROTO((const GENERIC_PTR, const GENERIC_PTR));
546b63fb 374static void reload_as_needed PROTO((rtx, int));
9a881562 375static void forget_old_reloads_1 PROTO((rtx, rtx));
788a0818 376static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
546b63fb
RK
377static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
378 enum machine_mode));
be7ae2a4
RK
379static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
380 enum machine_mode));
546b63fb
RK
381static int reload_reg_free_p PROTO((int, int, enum reload_type));
382static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
f5470689 383static int reload_reg_free_for_value_p PROTO((int, int, enum reload_type, rtx, rtx, int));
546b63fb
RK
384static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
385static int allocate_reload_reg PROTO((int, rtx, int, int));
386static void choose_reload_regs PROTO((rtx, rtx));
387static void merge_assigned_reloads PROTO((rtx));
388static void emit_reload_insns PROTO((rtx));
389static void delete_output_reload PROTO((rtx, int, rtx));
390static void inc_for_reload PROTO((rtx, rtx, int));
391static int constraint_accepts_reg_p PROTO((char *, rtx));
2a9fb548 392static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int));
cbfc3ad3 393static int reload_cse_mem_conflict_p PROTO((rtx, rtx));
2a9fb548
ILT
394static void reload_cse_invalidate_mem PROTO((rtx));
395static void reload_cse_invalidate_rtx PROTO((rtx, rtx));
2a9fb548 396static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode));
31418d35 397static int reload_cse_noop_set_p PROTO((rtx, rtx));
e9a25f70
JL
398static int reload_cse_simplify_set PROTO((rtx, rtx));
399static int reload_cse_simplify_operands PROTO((rtx));
2a9fb548
ILT
400static void reload_cse_check_clobber PROTO((rtx, rtx));
401static void reload_cse_record_set PROTO((rtx, rtx));
e9a25f70
JL
402static void reload_cse_delete_death_notes PROTO((rtx));
403static void reload_cse_no_longer_dead PROTO((int, enum machine_mode));
32131a9c 404\f
546b63fb
RK
405/* Initialize the reload pass once per compilation. */
406
32131a9c
RK
407void
408init_reload ()
409{
410 register int i;
411
412 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
413 Set spill_indirect_levels to the number of levels such addressing is
414 permitted, zero if it is not permitted at all. */
415
416 register rtx tem
38a448ca
RH
417 = gen_rtx_MEM (Pmode,
418 gen_rtx_PLUS (Pmode,
419 gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1),
420 GEN_INT (4)));
32131a9c
RK
421 spill_indirect_levels = 0;
422
423 while (memory_address_p (QImode, tem))
424 {
425 spill_indirect_levels++;
38a448ca 426 tem = gen_rtx_MEM (Pmode, tem);
32131a9c
RK
427 }
428
429 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
430
38a448ca 431 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
32131a9c
RK
432 indirect_symref_ok = memory_address_p (QImode, tem);
433
434 /* See if reg+reg is a valid (and offsettable) address. */
435
65701fd2 436 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
57caa638 437 {
38a448ca
RH
438 tem = gen_rtx_PLUS (Pmode,
439 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
440 gen_rtx_REG (Pmode, i));
57caa638
RS
441 /* This way, we make sure that reg+reg is an offsettable address. */
442 tem = plus_constant (tem, 4);
443
444 if (memory_address_p (QImode, tem))
445 {
446 double_reg_address_ok = 1;
447 break;
448 }
449 }
32131a9c 450
0f41302f 451 /* Initialize obstack for our rtl allocation. */
32131a9c
RK
452 gcc_obstack_init (&reload_obstack);
453 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
7402683f
ILT
454
455 /* Decide which register class should be used when reloading
456 addresses. If we are using SMALL_REGISTER_CLASSES, and any
457 parameters are passed in registers, then we do not want to use
458 those registers when reloading an address. Otherwise, if a
459 function argument needs a reload, we may wind up clobbering
460 another argument to the function which was already computed. If
461 we find a subset class which simply avoids those registers, we
462 use it instead. ??? It would be better to only use the
463 restricted class when we actually are loading function arguments,
464 but that is hard to determine. */
465 reload_address_base_reg_class = BASE_REG_CLASS;
466 reload_address_index_reg_class = INDEX_REG_CLASS;
7402683f
ILT
467 if (SMALL_REGISTER_CLASSES)
468 {
469 int regno;
470 HARD_REG_SET base, index;
471 enum reg_class *p;
472
473 COPY_HARD_REG_SET (base, reg_class_contents[BASE_REG_CLASS]);
474 COPY_HARD_REG_SET (index, reg_class_contents[INDEX_REG_CLASS]);
475 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
476 {
477 if (FUNCTION_ARG_REGNO_P (regno))
478 {
479 CLEAR_HARD_REG_BIT (base, regno);
480 CLEAR_HARD_REG_BIT (index, regno);
481 }
482 }
483
484 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[BASE_REG_CLASS],
485 baseok);
486 for (p = reg_class_subclasses[BASE_REG_CLASS];
487 *p != LIM_REG_CLASSES;
488 p++)
489 {
490 GO_IF_HARD_REG_EQUAL (base, reg_class_contents[*p], usebase);
491 continue;
492 usebase:
493 reload_address_base_reg_class = *p;
494 break;
495 }
496 baseok:;
497
498 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[INDEX_REG_CLASS],
499 indexok);
500 for (p = reg_class_subclasses[INDEX_REG_CLASS];
501 *p != LIM_REG_CLASSES;
502 p++)
503 {
504 GO_IF_HARD_REG_EQUAL (index, reg_class_contents[*p], useindex);
505 continue;
506 useindex:
507 reload_address_index_reg_class = *p;
508 break;
509 }
510 indexok:;
511 }
32131a9c
RK
512}
513
546b63fb 514/* Main entry point for the reload pass.
32131a9c
RK
515
516 FIRST is the first insn of the function being compiled.
517
518 GLOBAL nonzero means we were called from global_alloc
519 and should attempt to reallocate any pseudoregs that we
520 displace from hard regs we will use for reloads.
521 If GLOBAL is zero, we do not have enough information to do that,
522 so any pseudo reg that is spilled must go to the stack.
523
524 DUMPFILE is the global-reg debugging dump file stream, or 0.
525 If it is nonzero, messages are written to it to describe
526 which registers are seized as reload regs, which pseudo regs
5352b11a 527 are spilled from them, and where the pseudo regs are reallocated to.
32131a9c 528
5352b11a
RS
529 Return value is nonzero if reload failed
530 and we must not do any more for this function. */
531
532int
32131a9c
RK
533reload (first, global, dumpfile)
534 rtx first;
535 int global;
536 FILE *dumpfile;
537{
538 register int class;
8b3e912b 539 register int i, j, k;
32131a9c
RK
540 register rtx insn;
541 register struct elim_table *ep;
542
a68d4b75
BK
543 /* The two pointers used to track the true location of the memory used
544 for label offsets. */
545 char *real_known_ptr = NULL_PTR;
546 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
547
32131a9c
RK
548 int something_changed;
549 int something_needs_reloads;
550 int something_needs_elimination;
551 int new_basic_block_needs;
a8efe40d
RK
552 enum reg_class caller_save_spill_class = NO_REGS;
553 int caller_save_group_size = 1;
32131a9c 554
5352b11a
RS
555 /* Nonzero means we couldn't get enough spill regs. */
556 int failure = 0;
557
32131a9c
RK
558 /* The basic block number currently being processed for INSN. */
559 int this_block;
560
561 /* Make sure even insns with volatile mem refs are recognizable. */
562 init_recog ();
563
564 /* Enable find_equiv_reg to distinguish insns made by reload. */
565 reload_first_uid = get_max_uid ();
566
567 for (i = 0; i < N_REG_CLASSES; i++)
568 basic_block_needs[i] = 0;
569
0dadecf6
RK
570#ifdef SECONDARY_MEMORY_NEEDED
571 /* Initialize the secondary memory table. */
572 clear_secondary_mem ();
573#endif
574
32131a9c
RK
575 /* Remember which hard regs appear explicitly
576 before we merge into `regs_ever_live' the ones in which
577 pseudo regs have been allocated. */
578 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
579
580 /* We don't have a stack slot for any spill reg yet. */
4c9a05bc
RK
581 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
582 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
32131a9c 583
a8efe40d
RK
584 /* Initialize the save area information for caller-save, in case some
585 are needed. */
586 init_save_areas ();
a8fdc208 587
32131a9c
RK
588 /* Compute which hard registers are now in use
589 as homes for pseudo registers.
590 This is done here rather than (eg) in global_alloc
591 because this point is reached even if not optimizing. */
32131a9c
RK
592 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
593 mark_home_live (i);
594
8dddd002
RK
595 /* A function that receives a nonlocal goto must save all call-saved
596 registers. */
597 if (current_function_has_nonlocal_label)
598 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
599 {
600 if (! call_used_regs[i] && ! fixed_regs[i])
601 regs_ever_live[i] = 1;
602 }
603
c307c237
RK
604 for (i = 0; i < scratch_list_length; i++)
605 if (scratch_list[i])
606 mark_scratch_live (scratch_list[i]);
607
32131a9c
RK
608 /* Make sure that the last insn in the chain
609 is not something that needs reloading. */
fb3821f7 610 emit_note (NULL_PTR, NOTE_INSN_DELETED);
32131a9c
RK
611
612 /* Find all the pseudo registers that didn't get hard regs
613 but do have known equivalent constants or memory slots.
614 These include parameters (known equivalent to parameter slots)
615 and cse'd or loop-moved constant memory addresses.
616
617 Record constant equivalents in reg_equiv_constant
618 so they will be substituted by find_reloads.
619 Record memory equivalents in reg_mem_equiv so they can
620 be substituted eventually by altering the REG-rtx's. */
621
622 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
4c9a05bc 623 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
32131a9c 624 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
4c9a05bc 625 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
32131a9c 626 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
4c9a05bc 627 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
32131a9c 628 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
4c9a05bc 629 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
32131a9c 630 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
4c9a05bc 631 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
32131a9c 632 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
4c9a05bc 633 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
32131a9c 634
f95182a4
ILT
635 if (SMALL_REGISTER_CLASSES)
636 CLEAR_HARD_REG_SET (forbidden_regs);
56f58d3a 637
32131a9c 638 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
56f58d3a
RK
639 Also find all paradoxical subregs and find largest such for each pseudo.
640 On machines with small register classes, record hard registers that
b453cb0b
RK
641 are used for user variables. These can never be used for spills.
642 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
643 caller-saved registers must be marked live. */
32131a9c
RK
644
645 for (insn = first; insn; insn = NEXT_INSN (insn))
646 {
647 rtx set = single_set (insn);
648
b453cb0b
RK
649 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
650 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
651 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
652 if (! call_used_regs[i])
653 regs_ever_live[i] = 1;
654
32131a9c
RK
655 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
656 {
fb3821f7 657 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
a8efe40d
RK
658 if (note
659#ifdef LEGITIMATE_PIC_OPERAND_P
a8fdc208 660 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
a8efe40d
RK
661 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
662#endif
663 )
32131a9c
RK
664 {
665 rtx x = XEXP (note, 0);
666 i = REGNO (SET_DEST (set));
667 if (i > LAST_VIRTUAL_REGISTER)
668 {
669 if (GET_CODE (x) == MEM)
956d6950
JL
670 {
671 /* If the operand is a PLUS, the MEM may be shared,
672 so make sure we have an unshared copy here. */
673 if (GET_CODE (XEXP (x, 0)) == PLUS)
674 x = copy_rtx (x);
675
676 reg_equiv_memory_loc[i] = x;
677 }
32131a9c
RK
678 else if (CONSTANT_P (x))
679 {
680 if (LEGITIMATE_CONSTANT_P (x))
681 reg_equiv_constant[i] = x;
682 else
683 reg_equiv_memory_loc[i]
d445b551 684 = force_const_mem (GET_MODE (SET_DEST (set)), x);
32131a9c
RK
685 }
686 else
687 continue;
688
689 /* If this register is being made equivalent to a MEM
690 and the MEM is not SET_SRC, the equivalencing insn
691 is one with the MEM as a SET_DEST and it occurs later.
692 So don't mark this insn now. */
693 if (GET_CODE (x) != MEM
694 || rtx_equal_p (SET_SRC (set), x))
695 reg_equiv_init[i] = insn;
696 }
697 }
698 }
699
700 /* If this insn is setting a MEM from a register equivalent to it,
701 this is the equivalencing insn. */
702 else if (set && GET_CODE (SET_DEST (set)) == MEM
703 && GET_CODE (SET_SRC (set)) == REG
704 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
705 && rtx_equal_p (SET_DEST (set),
706 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
707 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
708
709 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
710 scan_paradoxical_subregs (PATTERN (insn));
711 }
712
713 /* Does this function require a frame pointer? */
714
715 frame_pointer_needed = (! flag_omit_frame_pointer
716#ifdef EXIT_IGNORE_STACK
717 /* ?? If EXIT_IGNORE_STACK is set, we will not save
718 and restore sp for alloca. So we can't eliminate
719 the frame pointer in that case. At some point,
720 we should improve this by emitting the
721 sp-adjusting insns for this case. */
722 || (current_function_calls_alloca
723 && EXIT_IGNORE_STACK)
724#endif
725 || FRAME_POINTER_REQUIRED);
726
727 num_eliminable = 0;
728
729 /* Initialize the table of registers to eliminate. The way we do this
730 depends on how the eliminable registers were defined. */
731#ifdef ELIMINABLE_REGS
732 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
733 {
734 ep->can_eliminate = ep->can_eliminate_previous
735 = (CAN_ELIMINATE (ep->from, ep->to)
9ff3516a 736 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
32131a9c
RK
737 }
738#else
739 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
740 = ! frame_pointer_needed;
741#endif
742
743 /* Count the number of eliminable registers and build the FROM and TO
a8fdc208 744 REG rtx's. Note that code in gen_rtx will cause, e.g.,
32131a9c
RK
745 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
746 We depend on this. */
747 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
748 {
749 num_eliminable += ep->can_eliminate;
38a448ca
RH
750 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
751 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
32131a9c
RK
752 }
753
754 num_labels = max_label_num () - get_first_label_num ();
755
756 /* Allocate the tables used to store offset information at labels. */
a68d4b75
BK
757 /* We used to use alloca here, but the size of what it would try to
758 allocate would occasionally cause it to exceed the stack limit and
759 cause a core dump. */
760 real_known_ptr = xmalloc (num_labels);
761 real_at_ptr
32131a9c 762 = (int (*)[NUM_ELIMINABLE_REGS])
a68d4b75 763 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
32131a9c 764
a68d4b75
BK
765 offsets_known_at = real_known_ptr - get_first_label_num ();
766 offsets_at
767 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
32131a9c
RK
768
769 /* Alter each pseudo-reg rtx to contain its hard reg number.
770 Assign stack slots to the pseudos that lack hard regs or equivalents.
771 Do not touch virtual registers. */
772
773 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
774 alter_reg (i, -1);
775
32131a9c
RK
776 /* If we have some registers we think can be eliminated, scan all insns to
777 see if there is an insn that sets one of these registers to something
778 other than itself plus a constant. If so, the register cannot be
779 eliminated. Doing this scan here eliminates an extra pass through the
780 main reload loop in the most common case where register elimination
781 cannot be done. */
782 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
783 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
784 || GET_CODE (insn) == CALL_INSN)
785 note_stores (PATTERN (insn), mark_not_eliminable);
786
787#ifndef REGISTER_CONSTRAINTS
788 /* If all the pseudo regs have hard regs,
789 except for those that are never referenced,
790 we know that no reloads are needed. */
791 /* But that is not true if there are register constraints, since
792 in that case some pseudos might be in the wrong kind of hard reg. */
793
794 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
b1f21e0a 795 if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0)
32131a9c
RK
796 break;
797
b8093d02 798 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
a68d4b75
BK
799 {
800 free (real_known_ptr);
801 free (real_at_ptr);
802 return;
803 }
32131a9c
RK
804#endif
805
806 /* Compute the order of preference for hard registers to spill.
807 Store them by decreasing preference in potential_reload_regs. */
808
2c5d9e37 809 order_regs_for_reload (global);
32131a9c
RK
810
811 /* So far, no hard regs have been spilled. */
812 n_spills = 0;
813 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
814 spill_reg_order[i] = -1;
815
4079cd63
JW
816 /* Initialize to -1, which means take the first spill register. */
817 last_spill_reg = -1;
818
32131a9c
RK
819 /* On most machines, we can't use any register explicitly used in the
820 rtl as a spill register. But on some, we have to. Those will have
821 taken care to keep the life of hard regs as short as possible. */
822
f95182a4 823 if (! SMALL_REGISTER_CLASSES)
f95182a4 824 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
32131a9c
RK
825
826 /* Spill any hard regs that we know we can't eliminate. */
827 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
828 if (! ep->can_eliminate)
9ff3516a
RK
829 spill_hard_reg (ep->from, global, dumpfile, 1);
830
831#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
832 if (frame_pointer_needed)
833 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
834#endif
32131a9c
RK
835
836 if (global)
837 for (i = 0; i < N_REG_CLASSES; i++)
838 {
4c9a05bc 839 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
32131a9c
RK
840 bzero (basic_block_needs[i], n_basic_blocks);
841 }
842
b2f15f94
RK
843 /* From now on, we need to emit any moves without making new pseudos. */
844 reload_in_progress = 1;
845
32131a9c
RK
846 /* This loop scans the entire function each go-round
847 and repeats until one repetition spills no additional hard regs. */
848
d45cf215 849 /* This flag is set when a pseudo reg is spilled,
32131a9c
RK
850 to require another pass. Note that getting an additional reload
851 reg does not necessarily imply any pseudo reg was spilled;
852 sometimes we find a reload reg that no pseudo reg was allocated in. */
853 something_changed = 1;
854 /* This flag is set if there are any insns that require reloading. */
855 something_needs_reloads = 0;
856 /* This flag is set if there are any insns that require register
857 eliminations. */
858 something_needs_elimination = 0;
859 while (something_changed)
860 {
861 rtx after_call = 0;
862
863 /* For each class, number of reload regs needed in that class.
864 This is the maximum over all insns of the needs in that class
865 of the individual insn. */
866 int max_needs[N_REG_CLASSES];
867 /* For each class, size of group of consecutive regs
868 that is needed for the reloads of this class. */
869 int group_size[N_REG_CLASSES];
870 /* For each class, max number of consecutive groups needed.
871 (Each group contains group_size[CLASS] consecutive registers.) */
872 int max_groups[N_REG_CLASSES];
873 /* For each class, max number needed of regs that don't belong
874 to any of the groups. */
875 int max_nongroups[N_REG_CLASSES];
876 /* For each class, the machine mode which requires consecutive
877 groups of regs of that class.
878 If two different modes ever require groups of one class,
879 they must be the same size and equally restrictive for that class,
880 otherwise we can't handle the complexity. */
881 enum machine_mode group_mode[N_REG_CLASSES];
5352b11a
RS
882 /* Record the insn where each maximum need is first found. */
883 rtx max_needs_insn[N_REG_CLASSES];
884 rtx max_groups_insn[N_REG_CLASSES];
885 rtx max_nongroups_insn[N_REG_CLASSES];
32131a9c 886 rtx x;
7657bf2f 887 HOST_WIDE_INT starting_frame_size;
29a82058 888#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
9ff3516a 889 int previous_frame_pointer_needed = frame_pointer_needed;
29a82058 890#endif
e404a39a 891 static char *reg_class_names[] = REG_CLASS_NAMES;
32131a9c
RK
892
893 something_changed = 0;
4c9a05bc
RK
894 bzero ((char *) max_needs, sizeof max_needs);
895 bzero ((char *) max_groups, sizeof max_groups);
896 bzero ((char *) max_nongroups, sizeof max_nongroups);
897 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
898 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
899 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
900 bzero ((char *) group_size, sizeof group_size);
32131a9c
RK
901 for (i = 0; i < N_REG_CLASSES; i++)
902 group_mode[i] = VOIDmode;
903
904 /* Keep track of which basic blocks are needing the reloads. */
905 this_block = 0;
906
907 /* Remember whether any element of basic_block_needs
908 changes from 0 to 1 in this pass. */
909 new_basic_block_needs = 0;
910
7657bf2f
JW
911 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
912 here because the stack size may be a part of the offset computation
913 for register elimination, and there might have been new stack slots
914 created in the last iteration of this loop. */
915 assign_stack_local (BLKmode, 0, 0);
916
917 starting_frame_size = get_frame_size ();
918
32131a9c
RK
919 /* Reset all offsets on eliminable registers to their initial values. */
920#ifdef ELIMINABLE_REGS
921 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
922 {
923 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
a8efe40d
RK
924 ep->previous_offset = ep->offset
925 = ep->max_offset = ep->initial_offset;
32131a9c
RK
926 }
927#else
928#ifdef INITIAL_FRAME_POINTER_OFFSET
929 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
930#else
931 if (!FRAME_POINTER_REQUIRED)
932 abort ();
933 reg_eliminate[0].initial_offset = 0;
934#endif
a8efe40d 935 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
32131a9c
RK
936 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
937#endif
938
939 num_not_at_initial_offset = 0;
940
4c9a05bc 941 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
32131a9c
RK
942
943 /* Set a known offset for each forced label to be at the initial offset
944 of each elimination. We do this because we assume that all
945 computed jumps occur from a location where each elimination is
946 at its initial offset. */
947
948 for (x = forced_labels; x; x = XEXP (x, 1))
949 if (XEXP (x, 0))
fb3821f7 950 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
32131a9c
RK
951
952 /* For each pseudo register that has an equivalent location defined,
953 try to eliminate any eliminable registers (such as the frame pointer)
954 assuming initial offsets for the replacement register, which
955 is the normal case.
956
957 If the resulting location is directly addressable, substitute
958 the MEM we just got directly for the old REG.
959
960 If it is not addressable but is a constant or the sum of a hard reg
961 and constant, it is probably not addressable because the constant is
962 out of range, in that case record the address; we will generate
963 hairy code to compute the address in a register each time it is
6491dbbb
RK
964 needed. Similarly if it is a hard register, but one that is not
965 valid as an address register.
32131a9c
RK
966
967 If the location is not addressable, but does not have one of the
968 above forms, assign a stack slot. We have to do this to avoid the
969 potential of producing lots of reloads if, e.g., a location involves
970 a pseudo that didn't get a hard register and has an equivalent memory
971 location that also involves a pseudo that didn't get a hard register.
972
973 Perhaps at some point we will improve reload_when_needed handling
974 so this problem goes away. But that's very hairy. */
975
976 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
977 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
978 {
1914f5da 979 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
32131a9c
RK
980
981 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
982 XEXP (x, 0)))
983 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
984 else if (CONSTANT_P (XEXP (x, 0))
6491dbbb
RK
985 || (GET_CODE (XEXP (x, 0)) == REG
986 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
32131a9c
RK
987 || (GET_CODE (XEXP (x, 0)) == PLUS
988 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
989 && (REGNO (XEXP (XEXP (x, 0), 0))
990 < FIRST_PSEUDO_REGISTER)
991 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
992 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
993 else
994 {
995 /* Make a new stack slot. Then indicate that something
a8fdc208 996 changed so we go back and recompute offsets for
32131a9c
RK
997 eliminable registers because the allocation of memory
998 below might change some offset. reg_equiv_{mem,address}
999 will be set up for this pseudo on the next pass around
1000 the loop. */
1001 reg_equiv_memory_loc[i] = 0;
1002 reg_equiv_init[i] = 0;
1003 alter_reg (i, -1);
1004 something_changed = 1;
1005 }
1006 }
a8fdc208 1007
d45cf215 1008 /* If we allocated another pseudo to the stack, redo elimination
32131a9c
RK
1009 bookkeeping. */
1010 if (something_changed)
1011 continue;
1012
a8efe40d
RK
1013 /* If caller-saves needs a group, initialize the group to include
1014 the size and mode required for caller-saves. */
1015
1016 if (caller_save_group_size > 1)
1017 {
1018 group_mode[(int) caller_save_spill_class] = Pmode;
1019 group_size[(int) caller_save_spill_class] = caller_save_group_size;
1020 }
1021
32131a9c
RK
1022 /* Compute the most additional registers needed by any instruction.
1023 Collect information separately for each class of regs. */
1024
1025 for (insn = first; insn; insn = NEXT_INSN (insn))
1026 {
1027 if (global && this_block + 1 < n_basic_blocks
1028 && insn == basic_block_head[this_block+1])
1029 ++this_block;
1030
1031 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
1032 might include REG_LABEL), we need to see what effects this
1033 has on the known offsets at labels. */
1034
1035 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1036 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1037 && REG_NOTES (insn) != 0))
1038 set_label_offsets (insn, insn, 0);
1039
1040 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1041 {
1042 /* Nonzero means don't use a reload reg that overlaps
1043 the place where a function value can be returned. */
1044 rtx avoid_return_reg = 0;
1045
1046 rtx old_body = PATTERN (insn);
1047 int old_code = INSN_CODE (insn);
1048 rtx old_notes = REG_NOTES (insn);
1049 int did_elimination = 0;
546b63fb
RK
1050
1051 /* To compute the number of reload registers of each class
9faa82d8 1052 needed for an insn, we must simulate what choose_reload_regs
546b63fb
RK
1053 can do. We do this by splitting an insn into an "input" and
1054 an "output" part. RELOAD_OTHER reloads are used in both.
1055 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
1056 which must be live over the entire input section of reloads,
1057 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1058 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
1059 inputs.
1060
1061 The registers needed for output are RELOAD_OTHER and
1062 RELOAD_FOR_OUTPUT, which are live for the entire output
1063 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
1064 reloads for each operand.
1065
1066 The total number of registers needed is the maximum of the
1067 inputs and outputs. */
1068
8b3e912b 1069 struct needs
32131a9c 1070 {
8b3e912b
RK
1071 /* [0] is normal, [1] is nongroup. */
1072 int regs[2][N_REG_CLASSES];
1073 int groups[N_REG_CLASSES];
1074 };
1075
1076 /* Each `struct needs' corresponds to one RELOAD_... type. */
1077 struct {
1078 struct needs other;
1079 struct needs input;
1080 struct needs output;
1081 struct needs insn;
1082 struct needs other_addr;
1083 struct needs op_addr;
893bc853 1084 struct needs op_addr_reload;
8b3e912b 1085 struct needs in_addr[MAX_RECOG_OPERANDS];
47c8cf91 1086 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
8b3e912b 1087 struct needs out_addr[MAX_RECOG_OPERANDS];
47c8cf91 1088 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
8b3e912b 1089 } insn_needs;
32131a9c
RK
1090
1091 /* If needed, eliminate any eliminable registers. */
1092 if (num_eliminable)
1093 did_elimination = eliminate_regs_in_insn (insn, 0);
1094
32131a9c
RK
1095 /* Set avoid_return_reg if this is an insn
1096 that might use the value of a function call. */
f95182a4 1097 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
32131a9c
RK
1098 {
1099 if (GET_CODE (PATTERN (insn)) == SET)
1100 after_call = SET_DEST (PATTERN (insn));
1101 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1102 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1103 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1104 else
1105 after_call = 0;
1106 }
e9a25f70 1107 else if (SMALL_REGISTER_CLASSES && after_call != 0
32131a9c 1108 && !(GET_CODE (PATTERN (insn)) == SET
b60a8416
R
1109 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx)
1110 && GET_CODE (PATTERN (insn)) != USE)
32131a9c 1111 {
2b979c57 1112 if (reg_referenced_p (after_call, PATTERN (insn)))
32131a9c
RK
1113 avoid_return_reg = after_call;
1114 after_call = 0;
1115 }
32131a9c
RK
1116
1117 /* Analyze the instruction. */
1118 find_reloads (insn, 0, spill_indirect_levels, global,
1119 spill_reg_order);
1120
1121 /* Remember for later shortcuts which insns had any reloads or
1122 register eliminations.
1123
1124 One might think that it would be worthwhile to mark insns
1125 that need register replacements but not reloads, but this is
1126 not safe because find_reloads may do some manipulation of
1127 the insn (such as swapping commutative operands), which would
1128 be lost when we restore the old pattern after register
1129 replacement. So the actions of find_reloads must be redone in
1130 subsequent passes or in reload_as_needed.
1131
1132 However, it is safe to mark insns that need reloads
1133 but not register replacement. */
1134
1135 PUT_MODE (insn, (did_elimination ? QImode
1136 : n_reloads ? HImode
546b63fb 1137 : GET_MODE (insn) == DImode ? DImode
32131a9c
RK
1138 : VOIDmode));
1139
1140 /* Discard any register replacements done. */
1141 if (did_elimination)
1142 {
1143 obstack_free (&reload_obstack, reload_firstobj);
1144 PATTERN (insn) = old_body;
1145 INSN_CODE (insn) = old_code;
1146 REG_NOTES (insn) = old_notes;
1147 something_needs_elimination = 1;
1148 }
1149
a8efe40d 1150 /* If this insn has no reloads, we need not do anything except
a8fdc208 1151 in the case of a CALL_INSN when we have caller-saves and
a8efe40d
RK
1152 caller-save needs reloads. */
1153
1154 if (n_reloads == 0
1155 && ! (GET_CODE (insn) == CALL_INSN
1156 && caller_save_spill_class != NO_REGS))
32131a9c
RK
1157 continue;
1158
1159 something_needs_reloads = 1;
4c9a05bc 1160 bzero ((char *) &insn_needs, sizeof insn_needs);
32131a9c
RK
1161
1162 /* Count each reload once in every class
1163 containing the reload's own class. */
1164
1165 for (i = 0; i < n_reloads; i++)
1166 {
1167 register enum reg_class *p;
e85ddd99 1168 enum reg_class class = reload_reg_class[i];
32131a9c
RK
1169 int size;
1170 enum machine_mode mode;
8b3e912b 1171 struct needs *this_needs;
32131a9c
RK
1172
1173 /* Don't count the dummy reloads, for which one of the
1174 regs mentioned in the insn can be used for reloading.
1175 Don't count optional reloads.
1176 Don't count reloads that got combined with others. */
1177 if (reload_reg_rtx[i] != 0
1178 || reload_optional[i] != 0
1179 || (reload_out[i] == 0 && reload_in[i] == 0
1180 && ! reload_secondary_p[i]))
1181 continue;
1182
e85ddd99
RK
1183 /* Show that a reload register of this class is needed
1184 in this basic block. We do not use insn_needs and
1185 insn_groups because they are overly conservative for
1186 this purpose. */
1187 if (global && ! basic_block_needs[(int) class][this_block])
1188 {
1189 basic_block_needs[(int) class][this_block] = 1;
1190 new_basic_block_needs = 1;
1191 }
1192
ee249c09
RK
1193 mode = reload_inmode[i];
1194 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1195 mode = reload_outmode[i];
1196 size = CLASS_MAX_NREGS (class, mode);
1197
32131a9c
RK
1198 /* Decide which time-of-use to count this reload for. */
1199 switch (reload_when_needed[i])
1200 {
1201 case RELOAD_OTHER:
8b3e912b 1202 this_needs = &insn_needs.other;
32131a9c 1203 break;
546b63fb 1204 case RELOAD_FOR_INPUT:
8b3e912b 1205 this_needs = &insn_needs.input;
32131a9c 1206 break;
546b63fb 1207 case RELOAD_FOR_OUTPUT:
8b3e912b 1208 this_needs = &insn_needs.output;
32131a9c 1209 break;
546b63fb 1210 case RELOAD_FOR_INSN:
8b3e912b 1211 this_needs = &insn_needs.insn;
546b63fb 1212 break;
546b63fb 1213 case RELOAD_FOR_OTHER_ADDRESS:
8b3e912b 1214 this_needs = &insn_needs.other_addr;
546b63fb 1215 break;
546b63fb 1216 case RELOAD_FOR_INPUT_ADDRESS:
8b3e912b 1217 this_needs = &insn_needs.in_addr[reload_opnum[i]];
546b63fb 1218 break;
47c8cf91
ILT
1219 case RELOAD_FOR_INPADDR_ADDRESS:
1220 this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
1221 break;
546b63fb 1222 case RELOAD_FOR_OUTPUT_ADDRESS:
8b3e912b 1223 this_needs = &insn_needs.out_addr[reload_opnum[i]];
546b63fb 1224 break;
47c8cf91
ILT
1225 case RELOAD_FOR_OUTADDR_ADDRESS:
1226 this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
1227 break;
32131a9c 1228 case RELOAD_FOR_OPERAND_ADDRESS:
8b3e912b 1229 this_needs = &insn_needs.op_addr;
32131a9c 1230 break;
893bc853
RK
1231 case RELOAD_FOR_OPADDR_ADDR:
1232 this_needs = &insn_needs.op_addr_reload;
1233 break;
32131a9c
RK
1234 }
1235
32131a9c
RK
1236 if (size > 1)
1237 {
1238 enum machine_mode other_mode, allocate_mode;
1239
1240 /* Count number of groups needed separately from
1241 number of individual regs needed. */
8b3e912b 1242 this_needs->groups[(int) class]++;
e85ddd99 1243 p = reg_class_superclasses[(int) class];
32131a9c 1244 while (*p != LIM_REG_CLASSES)
8b3e912b 1245 this_needs->groups[(int) *p++]++;
32131a9c
RK
1246
1247 /* Record size and mode of a group of this class. */
1248 /* If more than one size group is needed,
1249 make all groups the largest needed size. */
e85ddd99 1250 if (group_size[(int) class] < size)
32131a9c 1251 {
e85ddd99 1252 other_mode = group_mode[(int) class];
32131a9c
RK
1253 allocate_mode = mode;
1254
e85ddd99
RK
1255 group_size[(int) class] = size;
1256 group_mode[(int) class] = mode;
32131a9c
RK
1257 }
1258 else
1259 {
1260 other_mode = mode;
e85ddd99 1261 allocate_mode = group_mode[(int) class];
32131a9c
RK
1262 }
1263
1264 /* Crash if two dissimilar machine modes both need
1265 groups of consecutive regs of the same class. */
1266
8b3e912b 1267 if (other_mode != VOIDmode && other_mode != allocate_mode
32131a9c 1268 && ! modes_equiv_for_class_p (allocate_mode,
8b3e912b 1269 other_mode, class))
a89b2cc4
MM
1270 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1271 insn);
32131a9c
RK
1272 }
1273 else if (size == 1)
1274 {
f5963e61 1275 this_needs->regs[reload_nongroup[i]][(int) class] += 1;
e85ddd99 1276 p = reg_class_superclasses[(int) class];
32131a9c 1277 while (*p != LIM_REG_CLASSES)
f5963e61 1278 this_needs->regs[reload_nongroup[i]][(int) *p++] += 1;
32131a9c
RK
1279 }
1280 else
1281 abort ();
1282 }
1283
1284 /* All reloads have been counted for this insn;
1285 now merge the various times of use.
1286 This sets insn_needs, etc., to the maximum total number
1287 of registers needed at any point in this insn. */
1288
1289 for (i = 0; i < N_REG_CLASSES; i++)
1290 {
546b63fb
RK
1291 int in_max, out_max;
1292
8b3e912b
RK
1293 /* Compute normal and nongroup needs. */
1294 for (j = 0; j <= 1; j++)
546b63fb 1295 {
8b3e912b
RK
1296 for (in_max = 0, out_max = 0, k = 0;
1297 k < reload_n_operands; k++)
1298 {
47c8cf91
ILT
1299 in_max
1300 = MAX (in_max,
b080c137
RK
1301 (insn_needs.in_addr[k].regs[j][i]
1302 + insn_needs.in_addr_addr[k].regs[j][i]));
8b3e912b
RK
1303 out_max
1304 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
47c8cf91
ILT
1305 out_max
1306 = MAX (out_max,
1307 insn_needs.out_addr_addr[k].regs[j][i]);
8b3e912b 1308 }
546b63fb 1309
8b3e912b
RK
1310 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1311 and operand addresses but not things used to reload
1312 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1313 don't conflict with things needed to reload inputs or
0f41302f 1314 outputs. */
546b63fb 1315
a94ce333
JW
1316 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1317 insn_needs.op_addr_reload.regs[j][i]),
893bc853
RK
1318 in_max);
1319
8b3e912b 1320 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
546b63fb 1321
8b3e912b
RK
1322 insn_needs.input.regs[j][i]
1323 = MAX (insn_needs.input.regs[j][i]
1324 + insn_needs.op_addr.regs[j][i]
1325 + insn_needs.insn.regs[j][i],
1326 in_max + insn_needs.input.regs[j][i]);
546b63fb 1327
8b3e912b
RK
1328 insn_needs.output.regs[j][i] += out_max;
1329 insn_needs.other.regs[j][i]
1330 += MAX (MAX (insn_needs.input.regs[j][i],
1331 insn_needs.output.regs[j][i]),
1332 insn_needs.other_addr.regs[j][i]);
546b63fb 1333
ce0e109b
RK
1334 }
1335
8b3e912b 1336 /* Now compute group needs. */
546b63fb
RK
1337 for (in_max = 0, out_max = 0, j = 0;
1338 j < reload_n_operands; j++)
1339 {
8b3e912b 1340 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
47c8cf91
ILT
1341 in_max = MAX (in_max,
1342 insn_needs.in_addr_addr[j].groups[i]);
8b3e912b
RK
1343 out_max
1344 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
47c8cf91
ILT
1345 out_max
1346 = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
546b63fb
RK
1347 }
1348
a94ce333
JW
1349 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1350 insn_needs.op_addr_reload.groups[i]),
893bc853 1351 in_max);
8b3e912b 1352 out_max = MAX (out_max, insn_needs.insn.groups[i]);
546b63fb 1353
8b3e912b
RK
1354 insn_needs.input.groups[i]
1355 = MAX (insn_needs.input.groups[i]
1356 + insn_needs.op_addr.groups[i]
1357 + insn_needs.insn.groups[i],
1358 in_max + insn_needs.input.groups[i]);
546b63fb 1359
8b3e912b
RK
1360 insn_needs.output.groups[i] += out_max;
1361 insn_needs.other.groups[i]
1362 += MAX (MAX (insn_needs.input.groups[i],
1363 insn_needs.output.groups[i]),
1364 insn_needs.other_addr.groups[i]);
546b63fb
RK
1365 }
1366
a8efe40d
RK
1367 /* If this is a CALL_INSN and caller-saves will need
1368 a spill register, act as if the spill register is
1369 needed for this insn. However, the spill register
1370 can be used by any reload of this insn, so we only
1371 need do something if no need for that class has
a8fdc208 1372 been recorded.
a8efe40d
RK
1373
1374 The assumption that every CALL_INSN will trigger a
1375 caller-save is highly conservative, however, the number
1376 of cases where caller-saves will need a spill register but
1377 a block containing a CALL_INSN won't need a spill register
1378 of that class should be quite rare.
1379
1380 If a group is needed, the size and mode of the group will
d45cf215 1381 have been set up at the beginning of this loop. */
a8efe40d
RK
1382
1383 if (GET_CODE (insn) == CALL_INSN
1384 && caller_save_spill_class != NO_REGS)
1385 {
f5963e61
JL
1386 /* See if this register would conflict with any reload that
1387 needs a group or any reload that needs a nongroup. */
8b3e912b
RK
1388 int nongroup_need = 0;
1389 int *caller_save_needs;
1390
1391 for (j = 0; j < n_reloads; j++)
f5963e61
JL
1392 if (reg_classes_intersect_p (caller_save_spill_class,
1393 reload_reg_class[j])
1394 && ((CLASS_MAX_NREGS
1395 (reload_reg_class[j],
1396 (GET_MODE_SIZE (reload_outmode[j])
1397 > GET_MODE_SIZE (reload_inmode[j]))
1398 ? reload_outmode[j] : reload_inmode[j])
1399 > 1)
1400 || reload_nongroup[j]))
8b3e912b
RK
1401 {
1402 nongroup_need = 1;
1403 break;
1404 }
1405
1406 caller_save_needs
1407 = (caller_save_group_size > 1
1408 ? insn_needs.other.groups
1409 : insn_needs.other.regs[nongroup_need]);
a8efe40d
RK
1410
1411 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1412 {
1413 register enum reg_class *p
1414 = reg_class_superclasses[(int) caller_save_spill_class];
1415
1416 caller_save_needs[(int) caller_save_spill_class]++;
1417
1418 while (*p != LIM_REG_CLASSES)
0aaa6af8 1419 caller_save_needs[(int) *p++] += 1;
a8efe40d
RK
1420 }
1421
8b3e912b 1422 /* Show that this basic block will need a register of
d1c1397e
RS
1423 this class. */
1424
8b3e912b
RK
1425 if (global
1426 && ! (basic_block_needs[(int) caller_save_spill_class]
1427 [this_block]))
1428 {
1429 basic_block_needs[(int) caller_save_spill_class]
1430 [this_block] = 1;
1431 new_basic_block_needs = 1;
1432 }
a8efe40d
RK
1433 }
1434
32131a9c
RK
1435 /* If this insn stores the value of a function call,
1436 and that value is in a register that has been spilled,
1437 and if the insn needs a reload in a class
1438 that might use that register as the reload register,
38e01259 1439 then add an extra need in that class.
32131a9c
RK
1440 This makes sure we have a register available that does
1441 not overlap the return value. */
8b3e912b 1442
f95182a4 1443 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
32131a9c
RK
1444 {
1445 int regno = REGNO (avoid_return_reg);
1446 int nregs
1447 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1448 int r;
546b63fb
RK
1449 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1450
1451 /* First compute the "basic needs", which counts a
1452 need only in the smallest class in which it
1453 is required. */
1454
9b232232
RK
1455 bcopy ((char *) insn_needs.other.regs[0],
1456 (char *) basic_needs, sizeof basic_needs);
1457 bcopy ((char *) insn_needs.other.groups,
1458 (char *) basic_groups, sizeof basic_groups);
546b63fb
RK
1459
1460 for (i = 0; i < N_REG_CLASSES; i++)
1461 {
1462 enum reg_class *p;
1463
1464 if (basic_needs[i] >= 0)
1465 for (p = reg_class_superclasses[i];
1466 *p != LIM_REG_CLASSES; p++)
1467 basic_needs[(int) *p] -= basic_needs[i];
1468
1469 if (basic_groups[i] >= 0)
1470 for (p = reg_class_superclasses[i];
1471 *p != LIM_REG_CLASSES; p++)
1472 basic_groups[(int) *p] -= basic_groups[i];
1473 }
1474
1475 /* Now count extra regs if there might be a conflict with
0f41302f 1476 the return value register. */
546b63fb 1477
32131a9c
RK
1478 for (r = regno; r < regno + nregs; r++)
1479 if (spill_reg_order[r] >= 0)
1480 for (i = 0; i < N_REG_CLASSES; i++)
1481 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1482 {
af432130 1483 if (basic_needs[i] > 0)
546b63fb
RK
1484 {
1485 enum reg_class *p;
1486
8b3e912b 1487 insn_needs.other.regs[0][i]++;
546b63fb
RK
1488 p = reg_class_superclasses[i];
1489 while (*p != LIM_REG_CLASSES)
8b3e912b 1490 insn_needs.other.regs[0][(int) *p++]++;
546b63fb 1491 }
af432130
RK
1492 if (basic_groups[i] > 0)
1493 {
1494 enum reg_class *p;
1495
1496 insn_needs.other.groups[i]++;
1497 p = reg_class_superclasses[i];
1498 while (*p != LIM_REG_CLASSES)
1499 insn_needs.other.groups[(int) *p++]++;
1500 }
32131a9c 1501 }
32131a9c 1502 }
32131a9c
RK
1503
1504 /* For each class, collect maximum need of any insn. */
1505
1506 for (i = 0; i < N_REG_CLASSES; i++)
1507 {
8b3e912b 1508 if (max_needs[i] < insn_needs.other.regs[0][i])
5352b11a 1509 {
8b3e912b 1510 max_needs[i] = insn_needs.other.regs[0][i];
5352b11a
RS
1511 max_needs_insn[i] = insn;
1512 }
8b3e912b 1513 if (max_groups[i] < insn_needs.other.groups[i])
5352b11a 1514 {
8b3e912b 1515 max_groups[i] = insn_needs.other.groups[i];
5352b11a
RS
1516 max_groups_insn[i] = insn;
1517 }
8b3e912b 1518 if (max_nongroups[i] < insn_needs.other.regs[1][i])
ce0e109b 1519 {
8b3e912b 1520 max_nongroups[i] = insn_needs.other.regs[1][i];
ce0e109b
RK
1521 max_nongroups_insn[i] = insn;
1522 }
32131a9c
RK
1523 }
1524 }
1525 /* Note that there is a continue statement above. */
1526 }
1527
0dadecf6
RK
1528 /* If we allocated any new memory locations, make another pass
1529 since it might have changed elimination offsets. */
1530 if (starting_frame_size != get_frame_size ())
1531 something_changed = 1;
1532
e404a39a
RK
1533 if (dumpfile)
1534 for (i = 0; i < N_REG_CLASSES; i++)
1535 {
1536 if (max_needs[i] > 0)
1537 fprintf (dumpfile,
1538 ";; Need %d reg%s of class %s (for insn %d).\n",
1539 max_needs[i], max_needs[i] == 1 ? "" : "s",
1540 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1541 if (max_nongroups[i] > 0)
1542 fprintf (dumpfile,
1543 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1544 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1545 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1546 if (max_groups[i] > 0)
1547 fprintf (dumpfile,
1548 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1549 max_groups[i], max_groups[i] == 1 ? "" : "s",
1550 mode_name[(int) group_mode[i]],
1551 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1552 }
1553
d445b551 1554 /* If we have caller-saves, set up the save areas and see if caller-save
a8efe40d 1555 will need a spill register. */
32131a9c 1556
37c0e55f 1557 if (caller_save_needed)
32131a9c 1558 {
37c0e55f
RK
1559 /* Set the offsets for setup_save_areas. */
1560 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
1561 ep++)
1562 ep->previous_offset = ep->max_offset;
1563
1564 if ( ! setup_save_areas (&something_changed)
1565 && caller_save_spill_class == NO_REGS)
1566 {
1567 /* The class we will need depends on whether the machine
1568 supports the sum of two registers for an address; see
1569 find_address_reloads for details. */
1570
1571 caller_save_spill_class
1572 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1573 caller_save_group_size
1574 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1575 something_changed = 1;
1576 }
32131a9c
RK
1577 }
1578
5c23c401
RK
1579 /* See if anything that happened changes which eliminations are valid.
1580 For example, on the Sparc, whether or not the frame pointer can
1581 be eliminated can depend on what registers have been used. We need
1582 not check some conditions again (such as flag_omit_frame_pointer)
1583 since they can't have changed. */
1584
1585 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3ec2ea3e 1586 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
5c23c401
RK
1587#ifdef ELIMINABLE_REGS
1588 || ! CAN_ELIMINATE (ep->from, ep->to)
1589#endif
1590 )
1591 ep->can_eliminate = 0;
1592
32131a9c
RK
1593 /* Look for the case where we have discovered that we can't replace
1594 register A with register B and that means that we will now be
1595 trying to replace register A with register C. This means we can
1596 no longer replace register C with register B and we need to disable
1597 such an elimination, if it exists. This occurs often with A == ap,
1598 B == sp, and C == fp. */
a8fdc208 1599
32131a9c
RK
1600 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1601 {
1602 struct elim_table *op;
1603 register int new_to = -1;
1604
1605 if (! ep->can_eliminate && ep->can_eliminate_previous)
1606 {
1607 /* Find the current elimination for ep->from, if there is a
1608 new one. */
1609 for (op = reg_eliminate;
1610 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1611 if (op->from == ep->from && op->can_eliminate)
1612 {
1613 new_to = op->to;
1614 break;
1615 }
1616
1617 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1618 disable it. */
1619 for (op = reg_eliminate;
1620 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1621 if (op->from == new_to && op->to == ep->to)
1622 op->can_eliminate = 0;
1623 }
1624 }
1625
1626 /* See if any registers that we thought we could eliminate the previous
1627 time are no longer eliminable. If so, something has changed and we
1628 must spill the register. Also, recompute the number of eliminable
1629 registers and see if the frame pointer is needed; it is if there is
1630 no elimination of the frame pointer that we can perform. */
1631
1632 frame_pointer_needed = 1;
1633 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1634 {
3ec2ea3e
DE
1635 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1636 && ep->to != HARD_FRAME_POINTER_REGNUM)
32131a9c
RK
1637 frame_pointer_needed = 0;
1638
1639 if (! ep->can_eliminate && ep->can_eliminate_previous)
1640 {
1641 ep->can_eliminate_previous = 0;
1642 spill_hard_reg (ep->from, global, dumpfile, 1);
32131a9c
RK
1643 something_changed = 1;
1644 num_eliminable--;
1645 }
1646 }
1647
9ff3516a
RK
1648#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1649 /* If we didn't need a frame pointer last time, but we do now, spill
1650 the hard frame pointer. */
1651 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1652 {
1653 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1654 something_changed = 1;
1655 }
1656#endif
1657
32131a9c
RK
1658 /* If all needs are met, we win. */
1659
1660 for (i = 0; i < N_REG_CLASSES; i++)
1661 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1662 break;
1663 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1664 break;
1665
546b63fb
RK
1666 /* Not all needs are met; must spill some hard regs. */
1667
1668 /* Put all registers spilled so far back in potential_reload_regs, but
1669 put them at the front, since we've already spilled most of the
9faa82d8 1670 pseudos in them (we might have left some pseudos unspilled if they
546b63fb
RK
1671 were in a block that didn't need any spill registers of a conflicting
1672 class. We used to try to mark off the need for those registers,
1673 but doing so properly is very complex and reallocating them is the
1674 simpler approach. First, "pack" potential_reload_regs by pushing
1675 any nonnegative entries towards the end. That will leave room
1676 for the registers we already spilled.
1677
1678 Also, undo the marking of the spill registers from the last time
1679 around in FORBIDDEN_REGS since we will be probably be allocating
1680 them again below.
1681
1682 ??? It is theoretically possible that we might end up not using one
1683 of our previously-spilled registers in this allocation, even though
1684 they are at the head of the list. It's not clear what to do about
1685 this, but it was no better before, when we marked off the needs met
1686 by the previously-spilled registers. With the current code, globals
1687 can be allocated into these registers, but locals cannot. */
1688
1689 if (n_spills)
1690 {
1691 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1692 if (potential_reload_regs[i] != -1)
1693 potential_reload_regs[j--] = potential_reload_regs[i];
32131a9c 1694
546b63fb
RK
1695 for (i = 0; i < n_spills; i++)
1696 {
1697 potential_reload_regs[i] = spill_regs[i];
1698 spill_reg_order[spill_regs[i]] = -1;
1699 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1700 }
32131a9c 1701
546b63fb
RK
1702 n_spills = 0;
1703 }
32131a9c
RK
1704
1705 /* Now find more reload regs to satisfy the remaining need
1706 Do it by ascending class number, since otherwise a reg
1707 might be spilled for a big class and might fail to count
1708 for a smaller class even though it belongs to that class.
1709
1710 Count spilled regs in `spills', and add entries to
1711 `spill_regs' and `spill_reg_order'.
1712
1713 ??? Note there is a problem here.
1714 When there is a need for a group in a high-numbered class,
1715 and also need for non-group regs that come from a lower class,
1716 the non-group regs are chosen first. If there aren't many regs,
1717 they might leave no room for a group.
1718
1719 This was happening on the 386. To fix it, we added the code
1720 that calls possible_group_p, so that the lower class won't
1721 break up the last possible group.
1722
1723 Really fixing the problem would require changes above
1724 in counting the regs already spilled, and in choose_reload_regs.
1725 It might be hard to avoid introducing bugs there. */
1726
546b63fb
RK
1727 CLEAR_HARD_REG_SET (counted_for_groups);
1728 CLEAR_HARD_REG_SET (counted_for_nongroups);
1729
32131a9c
RK
1730 for (class = 0; class < N_REG_CLASSES; class++)
1731 {
1732 /* First get the groups of registers.
1733 If we got single registers first, we might fragment
1734 possible groups. */
1735 while (max_groups[class] > 0)
1736 {
1737 /* If any single spilled regs happen to form groups,
1738 count them now. Maybe we don't really need
1739 to spill another group. */
066aca28
RK
1740 count_possible_groups (group_size, group_mode, max_groups,
1741 class);
32131a9c 1742
93193ab5
RK
1743 if (max_groups[class] <= 0)
1744 break;
1745
32131a9c
RK
1746 /* Groups of size 2 (the only groups used on most machines)
1747 are treated specially. */
1748 if (group_size[class] == 2)
1749 {
1750 /* First, look for a register that will complete a group. */
1751 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1752 {
32131a9c 1753 int other;
546b63fb
RK
1754
1755 j = potential_reload_regs[i];
32131a9c
RK
1756 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1757 &&
1758 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1759 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1760 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1761 && HARD_REGNO_MODE_OK (other, group_mode[class])
1762 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1763 other)
1764 /* We don't want one part of another group.
1765 We could get "two groups" that overlap! */
1766 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1767 ||
1768 (j < FIRST_PSEUDO_REGISTER - 1
1769 && (other = j + 1, spill_reg_order[other] >= 0)
1770 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1771 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1772 && HARD_REGNO_MODE_OK (j, group_mode[class])
1773 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1774 other)
1775 && ! TEST_HARD_REG_BIT (counted_for_groups,
1776 other))))
1777 {
1778 register enum reg_class *p;
1779
1780 /* We have found one that will complete a group,
1781 so count off one group as provided. */
1782 max_groups[class]--;
1783 p = reg_class_superclasses[class];
1784 while (*p != LIM_REG_CLASSES)
d601d5da
JW
1785 {
1786 if (group_size [(int) *p] <= group_size [class])
1787 max_groups[(int) *p]--;
1788 p++;
1789 }
32131a9c
RK
1790
1791 /* Indicate both these regs are part of a group. */
1792 SET_HARD_REG_BIT (counted_for_groups, j);
1793 SET_HARD_REG_BIT (counted_for_groups, other);
1794 break;
1795 }
1796 }
1797 /* We can't complete a group, so start one. */
92b0556d 1798 /* Look for a pair neither of which is explicitly used. */
f95182a4 1799 if (SMALL_REGISTER_CLASSES && i == FIRST_PSEUDO_REGISTER)
92b0556d
RS
1800 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1801 {
1802 int k;
1803 j = potential_reload_regs[i];
1804 /* Verify that J+1 is a potential reload reg. */
1805 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1806 if (potential_reload_regs[k] == j + 1)
1807 break;
1808 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1809 && k < FIRST_PSEUDO_REGISTER
1810 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1811 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1812 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1813 && HARD_REGNO_MODE_OK (j, group_mode[class])
1814 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1815 j + 1)
1816 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1817 /* Reject J at this stage
1818 if J+1 was explicitly used. */
1819 && ! regs_explicitly_used[j + 1])
1820 break;
1821 }
92b0556d
RS
1822 /* Now try any group at all
1823 whose registers are not in bad_spill_regs. */
32131a9c
RK
1824 if (i == FIRST_PSEUDO_REGISTER)
1825 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1826 {
57697575 1827 int k;
546b63fb 1828 j = potential_reload_regs[i];
57697575
RS
1829 /* Verify that J+1 is a potential reload reg. */
1830 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1831 if (potential_reload_regs[k] == j + 1)
1832 break;
32131a9c 1833 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
57697575 1834 && k < FIRST_PSEUDO_REGISTER
32131a9c
RK
1835 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1836 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1837 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1838 && HARD_REGNO_MODE_OK (j, group_mode[class])
1839 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
ee9f63c6
RS
1840 j + 1)
1841 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
32131a9c
RK
1842 break;
1843 }
1844
1845 /* I should be the index in potential_reload_regs
1846 of the new reload reg we have found. */
1847
5352b11a
RS
1848 if (i >= FIRST_PSEUDO_REGISTER)
1849 {
1850 /* There are no groups left to spill. */
1851 spill_failure (max_groups_insn[class]);
1852 failure = 1;
1853 goto failed;
1854 }
1855 else
1856 something_changed
fb3821f7 1857 |= new_spill_reg (i, class, max_needs, NULL_PTR,
5352b11a 1858 global, dumpfile);
32131a9c
RK
1859 }
1860 else
1861 {
1862 /* For groups of more than 2 registers,
1863 look for a sufficient sequence of unspilled registers,
1864 and spill them all at once. */
1865 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1866 {
32131a9c 1867 int k;
546b63fb
RK
1868
1869 j = potential_reload_regs[i];
9d1a4667
RS
1870 if (j >= 0
1871 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
32131a9c
RK
1872 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1873 {
1874 /* Check each reg in the sequence. */
1875 for (k = 0; k < group_size[class]; k++)
1876 if (! (spill_reg_order[j + k] < 0
1877 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1878 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1879 break;
1880 /* We got a full sequence, so spill them all. */
1881 if (k == group_size[class])
1882 {
1883 register enum reg_class *p;
1884 for (k = 0; k < group_size[class]; k++)
1885 {
1886 int idx;
1887 SET_HARD_REG_BIT (counted_for_groups, j + k);
1888 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1889 if (potential_reload_regs[idx] == j + k)
1890 break;
9d1a4667
RS
1891 something_changed
1892 |= new_spill_reg (idx, class,
1893 max_needs, NULL_PTR,
1894 global, dumpfile);
32131a9c
RK
1895 }
1896
1897 /* We have found one that will complete a group,
1898 so count off one group as provided. */
1899 max_groups[class]--;
1900 p = reg_class_superclasses[class];
1901 while (*p != LIM_REG_CLASSES)
d601d5da
JW
1902 {
1903 if (group_size [(int) *p]
1904 <= group_size [class])
1905 max_groups[(int) *p]--;
1906 p++;
1907 }
32131a9c
RK
1908 break;
1909 }
1910 }
1911 }
fa52261e 1912 /* We couldn't find any registers for this reload.
9d1a4667
RS
1913 Avoid going into an infinite loop. */
1914 if (i >= FIRST_PSEUDO_REGISTER)
1915 {
1916 /* There are no groups left. */
1917 spill_failure (max_groups_insn[class]);
1918 failure = 1;
1919 goto failed;
1920 }
32131a9c
RK
1921 }
1922 }
1923
1924 /* Now similarly satisfy all need for single registers. */
1925
1926 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1927 {
9a6cde3a
RS
1928 /* If we spilled enough regs, but they weren't counted
1929 against the non-group need, see if we can count them now.
1930 If so, we can avoid some actual spilling. */
1931 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1932 for (i = 0; i < n_spills; i++)
1933 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1934 spill_regs[i])
1935 && !TEST_HARD_REG_BIT (counted_for_groups,
1936 spill_regs[i])
1937 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1938 spill_regs[i])
1939 && max_nongroups[class] > 0)
1940 {
1941 register enum reg_class *p;
1942
1943 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1944 max_nongroups[class]--;
1945 p = reg_class_superclasses[class];
1946 while (*p != LIM_REG_CLASSES)
1947 max_nongroups[(int) *p++]--;
1948 }
1949 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1950 break;
9a6cde3a 1951
32131a9c
RK
1952 /* Consider the potential reload regs that aren't
1953 yet in use as reload regs, in order of preference.
1954 Find the most preferred one that's in this class. */
1955
1956 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1957 if (potential_reload_regs[i] >= 0
1958 && TEST_HARD_REG_BIT (reg_class_contents[class],
1959 potential_reload_regs[i])
1960 /* If this reg will not be available for groups,
1961 pick one that does not foreclose possible groups.
1962 This is a kludge, and not very general,
1963 but it should be sufficient to make the 386 work,
1964 and the problem should not occur on machines with
1965 more registers. */
1966 && (max_nongroups[class] == 0
1967 || possible_group_p (potential_reload_regs[i], max_groups)))
1968 break;
1969
e404a39a
RK
1970 /* If we couldn't get a register, try to get one even if we
1971 might foreclose possible groups. This may cause problems
1972 later, but that's better than aborting now, since it is
1973 possible that we will, in fact, be able to form the needed
1974 group even with this allocation. */
1975
1976 if (i >= FIRST_PSEUDO_REGISTER
1977 && (asm_noperands (max_needs[class] > 0
1978 ? max_needs_insn[class]
1979 : max_nongroups_insn[class])
1980 < 0))
1981 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1982 if (potential_reload_regs[i] >= 0
1983 && TEST_HARD_REG_BIT (reg_class_contents[class],
1984 potential_reload_regs[i]))
1985 break;
1986
32131a9c
RK
1987 /* I should be the index in potential_reload_regs
1988 of the new reload reg we have found. */
1989
5352b11a
RS
1990 if (i >= FIRST_PSEUDO_REGISTER)
1991 {
1992 /* There are no possible registers left to spill. */
1993 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1994 : max_nongroups_insn[class]);
1995 failure = 1;
1996 goto failed;
1997 }
1998 else
1999 something_changed
2000 |= new_spill_reg (i, class, max_needs, max_nongroups,
2001 global, dumpfile);
32131a9c
RK
2002 }
2003 }
2004 }
2005
2006 /* If global-alloc was run, notify it of any register eliminations we have
2007 done. */
2008 if (global)
2009 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2010 if (ep->can_eliminate)
2011 mark_elimination (ep->from, ep->to);
2012
32131a9c 2013 /* Insert code to save and restore call-clobbered hard regs
a8efe40d
RK
2014 around calls. Tell if what mode to use so that we will process
2015 those insns in reload_as_needed if we have to. */
32131a9c
RK
2016
2017 if (caller_save_needed)
a8efe40d
RK
2018 save_call_clobbered_regs (num_eliminable ? QImode
2019 : caller_save_spill_class != NO_REGS ? HImode
2020 : VOIDmode);
32131a9c
RK
2021
2022 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
2023 If that insn didn't set the register (i.e., it copied the register to
2024 memory), just delete that insn instead of the equivalencing insn plus
2025 anything now dead. If we call delete_dead_insn on that insn, we may
2026 delete the insn that actually sets the register if the register die
2027 there and that is incorrect. */
2028
2029 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2030 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
2031 && GET_CODE (reg_equiv_init[i]) != NOTE)
2032 {
2033 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
2034 delete_dead_insn (reg_equiv_init[i]);
2035 else
2036 {
2037 PUT_CODE (reg_equiv_init[i], NOTE);
2038 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
2039 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
2040 }
2041 }
2042
2043 /* Use the reload registers where necessary
2044 by generating move instructions to move the must-be-register
2045 values into or out of the reload registers. */
2046
a8efe40d
RK
2047 if (something_needs_reloads || something_needs_elimination
2048 || (caller_save_needed && num_eliminable)
2049 || caller_save_spill_class != NO_REGS)
32131a9c
RK
2050 reload_as_needed (first, global);
2051
2a1f8b6b 2052 /* If we were able to eliminate the frame pointer, show that it is no
546b63fb 2053 longer live at the start of any basic block. If it ls live by
2a1f8b6b
RK
2054 virtue of being in a pseudo, that pseudo will be marked live
2055 and hence the frame pointer will be known to be live via that
2056 pseudo. */
2057
2058 if (! frame_pointer_needed)
2059 for (i = 0; i < n_basic_blocks; i++)
8e08106d
MM
2060 CLEAR_REGNO_REG_SET (basic_block_live_at_start[i],
2061 HARD_FRAME_POINTER_REGNUM);
2a1f8b6b 2062
5352b11a
RS
2063 /* Come here (with failure set nonzero) if we can't get enough spill regs
2064 and we decide not to abort about it. */
2065 failed:
2066
a3ec87a8
RS
2067 reload_in_progress = 0;
2068
32131a9c
RK
2069 /* Now eliminate all pseudo regs by modifying them into
2070 their equivalent memory references.
2071 The REG-rtx's for the pseudos are modified in place,
2072 so all insns that used to refer to them now refer to memory.
2073
2074 For a reg that has a reg_equiv_address, all those insns
2075 were changed by reloading so that no insns refer to it any longer;
2076 but the DECL_RTL of a variable decl may refer to it,
2077 and if so this causes the debugging info to mention the variable. */
2078
2079 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2080 {
2081 rtx addr = 0;
ab1fd483 2082 int in_struct = 0;
9ec36da5
JL
2083 int is_readonly = 0;
2084
2085 if (reg_equiv_memory_loc[i])
ab1fd483 2086 {
9ec36da5
JL
2087 in_struct = MEM_IN_STRUCT_P (reg_equiv_memory_loc[i]);
2088 is_readonly = RTX_UNCHANGING_P (reg_equiv_memory_loc[i]);
ab1fd483 2089 }
9ec36da5
JL
2090
2091 if (reg_equiv_mem[i])
2092 addr = XEXP (reg_equiv_mem[i], 0);
2093
32131a9c
RK
2094 if (reg_equiv_address[i])
2095 addr = reg_equiv_address[i];
9ec36da5 2096
32131a9c
RK
2097 if (addr)
2098 {
2099 if (reg_renumber[i] < 0)
2100 {
2101 rtx reg = regno_reg_rtx[i];
2102 XEXP (reg, 0) = addr;
2103 REG_USERVAR_P (reg) = 0;
9ec36da5 2104 RTX_UNCHANGING_P (reg) = is_readonly;
ab1fd483 2105 MEM_IN_STRUCT_P (reg) = in_struct;
41472af8
MM
2106 /* We have no alias information about this newly created
2107 MEM. */
2108 MEM_ALIAS_SET (reg) = 0;
32131a9c
RK
2109 PUT_CODE (reg, MEM);
2110 }
2111 else if (reg_equiv_mem[i])
2112 XEXP (reg_equiv_mem[i], 0) = addr;
2113 }
2114 }
2115
b60a8416
R
2116 /* Make a pass over all the insns and delete all USEs which we inserted
2117 only to tag a REG_EQUAL note on them; if PRESERVE_DEATH_INFO_REGNO_P
2118 is defined, also remove death notes for things that are no longer
2119 registers or no longer die in the insn (e.g., an input and output
2120 pseudo being tied). */
32131a9c
RK
2121
2122 for (insn = first; insn; insn = NEXT_INSN (insn))
2123 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2124 {
487a6e06 2125#ifdef PRESERVE_DEATH_INFO_REGNO_P
32131a9c 2126 rtx note, next;
487a6e06 2127#endif
32131a9c 2128
4d3eb414 2129 if (GET_CODE (PATTERN (insn)) == USE
b60a8416
R
2130 && find_reg_note (insn, REG_EQUAL, NULL_RTX))
2131 {
2132 PUT_CODE (insn, NOTE);
2133 NOTE_SOURCE_FILE (insn) = 0;
2134 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2135 continue;
2136 }
2137#ifdef PRESERVE_DEATH_INFO_REGNO_P
32131a9c
RK
2138 for (note = REG_NOTES (insn); note; note = next)
2139 {
2140 next = XEXP (note, 1);
2141 if (REG_NOTE_KIND (note) == REG_DEAD
2142 && (GET_CODE (XEXP (note, 0)) != REG
2143 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2144 remove_note (insn, note);
2145 }
32131a9c 2146#endif
b60a8416 2147 }
32131a9c 2148
76e0d211
RK
2149 /* If we are doing stack checking, give a warning if this function's
2150 frame size is larger than we expect. */
2151 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
2152 {
2153 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
2154
2155 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2156 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
2157 size += UNITS_PER_WORD;
2158
2159 if (size > STACK_CHECK_MAX_FRAME_SIZE)
2160 warning ("frame size too large for reliable stack checking");
2161 }
2162
32131a9c
RK
2163 /* Indicate that we no longer have known memory locations or constants. */
2164 reg_equiv_constant = 0;
2165 reg_equiv_memory_loc = 0;
5352b11a 2166
a68d4b75
BK
2167 if (real_known_ptr)
2168 free (real_known_ptr);
2169 if (real_at_ptr)
2170 free (real_at_ptr);
2171
c8ab4464
RS
2172 if (scratch_list)
2173 free (scratch_list);
c307c237 2174 scratch_list = 0;
c8ab4464
RS
2175 if (scratch_block)
2176 free (scratch_block);
c307c237
RK
2177 scratch_block = 0;
2178
8b4f9969
JW
2179 CLEAR_HARD_REG_SET (used_spill_regs);
2180 for (i = 0; i < n_spills; i++)
2181 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2182
5352b11a 2183 return failure;
32131a9c
RK
2184}
2185\f
2186/* Nonzero if, after spilling reg REGNO for non-groups,
2187 it will still be possible to find a group if we still need one. */
2188
2189static int
2190possible_group_p (regno, max_groups)
2191 int regno;
2192 int *max_groups;
2193{
2194 int i;
2195 int class = (int) NO_REGS;
2196
2197 for (i = 0; i < (int) N_REG_CLASSES; i++)
2198 if (max_groups[i] > 0)
2199 {
2200 class = i;
2201 break;
2202 }
2203
2204 if (class == (int) NO_REGS)
2205 return 1;
2206
2207 /* Consider each pair of consecutive registers. */
2208 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2209 {
2210 /* Ignore pairs that include reg REGNO. */
2211 if (i == regno || i + 1 == regno)
2212 continue;
2213
2214 /* Ignore pairs that are outside the class that needs the group.
2215 ??? Here we fail to handle the case where two different classes
2216 independently need groups. But this never happens with our
2217 current machine descriptions. */
2218 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2219 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2220 continue;
2221
2222 /* A pair of consecutive regs we can still spill does the trick. */
2223 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2224 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2225 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2226 return 1;
2227
2228 /* A pair of one already spilled and one we can spill does it
2229 provided the one already spilled is not otherwise reserved. */
2230 if (spill_reg_order[i] < 0
2231 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2232 && spill_reg_order[i + 1] >= 0
2233 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2234 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2235 return 1;
2236 if (spill_reg_order[i + 1] < 0
2237 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2238 && spill_reg_order[i] >= 0
2239 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2240 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2241 return 1;
2242 }
2243
2244 return 0;
2245}
2246\f
066aca28
RK
2247/* Count any groups of CLASS that can be formed from the registers recently
2248 spilled. */
32131a9c
RK
2249
2250static void
066aca28 2251count_possible_groups (group_size, group_mode, max_groups, class)
546b63fb 2252 int *group_size;
32131a9c 2253 enum machine_mode *group_mode;
546b63fb 2254 int *max_groups;
066aca28 2255 int class;
32131a9c 2256{
066aca28
RK
2257 HARD_REG_SET new;
2258 int i, j;
2259
32131a9c
RK
2260 /* Now find all consecutive groups of spilled registers
2261 and mark each group off against the need for such groups.
2262 But don't count them against ordinary need, yet. */
2263
066aca28
RK
2264 if (group_size[class] == 0)
2265 return;
2266
2267 CLEAR_HARD_REG_SET (new);
2268
2269 /* Make a mask of all the regs that are spill regs in class I. */
2270 for (i = 0; i < n_spills; i++)
2271 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2272 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2273 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2274 SET_HARD_REG_BIT (new, spill_regs[i]);
2275
2276 /* Find each consecutive group of them. */
2277 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2278 if (TEST_HARD_REG_BIT (new, i)
2279 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2280 && HARD_REGNO_MODE_OK (i, group_mode[class]))
32131a9c 2281 {
066aca28
RK
2282 for (j = 1; j < group_size[class]; j++)
2283 if (! TEST_HARD_REG_BIT (new, i + j))
2284 break;
32131a9c 2285
066aca28
RK
2286 if (j == group_size[class])
2287 {
2288 /* We found a group. Mark it off against this class's need for
2289 groups, and against each superclass too. */
2290 register enum reg_class *p;
2291
2292 max_groups[class]--;
2293 p = reg_class_superclasses[class];
2294 while (*p != LIM_REG_CLASSES)
d601d5da
JW
2295 {
2296 if (group_size [(int) *p] <= group_size [class])
2297 max_groups[(int) *p]--;
2298 p++;
2299 }
066aca28
RK
2300
2301 /* Don't count these registers again. */
46a70e45 2302 for (j = 0; j < group_size[class]; j++)
066aca28
RK
2303 SET_HARD_REG_BIT (counted_for_groups, i + j);
2304 }
2305
2306 /* Skip to the last reg in this group. When i is incremented above,
2307 it will then point to the first reg of the next possible group. */
2308 i += j - 1;
2309 }
32131a9c
RK
2310}
2311\f
2312/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2313 another mode that needs to be reloaded for the same register class CLASS.
2314 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2315 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2316
2317 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2318 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2319 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2320 causes unnecessary failures on machines requiring alignment of register
2321 groups when the two modes are different sizes, because the larger mode has
2322 more strict alignment rules than the smaller mode. */
2323
2324static int
2325modes_equiv_for_class_p (allocate_mode, other_mode, class)
2326 enum machine_mode allocate_mode, other_mode;
2327 enum reg_class class;
2328{
2329 register int regno;
2330 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2331 {
2332 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2333 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2334 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2335 return 0;
2336 }
2337 return 1;
2338}
2339
5352b11a
RS
2340/* Handle the failure to find a register to spill.
2341 INSN should be one of the insns which needed this particular spill reg. */
2342
2343static void
2344spill_failure (insn)
2345 rtx insn;
2346{
2347 if (asm_noperands (PATTERN (insn)) >= 0)
2348 error_for_asm (insn, "`asm' needs too many reloads");
2349 else
a89b2cc4 2350 fatal_insn ("Unable to find a register to spill.", insn);
5352b11a
RS
2351}
2352
32131a9c
RK
2353/* Add a new register to the tables of available spill-registers
2354 (as well as spilling all pseudos allocated to the register).
2355 I is the index of this register in potential_reload_regs.
2356 CLASS is the regclass whose need is being satisfied.
2357 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2358 so that this register can count off against them.
2359 MAX_NONGROUPS is 0 if this register is part of a group.
2360 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2361
2362static int
2363new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2364 int i;
2365 int class;
2366 int *max_needs;
2367 int *max_nongroups;
2368 int global;
2369 FILE *dumpfile;
2370{
2371 register enum reg_class *p;
2372 int val;
2373 int regno = potential_reload_regs[i];
2374
2375 if (i >= FIRST_PSEUDO_REGISTER)
2376 abort (); /* Caller failed to find any register. */
2377
2378 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
da275344
MM
2379 {
2380 static char *reg_class_names[] = REG_CLASS_NAMES;
2381 fatal ("fixed or forbidden register %d (%s) was spilled for class %s.\n\
56f58d3a 2382This may be due to a compiler bug or to impossible asm\n\
da275344
MM
2383statements or clauses.", regno, reg_names[regno], reg_class_names[class]);
2384 }
32131a9c
RK
2385
2386 /* Make reg REGNO an additional reload reg. */
2387
2388 potential_reload_regs[i] = -1;
2389 spill_regs[n_spills] = regno;
2390 spill_reg_order[regno] = n_spills;
2391 if (dumpfile)
2392 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2393
2394 /* Clear off the needs we just satisfied. */
2395
2396 max_needs[class]--;
2397 p = reg_class_superclasses[class];
2398 while (*p != LIM_REG_CLASSES)
2399 max_needs[(int) *p++]--;
2400
2401 if (max_nongroups && max_nongroups[class] > 0)
2402 {
2403 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2404 max_nongroups[class]--;
2405 p = reg_class_superclasses[class];
2406 while (*p != LIM_REG_CLASSES)
2407 max_nongroups[(int) *p++]--;
2408 }
2409
2410 /* Spill every pseudo reg that was allocated to this reg
2411 or to something that overlaps this reg. */
2412
2413 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2414
2415 /* If there are some registers still to eliminate and this register
2416 wasn't ever used before, additional stack space may have to be
2417 allocated to store this register. Thus, we may have changed the offset
2418 between the stack and frame pointers, so mark that something has changed.
2419 (If new pseudos were spilled, thus requiring more space, VAL would have
2420 been set non-zero by the call to spill_hard_reg above since additional
2421 reloads may be needed in that case.
2422
2423 One might think that we need only set VAL to 1 if this is a call-used
2424 register. However, the set of registers that must be saved by the
2425 prologue is not identical to the call-used set. For example, the
2426 register used by the call insn for the return PC is a call-used register,
2427 but must be saved by the prologue. */
2428 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2429 val = 1;
2430
2431 regs_ever_live[spill_regs[n_spills]] = 1;
2432 n_spills++;
2433
2434 return val;
2435}
2436\f
2437/* Delete an unneeded INSN and any previous insns who sole purpose is loading
2438 data that is dead in INSN. */
2439
2440static void
2441delete_dead_insn (insn)
2442 rtx insn;
2443{
2444 rtx prev = prev_real_insn (insn);
2445 rtx prev_dest;
2446
2447 /* If the previous insn sets a register that dies in our insn, delete it
2448 too. */
2449 if (prev && GET_CODE (PATTERN (prev)) == SET
2450 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2451 && reg_mentioned_p (prev_dest, PATTERN (insn))
b294ca38
R
2452 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2453 && ! side_effects_p (SET_SRC (PATTERN (prev))))
32131a9c
RK
2454 delete_dead_insn (prev);
2455
2456 PUT_CODE (insn, NOTE);
2457 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2458 NOTE_SOURCE_FILE (insn) = 0;
2459}
2460
2461/* Modify the home of pseudo-reg I.
2462 The new home is present in reg_renumber[I].
2463
2464 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2465 or it may be -1, meaning there is none or it is not relevant.
2466 This is used so that all pseudos spilled from a given hard reg
2467 can share one stack slot. */
2468
2469static void
2470alter_reg (i, from_reg)
2471 register int i;
2472 int from_reg;
2473{
2474 /* When outputting an inline function, this can happen
2475 for a reg that isn't actually used. */
2476 if (regno_reg_rtx[i] == 0)
2477 return;
2478
2479 /* If the reg got changed to a MEM at rtl-generation time,
2480 ignore it. */
2481 if (GET_CODE (regno_reg_rtx[i]) != REG)
2482 return;
2483
2484 /* Modify the reg-rtx to contain the new hard reg
2485 number or else to contain its pseudo reg number. */
2486 REGNO (regno_reg_rtx[i])
2487 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2488
2489 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2490 allocate a stack slot for it. */
2491
2492 if (reg_renumber[i] < 0
b1f21e0a 2493 && REG_N_REFS (i) > 0
32131a9c
RK
2494 && reg_equiv_constant[i] == 0
2495 && reg_equiv_memory_loc[i] == 0)
2496 {
2497 register rtx x;
2498 int inherent_size = PSEUDO_REGNO_BYTES (i);
2499 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2500 int adjust = 0;
2501
2502 /* Each pseudo reg has an inherent size which comes from its own mode,
2503 and a total size which provides room for paradoxical subregs
2504 which refer to the pseudo reg in wider modes.
2505
2506 We can use a slot already allocated if it provides both
2507 enough inherent space and enough total space.
2508 Otherwise, we allocate a new slot, making sure that it has no less
2509 inherent space, and no less total space, then the previous slot. */
2510 if (from_reg == -1)
2511 {
2512 /* No known place to spill from => no slot to reuse. */
cabcf079
ILT
2513 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
2514 inherent_size == total_size ? 0 : -1);
f76b9db2 2515 if (BYTES_BIG_ENDIAN)
02db8dd0
RK
2516 /* Cancel the big-endian correction done in assign_stack_local.
2517 Get the address of the beginning of the slot.
2518 This is so we can do a big-endian correction unconditionally
2519 below. */
2520 adjust = inherent_size - total_size;
2521
2522 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
32131a9c
RK
2523 }
2524 /* Reuse a stack slot if possible. */
2525 else if (spill_stack_slot[from_reg] != 0
2526 && spill_stack_slot_width[from_reg] >= total_size
2527 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2528 >= inherent_size))
2529 x = spill_stack_slot[from_reg];
2530 /* Allocate a bigger slot. */
2531 else
2532 {
2533 /* Compute maximum size needed, both for inherent size
2534 and for total size. */
2535 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
4f2d3674 2536 rtx stack_slot;
32131a9c
RK
2537 if (spill_stack_slot[from_reg])
2538 {
2539 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2540 > inherent_size)
2541 mode = GET_MODE (spill_stack_slot[from_reg]);
2542 if (spill_stack_slot_width[from_reg] > total_size)
2543 total_size = spill_stack_slot_width[from_reg];
2544 }
2545 /* Make a slot with that size. */
cabcf079
ILT
2546 x = assign_stack_local (mode, total_size,
2547 inherent_size == total_size ? 0 : -1);
4f2d3674 2548 stack_slot = x;
f76b9db2
ILT
2549 if (BYTES_BIG_ENDIAN)
2550 {
2551 /* Cancel the big-endian correction done in assign_stack_local.
2552 Get the address of the beginning of the slot.
2553 This is so we can do a big-endian correction unconditionally
2554 below. */
2555 adjust = GET_MODE_SIZE (mode) - total_size;
4f2d3674 2556 if (adjust)
38a448ca
RH
2557 stack_slot = gen_rtx_MEM (mode_for_size (total_size
2558 * BITS_PER_UNIT,
2559 MODE_INT, 1),
02db8dd0 2560 plus_constant (XEXP (x, 0), adjust));
f76b9db2 2561 }
4f2d3674 2562 spill_stack_slot[from_reg] = stack_slot;
32131a9c
RK
2563 spill_stack_slot_width[from_reg] = total_size;
2564 }
2565
32131a9c
RK
2566 /* On a big endian machine, the "address" of the slot
2567 is the address of the low part that fits its inherent mode. */
f76b9db2 2568 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
32131a9c 2569 adjust += (total_size - inherent_size);
32131a9c
RK
2570
2571 /* If we have any adjustment to make, or if the stack slot is the
2572 wrong mode, make a new stack slot. */
2573 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2574 {
38a448ca 2575 x = gen_rtx_MEM (GET_MODE (regno_reg_rtx[i]),
32131a9c 2576 plus_constant (XEXP (x, 0), adjust));
9ec36da5
JL
2577
2578 /* If this was shared among registers, must ensure we never
2579 set it readonly since that can cause scheduling
2580 problems. Note we would only have in this adjustment
2581 case in any event, since the code above doesn't set it. */
2582
2583 if (from_reg == -1)
2584 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
32131a9c
RK
2585 }
2586
2587 /* Save the stack slot for later. */
2588 reg_equiv_memory_loc[i] = x;
2589 }
2590}
2591
2592/* Mark the slots in regs_ever_live for the hard regs
2593 used by pseudo-reg number REGNO. */
2594
2595void
2596mark_home_live (regno)
2597 int regno;
2598{
2599 register int i, lim;
2600 i = reg_renumber[regno];
2601 if (i < 0)
2602 return;
2603 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2604 while (i < lim)
2605 regs_ever_live[i++] = 1;
2606}
c307c237
RK
2607
2608/* Mark the registers used in SCRATCH as being live. */
2609
2610static void
2611mark_scratch_live (scratch)
2612 rtx scratch;
2613{
2614 register int i;
2615 int regno = REGNO (scratch);
2616 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2617
2618 for (i = regno; i < lim; i++)
2619 regs_ever_live[i] = 1;
2620}
32131a9c
RK
2621\f
2622/* This function handles the tracking of elimination offsets around branches.
2623
2624 X is a piece of RTL being scanned.
2625
2626 INSN is the insn that it came from, if any.
2627
2628 INITIAL_P is non-zero if we are to set the offset to be the initial
2629 offset and zero if we are setting the offset of the label to be the
2630 current offset. */
2631
2632static void
2633set_label_offsets (x, insn, initial_p)
2634 rtx x;
2635 rtx insn;
2636 int initial_p;
2637{
2638 enum rtx_code code = GET_CODE (x);
2639 rtx tem;
2640 int i;
2641 struct elim_table *p;
2642
2643 switch (code)
2644 {
2645 case LABEL_REF:
8be386d9
RS
2646 if (LABEL_REF_NONLOCAL_P (x))
2647 return;
2648
32131a9c
RK
2649 x = XEXP (x, 0);
2650
0f41302f 2651 /* ... fall through ... */
32131a9c
RK
2652
2653 case CODE_LABEL:
2654 /* If we know nothing about this label, set the desired offsets. Note
2655 that this sets the offset at a label to be the offset before a label
2656 if we don't know anything about the label. This is not correct for
2657 the label after a BARRIER, but is the best guess we can make. If
2658 we guessed wrong, we will suppress an elimination that might have
2659 been possible had we been able to guess correctly. */
2660
2661 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2662 {
2663 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2664 offsets_at[CODE_LABEL_NUMBER (x)][i]
2665 = (initial_p ? reg_eliminate[i].initial_offset
2666 : reg_eliminate[i].offset);
2667 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2668 }
2669
2670 /* Otherwise, if this is the definition of a label and it is
d45cf215 2671 preceded by a BARRIER, set our offsets to the known offset of
32131a9c
RK
2672 that label. */
2673
2674 else if (x == insn
2675 && (tem = prev_nonnote_insn (insn)) != 0
2676 && GET_CODE (tem) == BARRIER)
2a4b5f3b
RK
2677 {
2678 num_not_at_initial_offset = 0;
2679 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2680 {
2681 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2682 = offsets_at[CODE_LABEL_NUMBER (x)][i];
1d0d98f3
RK
2683 if (reg_eliminate[i].can_eliminate
2684 && (reg_eliminate[i].offset
2685 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
2686 num_not_at_initial_offset++;
2687 }
2688 }
32131a9c
RK
2689
2690 else
2691 /* If neither of the above cases is true, compare each offset
2692 with those previously recorded and suppress any eliminations
2693 where the offsets disagree. */
a8fdc208 2694
32131a9c
RK
2695 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2696 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2697 != (initial_p ? reg_eliminate[i].initial_offset
2698 : reg_eliminate[i].offset))
2699 reg_eliminate[i].can_eliminate = 0;
2700
2701 return;
2702
2703 case JUMP_INSN:
2704 set_label_offsets (PATTERN (insn), insn, initial_p);
2705
0f41302f 2706 /* ... fall through ... */
32131a9c
RK
2707
2708 case INSN:
2709 case CALL_INSN:
2710 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2711 and hence must have all eliminations at their initial offsets. */
2712 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2713 if (REG_NOTE_KIND (tem) == REG_LABEL)
2714 set_label_offsets (XEXP (tem, 0), insn, 1);
2715 return;
2716
2717 case ADDR_VEC:
2718 case ADDR_DIFF_VEC:
2719 /* Each of the labels in the address vector must be at their initial
38e01259 2720 offsets. We want the first field for ADDR_VEC and the second
32131a9c
RK
2721 field for ADDR_DIFF_VEC. */
2722
2723 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2724 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2725 insn, initial_p);
2726 return;
2727
2728 case SET:
2729 /* We only care about setting PC. If the source is not RETURN,
2730 IF_THEN_ELSE, or a label, disable any eliminations not at
2731 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2732 isn't one of those possibilities. For branches to a label,
2733 call ourselves recursively.
2734
2735 Note that this can disable elimination unnecessarily when we have
2736 a non-local goto since it will look like a non-constant jump to
2737 someplace in the current function. This isn't a significant
2738 problem since such jumps will normally be when all elimination
2739 pairs are back to their initial offsets. */
2740
2741 if (SET_DEST (x) != pc_rtx)
2742 return;
2743
2744 switch (GET_CODE (SET_SRC (x)))
2745 {
2746 case PC:
2747 case RETURN:
2748 return;
2749
2750 case LABEL_REF:
2751 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2752 return;
2753
2754 case IF_THEN_ELSE:
2755 tem = XEXP (SET_SRC (x), 1);
2756 if (GET_CODE (tem) == LABEL_REF)
2757 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2758 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2759 break;
2760
2761 tem = XEXP (SET_SRC (x), 2);
2762 if (GET_CODE (tem) == LABEL_REF)
2763 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2764 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2765 break;
2766 return;
e9a25f70
JL
2767
2768 default:
2769 break;
32131a9c
RK
2770 }
2771
2772 /* If we reach here, all eliminations must be at their initial
2773 offset because we are doing a jump to a variable address. */
2774 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2775 if (p->offset != p->initial_offset)
2776 p->can_eliminate = 0;
e9a25f70
JL
2777 break;
2778
2779 default:
2780 break;
32131a9c
RK
2781 }
2782}
2783\f
2784/* Used for communication between the next two function to properly share
2785 the vector for an ASM_OPERANDS. */
2786
2787static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2788
a8fdc208 2789/* Scan X and replace any eliminable registers (such as fp) with a
32131a9c
RK
2790 replacement (such as sp), plus an offset.
2791
2792 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2793 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2794 MEM, we are allowed to replace a sum of a register and the constant zero
2795 with the register, which we cannot do outside a MEM. In addition, we need
2796 to record the fact that a register is referenced outside a MEM.
2797
ff32812a 2798 If INSN is an insn, it is the insn containing X. If we replace a REG
32131a9c
RK
2799 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2800 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
38e01259 2801 the REG is being modified.
32131a9c 2802
ff32812a
RS
2803 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2804 That's used when we eliminate in expressions stored in notes.
2805 This means, do not set ref_outside_mem even if the reference
2806 is outside of MEMs.
2807
32131a9c
RK
2808 If we see a modification to a register we know about, take the
2809 appropriate action (see case SET, below).
2810
2811 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2812 replacements done assuming all offsets are at their initial values. If
2813 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2814 encounter, return the actual location so that find_reloads will do
2815 the proper thing. */
2816
2817rtx
1914f5da 2818eliminate_regs (x, mem_mode, insn)
32131a9c
RK
2819 rtx x;
2820 enum machine_mode mem_mode;
2821 rtx insn;
2822{
2823 enum rtx_code code = GET_CODE (x);
2824 struct elim_table *ep;
2825 int regno;
2826 rtx new;
2827 int i, j;
2828 char *fmt;
2829 int copied = 0;
2830
2831 switch (code)
2832 {
2833 case CONST_INT:
2834 case CONST_DOUBLE:
2835 case CONST:
2836 case SYMBOL_REF:
2837 case CODE_LABEL:
2838 case PC:
2839 case CC0:
2840 case ASM_INPUT:
2841 case ADDR_VEC:
2842 case ADDR_DIFF_VEC:
2843 case RETURN:
2844 return x;
2845
e9a25f70
JL
2846 case ADDRESSOF:
2847 /* This is only for the benefit of the debugging backends, which call
2848 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2849 removed after CSE. */
1914f5da 2850 new = eliminate_regs (XEXP (x, 0), 0, insn);
e9a25f70
JL
2851 if (GET_CODE (new) == MEM)
2852 return XEXP (new, 0);
2853 return x;
2854
32131a9c
RK
2855 case REG:
2856 regno = REGNO (x);
2857
2858 /* First handle the case where we encounter a bare register that
2859 is eliminable. Replace it with a PLUS. */
2860 if (regno < FIRST_PSEUDO_REGISTER)
2861 {
2862 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2863 ep++)
2864 if (ep->from_rtx == x && ep->can_eliminate)
2865 {
ff32812a
RS
2866 if (! mem_mode
2867 /* Refs inside notes don't count for this purpose. */
fe089a90 2868 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
ff32812a 2869 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2870 ep->ref_outside_mem = 1;
2871 return plus_constant (ep->to_rtx, ep->previous_offset);
2872 }
2873
2874 }
2875 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2876 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2877 {
2878 /* In this case, find_reloads would attempt to either use an
2879 incorrect address (if something is not at its initial offset)
2880 or substitute an replaced address into an insn (which loses
2881 if the offset is changed by some later action). So we simply
2882 return the replaced stack slot (assuming it is changed by
2883 elimination) and ignore the fact that this is actually a
2884 reference to the pseudo. Ensure we make a copy of the
2885 address in case it is shared. */
1914f5da 2886 new = eliminate_regs (reg_equiv_memory_loc[regno], mem_mode, insn);
32131a9c 2887 if (new != reg_equiv_memory_loc[regno])
208dffa5 2888 {
b60a8416
R
2889 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2890 && GET_CODE (insn) != INSN_LIST)
2891 REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn))
2892 = gen_rtx_EXPR_LIST (REG_EQUAL, new, NULL_RTX);
208dffa5
RS
2893 return copy_rtx (new);
2894 }
32131a9c
RK
2895 }
2896 return x;
2897
2898 case PLUS:
2899 /* If this is the sum of an eliminable register and a constant, rework
2900 the sum. */
2901 if (GET_CODE (XEXP (x, 0)) == REG
2902 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2903 && CONSTANT_P (XEXP (x, 1)))
2904 {
2905 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2906 ep++)
2907 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2908 {
e5687447
JW
2909 if (! mem_mode
2910 /* Refs inside notes don't count for this purpose. */
2911 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2912 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2913 ep->ref_outside_mem = 1;
2914
2915 /* The only time we want to replace a PLUS with a REG (this
2916 occurs when the constant operand of the PLUS is the negative
2917 of the offset) is when we are inside a MEM. We won't want
2918 to do so at other times because that would change the
2919 structure of the insn in a way that reload can't handle.
2920 We special-case the commonest situation in
2921 eliminate_regs_in_insn, so just replace a PLUS with a
2922 PLUS here, unless inside a MEM. */
a23b64d5 2923 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
32131a9c
RK
2924 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2925 return ep->to_rtx;
2926 else
38a448ca
RH
2927 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2928 plus_constant (XEXP (x, 1),
2929 ep->previous_offset));
32131a9c
RK
2930 }
2931
2932 /* If the register is not eliminable, we are done since the other
2933 operand is a constant. */
2934 return x;
2935 }
2936
2937 /* If this is part of an address, we want to bring any constant to the
2938 outermost PLUS. We will do this by doing register replacement in
2939 our operands and seeing if a constant shows up in one of them.
2940
2941 We assume here this is part of an address (or a "load address" insn)
2942 since an eliminable register is not likely to appear in any other
2943 context.
2944
2945 If we have (plus (eliminable) (reg)), we want to produce
930aeef3 2946 (plus (plus (replacement) (reg) (const))). If this was part of a
32131a9c
RK
2947 normal add insn, (plus (replacement) (reg)) will be pushed as a
2948 reload. This is the desired action. */
2949
2950 {
1914f5da
RH
2951 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2952 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
32131a9c
RK
2953
2954 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2955 {
2956 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2957 didn't get a hard register but has a reg_equiv_constant,
32131a9c
RK
2958 we must replace the constant here since it may no longer
2959 be in the position of any operand. */
2960 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2961 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2962 && reg_renumber[REGNO (new1)] < 0
2963 && reg_equiv_constant != 0
2964 && reg_equiv_constant[REGNO (new1)] != 0)
2965 new1 = reg_equiv_constant[REGNO (new1)];
2966 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2967 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2968 && reg_renumber[REGNO (new0)] < 0
2969 && reg_equiv_constant[REGNO (new0)] != 0)
2970 new0 = reg_equiv_constant[REGNO (new0)];
2971
2972 new = form_sum (new0, new1);
2973
2974 /* As above, if we are not inside a MEM we do not want to
2975 turn a PLUS into something else. We might try to do so here
2976 for an addition of 0 if we aren't optimizing. */
2977 if (! mem_mode && GET_CODE (new) != PLUS)
38a448ca 2978 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
32131a9c
RK
2979 else
2980 return new;
2981 }
2982 }
2983 return x;
2984
981c7390
RK
2985 case MULT:
2986 /* If this is the product of an eliminable register and a
2987 constant, apply the distribute law and move the constant out
2988 so that we have (plus (mult ..) ..). This is needed in order
9faa82d8 2989 to keep load-address insns valid. This case is pathological.
981c7390
RK
2990 We ignore the possibility of overflow here. */
2991 if (GET_CODE (XEXP (x, 0)) == REG
2992 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2993 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2994 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2995 ep++)
2996 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2997 {
2998 if (! mem_mode
2999 /* Refs inside notes don't count for this purpose. */
3000 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
3001 || GET_CODE (insn) == INSN_LIST)))
3002 ep->ref_outside_mem = 1;
3003
3004 return
38a448ca 3005 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
981c7390
RK
3006 ep->previous_offset * INTVAL (XEXP (x, 1)));
3007 }
32131a9c 3008
0f41302f 3009 /* ... fall through ... */
32131a9c 3010
32131a9c
RK
3011 case CALL:
3012 case COMPARE:
930aeef3 3013 case MINUS:
32131a9c
RK
3014 case DIV: case UDIV:
3015 case MOD: case UMOD:
3016 case AND: case IOR: case XOR:
45620ed4
RK
3017 case ROTATERT: case ROTATE:
3018 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
32131a9c
RK
3019 case NE: case EQ:
3020 case GE: case GT: case GEU: case GTU:
3021 case LE: case LT: case LEU: case LTU:
3022 {
1914f5da 3023 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
fb3821f7 3024 rtx new1
1914f5da 3025 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
32131a9c
RK
3026
3027 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
38a448ca 3028 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
32131a9c
RK
3029 }
3030 return x;
3031
981c7390
RK
3032 case EXPR_LIST:
3033 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
3034 if (XEXP (x, 0))
3035 {
1914f5da 3036 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
981c7390 3037 if (new != XEXP (x, 0))
38a448ca 3038 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
981c7390
RK
3039 }
3040
0f41302f 3041 /* ... fall through ... */
981c7390
RK
3042
3043 case INSN_LIST:
3044 /* Now do eliminations in the rest of the chain. If this was
3045 an EXPR_LIST, this might result in allocating more memory than is
3046 strictly needed, but it simplifies the code. */
3047 if (XEXP (x, 1))
3048 {
1914f5da 3049 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
981c7390 3050 if (new != XEXP (x, 1))
38a448ca 3051 return gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
981c7390
RK
3052 }
3053 return x;
3054
32131a9c
RK
3055 case PRE_INC:
3056 case POST_INC:
3057 case PRE_DEC:
3058 case POST_DEC:
3059 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3060 if (ep->to_rtx == XEXP (x, 0))
3061 {
4c05b187
RK
3062 int size = GET_MODE_SIZE (mem_mode);
3063
3064 /* If more bytes than MEM_MODE are pushed, account for them. */
3065#ifdef PUSH_ROUNDING
3066 if (ep->to_rtx == stack_pointer_rtx)
3067 size = PUSH_ROUNDING (size);
3068#endif
32131a9c 3069 if (code == PRE_DEC || code == POST_DEC)
4c05b187 3070 ep->offset += size;
32131a9c 3071 else
4c05b187 3072 ep->offset -= size;
32131a9c
RK
3073 }
3074
3075 /* Fall through to generic unary operation case. */
32131a9c
RK
3076 case STRICT_LOW_PART:
3077 case NEG: case NOT:
3078 case SIGN_EXTEND: case ZERO_EXTEND:
3079 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3080 case FLOAT: case FIX:
3081 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3082 case ABS:
3083 case SQRT:
3084 case FFS:
1914f5da 3085 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
32131a9c 3086 if (new != XEXP (x, 0))
38a448ca 3087 return gen_rtx_fmt_e (code, GET_MODE (x), new);
32131a9c
RK
3088 return x;
3089
3090 case SUBREG:
3091 /* Similar to above processing, but preserve SUBREG_WORD.
3092 Convert (subreg (mem)) to (mem) if not paradoxical.
3093 Also, if we have a non-paradoxical (subreg (pseudo)) and the
3094 pseudo didn't get a hard reg, we must replace this with the
3095 eliminated version of the memory location because push_reloads
3096 may do the replacement in certain circumstances. */
3097 if (GET_CODE (SUBREG_REG (x)) == REG
3098 && (GET_MODE_SIZE (GET_MODE (x))
3099 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3100 && reg_equiv_memory_loc != 0
3101 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
3102 {
3103 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
1914f5da 3104 mem_mode, insn);
32131a9c
RK
3105
3106 /* If we didn't change anything, we must retain the pseudo. */
3107 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
59e2c378 3108 new = SUBREG_REG (x);
32131a9c 3109 else
59e2c378 3110 {
59e2c378
RK
3111 /* In this case, we must show that the pseudo is used in this
3112 insn so that delete_output_reload will do the right thing. */
3113 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
3114 && GET_CODE (insn) != INSN_LIST)
b60a8416
R
3115 REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode,
3116 SUBREG_REG (x)),
3117 insn))
3118 = gen_rtx_EXPR_LIST (REG_EQUAL, new, NULL_RTX);
3119
3120 /* Ensure NEW isn't shared in case we have to reload it. */
3121 new = copy_rtx (new);
59e2c378 3122 }
32131a9c
RK
3123 }
3124 else
1914f5da 3125 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
32131a9c
RK
3126
3127 if (new != XEXP (x, 0))
3128 {
29ae5012
RK
3129 int x_size = GET_MODE_SIZE (GET_MODE (x));
3130 int new_size = GET_MODE_SIZE (GET_MODE (new));
3131
1914f5da 3132 if (GET_CODE (new) == MEM
6d49a073 3133 && ((x_size < new_size
1914f5da 3134#ifdef WORD_REGISTER_OPERATIONS
6d49a073
JW
3135 /* On these machines, combine can create rtl of the form
3136 (set (subreg:m1 (reg:m2 R) 0) ...)
3137 where m1 < m2, and expects something interesting to
3138 happen to the entire word. Moreover, it will use the
3139 (reg:m2 R) later, expecting all bits to be preserved.
3140 So if the number of words is the same, preserve the
3141 subreg so that push_reloads can see it. */
3142 && ! ((x_size-1)/UNITS_PER_WORD == (new_size-1)/UNITS_PER_WORD)
1914f5da 3143#endif
6d49a073
JW
3144 )
3145 || (x_size == new_size))
1914f5da 3146 )
32131a9c
RK
3147 {
3148 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
3149 enum machine_mode mode = GET_MODE (x);
3150
f76b9db2
ILT
3151 if (BYTES_BIG_ENDIAN)
3152 offset += (MIN (UNITS_PER_WORD,
3153 GET_MODE_SIZE (GET_MODE (new)))
3154 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
32131a9c
RK
3155
3156 PUT_MODE (new, mode);
3157 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
3158 return new;
3159 }
3160 else
38a448ca 3161 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_WORD (x));
32131a9c
RK
3162 }
3163
3164 return x;
3165
94714ecc
RK
3166 case USE:
3167 /* If using a register that is the source of an eliminate we still
3168 think can be performed, note it cannot be performed since we don't
3169 know how this register is used. */
3170 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3171 if (ep->from_rtx == XEXP (x, 0))
3172 ep->can_eliminate = 0;
3173
1914f5da 3174 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
94714ecc 3175 if (new != XEXP (x, 0))
38a448ca 3176 return gen_rtx_fmt_e (code, GET_MODE (x), new);
94714ecc
RK
3177 return x;
3178
32131a9c
RK
3179 case CLOBBER:
3180 /* If clobbering a register that is the replacement register for an
d45cf215 3181 elimination we still think can be performed, note that it cannot
32131a9c
RK
3182 be performed. Otherwise, we need not be concerned about it. */
3183 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3184 if (ep->to_rtx == XEXP (x, 0))
3185 ep->can_eliminate = 0;
3186
1914f5da 3187 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2045084c 3188 if (new != XEXP (x, 0))
38a448ca 3189 return gen_rtx_fmt_e (code, GET_MODE (x), new);
32131a9c
RK
3190 return x;
3191
3192 case ASM_OPERANDS:
3193 {
3194 rtx *temp_vec;
3195 /* Properly handle sharing input and constraint vectors. */
3196 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3197 {
3198 /* When we come to a new vector not seen before,
3199 scan all its elements; keep the old vector if none
3200 of them changes; otherwise, make a copy. */
3201 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3202 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3203 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3204 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
1914f5da 3205 mem_mode, insn);
32131a9c
RK
3206
3207 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3208 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3209 break;
3210
3211 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3212 new_asm_operands_vec = old_asm_operands_vec;
3213 else
3214 new_asm_operands_vec
3215 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3216 }
3217
3218 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3219 if (new_asm_operands_vec == old_asm_operands_vec)
3220 return x;
3221
38a448ca
RH
3222 new = gen_rtx_ASM_OPERANDS (VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3223 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3224 ASM_OPERANDS_OUTPUT_IDX (x),
3225 new_asm_operands_vec,
3226 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3227 ASM_OPERANDS_SOURCE_FILE (x),
3228 ASM_OPERANDS_SOURCE_LINE (x));
32131a9c
RK
3229 new->volatil = x->volatil;
3230 return new;
3231 }
3232
3233 case SET:
3234 /* Check for setting a register that we know about. */
3235 if (GET_CODE (SET_DEST (x)) == REG)
3236 {
3237 /* See if this is setting the replacement register for an
a8fdc208 3238 elimination.
32131a9c 3239
3ec2ea3e
DE
3240 If DEST is the hard frame pointer, we do nothing because we
3241 assume that all assignments to the frame pointer are for
3242 non-local gotos and are being done at a time when they are valid
3243 and do not disturb anything else. Some machines want to
3244 eliminate a fake argument pointer (or even a fake frame pointer)
3245 with either the real frame or the stack pointer. Assignments to
3246 the hard frame pointer must not prevent this elimination. */
32131a9c
RK
3247
3248 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3249 ep++)
3250 if (ep->to_rtx == SET_DEST (x)
3ec2ea3e 3251 && SET_DEST (x) != hard_frame_pointer_rtx)
32131a9c 3252 {
6dc42e49 3253 /* If it is being incremented, adjust the offset. Otherwise,
32131a9c
RK
3254 this elimination can't be done. */
3255 rtx src = SET_SRC (x);
3256
3257 if (GET_CODE (src) == PLUS
3258 && XEXP (src, 0) == SET_DEST (x)
3259 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3260 ep->offset -= INTVAL (XEXP (src, 1));
3261 else
3262 ep->can_eliminate = 0;
3263 }
3264
3265 /* Now check to see we are assigning to a register that can be
3266 eliminated. If so, it must be as part of a PARALLEL, since we
3267 will not have been called if this is a single SET. So indicate
3268 that we can no longer eliminate this reg. */
3269 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3270 ep++)
3271 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3272 ep->can_eliminate = 0;
3273 }
3274
3275 /* Now avoid the loop below in this common case. */
3276 {
1914f5da
RH
3277 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3278 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
32131a9c 3279
ff32812a 3280 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
32131a9c
RK
3281 write a CLOBBER insn. */
3282 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
572ca60a
RS
3283 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3284 && GET_CODE (insn) != INSN_LIST)
38a448ca 3285 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, SET_DEST (x)), insn);
32131a9c
RK
3286
3287 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
38a448ca 3288 return gen_rtx_SET (VOIDmode, new0, new1);
32131a9c
RK
3289 }
3290
3291 return x;
3292
3293 case MEM:
e9a25f70
JL
3294 /* This is only for the benefit of the debugging backends, which call
3295 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
3296 removed after CSE. */
3297 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
1914f5da 3298 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn);
e9a25f70 3299
32131a9c
RK
3300 /* Our only special processing is to pass the mode of the MEM to our
3301 recursive call and copy the flags. While we are here, handle this
3302 case more efficiently. */
1914f5da 3303 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
32131a9c
RK
3304 if (new != XEXP (x, 0))
3305 {
38a448ca 3306 new = gen_rtx_MEM (GET_MODE (x), new);
32131a9c
RK
3307 new->volatil = x->volatil;
3308 new->unchanging = x->unchanging;
3309 new->in_struct = x->in_struct;
3310 return new;
3311 }
3312 else
3313 return x;
e9a25f70
JL
3314
3315 default:
3316 break;
32131a9c
RK
3317 }
3318
3319 /* Process each of our operands recursively. If any have changed, make a
3320 copy of the rtx. */
3321 fmt = GET_RTX_FORMAT (code);
3322 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3323 {
3324 if (*fmt == 'e')
3325 {
1914f5da 3326 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
32131a9c
RK
3327 if (new != XEXP (x, i) && ! copied)
3328 {
3329 rtx new_x = rtx_alloc (code);
4c9a05bc
RK
3330 bcopy ((char *) x, (char *) new_x,
3331 (sizeof (*new_x) - sizeof (new_x->fld)
3332 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
32131a9c
RK
3333 x = new_x;
3334 copied = 1;
3335 }
3336 XEXP (x, i) = new;
3337 }
3338 else if (*fmt == 'E')
3339 {
3340 int copied_vec = 0;
3341 for (j = 0; j < XVECLEN (x, i); j++)
3342 {
1914f5da 3343 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
32131a9c
RK
3344 if (new != XVECEXP (x, i, j) && ! copied_vec)
3345 {
27108369
RK
3346 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
3347 XVEC (x, i)->elem);
32131a9c
RK
3348 if (! copied)
3349 {
3350 rtx new_x = rtx_alloc (code);
4c9a05bc
RK
3351 bcopy ((char *) x, (char *) new_x,
3352 (sizeof (*new_x) - sizeof (new_x->fld)
3353 + (sizeof (new_x->fld[0])
3354 * GET_RTX_LENGTH (code))));
32131a9c
RK
3355 x = new_x;
3356 copied = 1;
3357 }
3358 XVEC (x, i) = new_v;
3359 copied_vec = 1;
3360 }
3361 XVECEXP (x, i, j) = new;
3362 }
3363 }
3364 }
3365
3366 return x;
3367}
3368\f
3369/* Scan INSN and eliminate all eliminable registers in it.
3370
3371 If REPLACE is nonzero, do the replacement destructively. Also
3372 delete the insn as dead it if it is setting an eliminable register.
3373
3374 If REPLACE is zero, do all our allocations in reload_obstack.
3375
3376 If no eliminations were done and this insn doesn't require any elimination
3377 processing (these are not identical conditions: it might be updating sp,
3378 but not referencing fp; this needs to be seen during reload_as_needed so
3379 that the offset between fp and sp can be taken into consideration), zero
3380 is returned. Otherwise, 1 is returned. */
3381
3382static int
3383eliminate_regs_in_insn (insn, replace)
3384 rtx insn;
3385 int replace;
3386{
3387 rtx old_body = PATTERN (insn);
774672d2 3388 rtx old_set = single_set (insn);
32131a9c
RK
3389 rtx new_body;
3390 int val = 0;
3391 struct elim_table *ep;
3392
3393 if (! replace)
3394 push_obstacks (&reload_obstack, &reload_obstack);
3395
774672d2
RK
3396 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3397 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
32131a9c
RK
3398 {
3399 /* Check for setting an eliminable register. */
3400 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
774672d2 3401 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
32131a9c 3402 {
dd1eab0a
RK
3403#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3404 /* If this is setting the frame pointer register to the
3405 hardware frame pointer register and this is an elimination
3406 that will be done (tested above), this insn is really
3407 adjusting the frame pointer downward to compensate for
3408 the adjustment done before a nonlocal goto. */
3409 if (ep->from == FRAME_POINTER_REGNUM
3410 && ep->to == HARD_FRAME_POINTER_REGNUM)
3411 {
3412 rtx src = SET_SRC (old_set);
3413 int offset, ok = 0;
8026ebba 3414 rtx prev_insn, prev_set;
dd1eab0a
RK
3415
3416 if (src == ep->to_rtx)
3417 offset = 0, ok = 1;
3418 else if (GET_CODE (src) == PLUS
3419 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3420 offset = INTVAL (XEXP (src, 0)), ok = 1;
8026ebba
ILT
3421 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
3422 && (prev_set = single_set (prev_insn)) != 0
3423 && rtx_equal_p (SET_DEST (prev_set), src))
3424 {
3425 src = SET_SRC (prev_set);
3426 if (src == ep->to_rtx)
3427 offset = 0, ok = 1;
3428 else if (GET_CODE (src) == PLUS
3429 && GET_CODE (XEXP (src, 0)) == CONST_INT
3430 && XEXP (src, 1) == ep->to_rtx)
3431 offset = INTVAL (XEXP (src, 0)), ok = 1;
3432 else if (GET_CODE (src) == PLUS
3433 && GET_CODE (XEXP (src, 1)) == CONST_INT
3434 && XEXP (src, 0) == ep->to_rtx)
3435 offset = INTVAL (XEXP (src, 1)), ok = 1;
3436 }
dd1eab0a
RK
3437
3438 if (ok)
3439 {
3440 if (replace)
3441 {
3442 rtx src
3443 = plus_constant (ep->to_rtx, offset - ep->offset);
3444
3445 /* First see if this insn remains valid when we
3446 make the change. If not, keep the INSN_CODE
3447 the same and let reload fit it up. */
3448 validate_change (insn, &SET_SRC (old_set), src, 1);
3449 validate_change (insn, &SET_DEST (old_set),
3450 ep->to_rtx, 1);
3451 if (! apply_change_group ())
3452 {
3453 SET_SRC (old_set) = src;
3454 SET_DEST (old_set) = ep->to_rtx;
3455 }
3456 }
3457
3458 val = 1;
3459 goto done;
3460 }
3461 }
3462#endif
3463
32131a9c
RK
3464 /* In this case this insn isn't serving a useful purpose. We
3465 will delete it in reload_as_needed once we know that this
3466 elimination is, in fact, being done.
3467
abc95ed3 3468 If REPLACE isn't set, we can't delete this insn, but needn't
32131a9c
RK
3469 process it since it won't be used unless something changes. */
3470 if (replace)
3471 delete_dead_insn (insn);
3472 val = 1;
3473 goto done;
3474 }
3475
3476 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3477 in the insn is the negative of the offset in FROM. Substitute
3478 (set (reg) (reg to)) for the insn and change its code.
3479
3480 We have to do this here, rather than in eliminate_regs, do that we can
3481 change the insn code. */
3482
774672d2
RK
3483 if (GET_CODE (SET_SRC (old_set)) == PLUS
3484 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3485 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
32131a9c
RK
3486 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3487 ep++)
774672d2 3488 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
922d9d40 3489 && ep->can_eliminate)
32131a9c 3490 {
922d9d40
RK
3491 /* We must stop at the first elimination that will be used.
3492 If this one would replace the PLUS with a REG, do it
3493 now. Otherwise, quit the loop and let eliminate_regs
3494 do its normal replacement. */
774672d2 3495 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
922d9d40 3496 {
774672d2
RK
3497 /* We assume here that we don't need a PARALLEL of
3498 any CLOBBERs for this assignment. There's not
3499 much we can do if we do need it. */
38a448ca
RH
3500 PATTERN (insn) = gen_rtx_SET (VOIDmode,
3501 SET_DEST (old_set),
3502 ep->to_rtx);
922d9d40
RK
3503 INSN_CODE (insn) = -1;
3504 val = 1;
3505 goto done;
3506 }
3507
3508 break;
32131a9c
RK
3509 }
3510 }
3511
3512 old_asm_operands_vec = 0;
3513
3514 /* Replace the body of this insn with a substituted form. If we changed
05b4c365 3515 something, return non-zero.
32131a9c
RK
3516
3517 If we are replacing a body that was a (set X (plus Y Z)), try to
3518 re-recognize the insn. We do this in case we had a simple addition
3519 but now can do this as a load-address. This saves an insn in this
0f41302f 3520 common case. */
32131a9c 3521
1914f5da 3522 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
32131a9c
RK
3523 if (new_body != old_body)
3524 {
7c791b13
RK
3525 /* If we aren't replacing things permanently and we changed something,
3526 make another copy to ensure that all the RTL is new. Otherwise
3527 things can go wrong if find_reload swaps commutative operands
0f41302f 3528 and one is inside RTL that has been copied while the other is not. */
7c791b13 3529
4d411872
RS
3530 /* Don't copy an asm_operands because (1) there's no need and (2)
3531 copy_rtx can't do it properly when there are multiple outputs. */
b84f9d9c 3532 if (! replace && asm_noperands (old_body) < 0)
7c791b13
RK
3533 new_body = copy_rtx (new_body);
3534
774672d2
RK
3535 /* If we had a move insn but now we don't, rerecognize it. This will
3536 cause spurious re-recognition if the old move had a PARALLEL since
3537 the new one still will, but we can't call single_set without
3538 having put NEW_BODY into the insn and the re-recognition won't
3539 hurt in this rare case. */
3540 if (old_set != 0
3541 && ((GET_CODE (SET_SRC (old_set)) == REG
3542 && (GET_CODE (new_body) != SET
3543 || GET_CODE (SET_SRC (new_body)) != REG))
3544 /* If this was a load from or store to memory, compare
3545 the MEM in recog_operand to the one in the insn. If they
3546 are not equal, then rerecognize the insn. */
3547 || (old_set != 0
3548 && ((GET_CODE (SET_SRC (old_set)) == MEM
3549 && SET_SRC (old_set) != recog_operand[1])
3550 || (GET_CODE (SET_DEST (old_set)) == MEM
3551 && SET_DEST (old_set) != recog_operand[0])))
3552 /* If this was an add insn before, rerecognize. */
3553 || GET_CODE (SET_SRC (old_set)) == PLUS))
4a5d0fb5
RS
3554 {
3555 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
0ba846c7
RS
3556 /* If recognition fails, store the new body anyway.
3557 It's normal to have recognition failures here
3558 due to bizarre memory addresses; reloading will fix them. */
3559 PATTERN (insn) = new_body;
4a5d0fb5 3560 }
0ba846c7 3561 else
32131a9c
RK
3562 PATTERN (insn) = new_body;
3563
32131a9c
RK
3564 val = 1;
3565 }
a8fdc208 3566
32131a9c
RK
3567 /* Loop through all elimination pairs. See if any have changed and
3568 recalculate the number not at initial offset.
3569
a8efe40d
RK
3570 Compute the maximum offset (minimum offset if the stack does not
3571 grow downward) for each elimination pair.
3572
32131a9c
RK
3573 We also detect a cases where register elimination cannot be done,
3574 namely, if a register would be both changed and referenced outside a MEM
3575 in the resulting insn since such an insn is often undefined and, even if
3576 not, we cannot know what meaning will be given to it. Note that it is
3577 valid to have a register used in an address in an insn that changes it
3578 (presumably with a pre- or post-increment or decrement).
3579
3580 If anything changes, return nonzero. */
3581
3582 num_not_at_initial_offset = 0;
3583 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3584 {
3585 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3586 ep->can_eliminate = 0;
3587
3588 ep->ref_outside_mem = 0;
3589
3590 if (ep->previous_offset != ep->offset)
3591 val = 1;
3592
3593 ep->previous_offset = ep->offset;
3594 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3595 num_not_at_initial_offset++;
a8efe40d
RK
3596
3597#ifdef STACK_GROWS_DOWNWARD
3598 ep->max_offset = MAX (ep->max_offset, ep->offset);
3599#else
3600 ep->max_offset = MIN (ep->max_offset, ep->offset);
3601#endif
32131a9c
RK
3602 }
3603
3604 done:
9faa82d8 3605 /* If we changed something, perform elimination in REG_NOTES. This is
05b4c365
RK
3606 needed even when REPLACE is zero because a REG_DEAD note might refer
3607 to a register that we eliminate and could cause a different number
3608 of spill registers to be needed in the final reload pass than in
3609 the pre-passes. */
20748cab 3610 if (val && REG_NOTES (insn) != 0)
1914f5da 3611 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
05b4c365 3612
32131a9c
RK
3613 if (! replace)
3614 pop_obstacks ();
3615
3616 return val;
3617}
3618
3619/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3620 replacement we currently believe is valid, mark it as not eliminable if X
3621 modifies DEST in any way other than by adding a constant integer to it.
3622
3623 If DEST is the frame pointer, we do nothing because we assume that
3ec2ea3e
DE
3624 all assignments to the hard frame pointer are nonlocal gotos and are being
3625 done at a time when they are valid and do not disturb anything else.
32131a9c 3626 Some machines want to eliminate a fake argument pointer with either the
3ec2ea3e
DE
3627 frame or stack pointer. Assignments to the hard frame pointer must not
3628 prevent this elimination.
32131a9c
RK
3629
3630 Called via note_stores from reload before starting its passes to scan
3631 the insns of the function. */
3632
3633static void
3634mark_not_eliminable (dest, x)
3635 rtx dest;
3636 rtx x;
3637{
3638 register int i;
3639
3640 /* A SUBREG of a hard register here is just changing its mode. We should
3641 not see a SUBREG of an eliminable hard register, but check just in
3642 case. */
3643 if (GET_CODE (dest) == SUBREG)
3644 dest = SUBREG_REG (dest);
3645
3ec2ea3e 3646 if (dest == hard_frame_pointer_rtx)
32131a9c
RK
3647 return;
3648
3649 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3650 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3651 && (GET_CODE (x) != SET
3652 || GET_CODE (SET_SRC (x)) != PLUS
3653 || XEXP (SET_SRC (x), 0) != dest
3654 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3655 {
3656 reg_eliminate[i].can_eliminate_previous
3657 = reg_eliminate[i].can_eliminate = 0;
3658 num_eliminable--;
3659 }
3660}
3661\f
3662/* Kick all pseudos out of hard register REGNO.
3663 If GLOBAL is nonzero, try to find someplace else to put them.
3664 If DUMPFILE is nonzero, log actions taken on that file.
3665
3666 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3667 because we found we can't eliminate some register. In the case, no pseudos
3668 are allowed to be in the register, even if they are only in a block that
3669 doesn't require spill registers, unlike the case when we are spilling this
3670 hard reg to produce another spill register.
3671
3672 Return nonzero if any pseudos needed to be kicked out. */
3673
3674static int
3675spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3676 register int regno;
3677 int global;
3678 FILE *dumpfile;
3679 int cant_eliminate;
3680{
c307c237 3681 enum reg_class class = REGNO_REG_CLASS (regno);
32131a9c
RK
3682 int something_changed = 0;
3683 register int i;
3684
3685 SET_HARD_REG_BIT (forbidden_regs, regno);
3686
9ff3516a
RK
3687 if (cant_eliminate)
3688 regs_ever_live[regno] = 1;
3689
32131a9c
RK
3690 /* Spill every pseudo reg that was allocated to this reg
3691 or to something that overlaps this reg. */
3692
3693 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3694 if (reg_renumber[i] >= 0
3695 && reg_renumber[i] <= regno
a8fdc208 3696 && (reg_renumber[i]
32131a9c
RK
3697 + HARD_REGNO_NREGS (reg_renumber[i],
3698 PSEUDO_REGNO_MODE (i))
3699 > regno))
3700 {
32131a9c
RK
3701 /* If this register belongs solely to a basic block which needed no
3702 spilling of any class that this register is contained in,
3703 leave it be, unless we are spilling this register because
3704 it was a hard register that can't be eliminated. */
3705
3706 if (! cant_eliminate
3707 && basic_block_needs[0]
b1f21e0a
MM
3708 && REG_BASIC_BLOCK (i) >= 0
3709 && basic_block_needs[(int) class][REG_BASIC_BLOCK (i)] == 0)
32131a9c
RK
3710 {
3711 enum reg_class *p;
3712
3713 for (p = reg_class_superclasses[(int) class];
3714 *p != LIM_REG_CLASSES; p++)
b1f21e0a 3715 if (basic_block_needs[(int) *p][REG_BASIC_BLOCK (i)] > 0)
32131a9c 3716 break;
a8fdc208 3717
32131a9c
RK
3718 if (*p == LIM_REG_CLASSES)
3719 continue;
3720 }
3721
3722 /* Mark it as no longer having a hard register home. */
3723 reg_renumber[i] = -1;
3724 /* We will need to scan everything again. */
3725 something_changed = 1;
3726 if (global)
2c5d9e37 3727 retry_global_alloc (i, forbidden_regs);
32131a9c
RK
3728
3729 alter_reg (i, regno);
3730 if (dumpfile)
3731 {
3732 if (reg_renumber[i] == -1)
3733 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3734 else
3735 fprintf (dumpfile, " Register %d now in %d.\n\n",
3736 i, reg_renumber[i]);
3737 }
3738 }
c307c237
RK
3739 for (i = 0; i < scratch_list_length; i++)
3740 {
4fdf79cb
CM
3741 if (scratch_list[i]
3742 && regno >= REGNO (scratch_list[i])
3743 && regno < REGNO (scratch_list[i])
3744 + HARD_REGNO_NREGS (REGNO (scratch_list[i]),
3745 GET_MODE (scratch_list[i])))
c307c237
RK
3746 {
3747 if (! cant_eliminate && basic_block_needs[0]
3748 && ! basic_block_needs[(int) class][scratch_block[i]])
3749 {
3750 enum reg_class *p;
3751
3752 for (p = reg_class_superclasses[(int) class];
3753 *p != LIM_REG_CLASSES; p++)
3754 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3755 break;
3756
3757 if (*p == LIM_REG_CLASSES)
3758 continue;
3759 }
3760 PUT_CODE (scratch_list[i], SCRATCH);
3761 scratch_list[i] = 0;
3762 something_changed = 1;
3763 continue;
3764 }
3765 }
32131a9c
RK
3766
3767 return something_changed;
3768}
3769\f
56f58d3a
RK
3770/* Find all paradoxical subregs within X and update reg_max_ref_width.
3771 Also mark any hard registers used to store user variables as
3772 forbidden from being used for spill registers. */
32131a9c
RK
3773
3774static void
3775scan_paradoxical_subregs (x)
3776 register rtx x;
3777{
3778 register int i;
3779 register char *fmt;
3780 register enum rtx_code code = GET_CODE (x);
3781
3782 switch (code)
3783 {
56f58d3a 3784 case REG:
e9a25f70 3785 if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
f95182a4 3786 && REG_USERVAR_P (x))
56f58d3a 3787 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
56f58d3a
RK
3788 return;
3789
32131a9c
RK
3790 case CONST_INT:
3791 case CONST:
3792 case SYMBOL_REF:
3793 case LABEL_REF:
3794 case CONST_DOUBLE:
3795 case CC0:
3796 case PC:
32131a9c
RK
3797 case USE:
3798 case CLOBBER:
3799 return;
3800
3801 case SUBREG:
3802 if (GET_CODE (SUBREG_REG (x)) == REG
3803 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3804 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3805 = GET_MODE_SIZE (GET_MODE (x));
3806 return;
e9a25f70
JL
3807
3808 default:
3809 break;
32131a9c
RK
3810 }
3811
3812 fmt = GET_RTX_FORMAT (code);
3813 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3814 {
3815 if (fmt[i] == 'e')
3816 scan_paradoxical_subregs (XEXP (x, i));
3817 else if (fmt[i] == 'E')
3818 {
3819 register int j;
3820 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3821 scan_paradoxical_subregs (XVECEXP (x, i, j));
3822 }
3823 }
3824}
3825\f
32131a9c 3826static int
788a0818
RK
3827hard_reg_use_compare (p1p, p2p)
3828 const GENERIC_PTR p1p;
3829 const GENERIC_PTR p2p;
32131a9c 3830{
788a0818
RK
3831 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p,
3832 *p2 = (struct hard_reg_n_uses *)p2p;
32131a9c
RK
3833 int tem = p1->uses - p2->uses;
3834 if (tem != 0) return tem;
3835 /* If regs are equally good, sort by regno,
3836 so that the results of qsort leave nothing to chance. */
3837 return p1->regno - p2->regno;
3838}
3839
3840/* Choose the order to consider regs for use as reload registers
3841 based on how much trouble would be caused by spilling one.
3842 Store them in order of decreasing preference in potential_reload_regs. */
3843
3844static void
2c5d9e37
RK
3845order_regs_for_reload (global)
3846 int global;
32131a9c
RK
3847{
3848 register int i;
3849 register int o = 0;
3850 int large = 0;
3851
3852 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3853
3854 CLEAR_HARD_REG_SET (bad_spill_regs);
3855
3856 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3857 potential_reload_regs[i] = -1;
3858
3859 /* Count number of uses of each hard reg by pseudo regs allocated to it
3860 and then order them by decreasing use. */
3861
3862 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3863 {
3864 hard_reg_n_uses[i].uses = 0;
3865 hard_reg_n_uses[i].regno = i;
3866 }
3867
3868 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3869 {
3870 int regno = reg_renumber[i];
3871 if (regno >= 0)
3872 {
3873 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3874 while (regno < lim)
2c5d9e37
RK
3875 {
3876 /* If allocated by local-alloc, show more uses since
3877 we're not going to be able to reallocate it, but
3878 we might if allocated by global alloc. */
3879 if (global && reg_allocno[i] < 0)
b1f21e0a 3880 hard_reg_n_uses[regno].uses += (REG_N_REFS (i) + 1) / 2;
2c5d9e37 3881
b1f21e0a 3882 hard_reg_n_uses[regno++].uses += REG_N_REFS (i);
2c5d9e37 3883 }
32131a9c 3884 }
b1f21e0a 3885 large += REG_N_REFS (i);
32131a9c
RK
3886 }
3887
3888 /* Now fixed registers (which cannot safely be used for reloading)
3889 get a very high use count so they will be considered least desirable.
3890 Registers used explicitly in the rtl code are almost as bad. */
3891
3892 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3893 {
3894 if (fixed_regs[i])
3895 {
3896 hard_reg_n_uses[i].uses += 2 * large + 2;
3897 SET_HARD_REG_BIT (bad_spill_regs, i);
3898 }
3899 else if (regs_explicitly_used[i])
3900 {
3901 hard_reg_n_uses[i].uses += large + 1;
f95182a4 3902 if (! SMALL_REGISTER_CLASSES)
e9a25f70
JL
3903 /* ??? We are doing this here because of the potential
3904 that bad code may be generated if a register explicitly
3905 used in an insn was used as a spill register for that
3906 insn. But not using these are spill registers may lose
3907 on some machine. We'll have to see how this works out. */
f95182a4 3908 SET_HARD_REG_BIT (bad_spill_regs, i);
32131a9c
RK
3909 }
3910 }
3ec2ea3e
DE
3911 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3912 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
32131a9c
RK
3913
3914#ifdef ELIMINABLE_REGS
3915 /* If registers other than the frame pointer are eliminable, mark them as
3916 poor choices. */
3917 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3918 {
3919 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3920 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3921 }
3922#endif
3923
3924 /* Prefer registers not so far used, for use in temporary loading.
3925 Among them, if REG_ALLOC_ORDER is defined, use that order.
3926 Otherwise, prefer registers not preserved by calls. */
3927
3928#ifdef REG_ALLOC_ORDER
3929 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3930 {
3931 int regno = reg_alloc_order[i];
3932
3933 if (hard_reg_n_uses[regno].uses == 0)
3934 potential_reload_regs[o++] = regno;
3935 }
3936#else
3937 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3938 {
3939 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3940 potential_reload_regs[o++] = i;
3941 }
3942 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3943 {
3944 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3945 potential_reload_regs[o++] = i;
3946 }
3947#endif
3948
3949 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3950 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3951
3952 /* Now add the regs that are already used,
3953 preferring those used less often. The fixed and otherwise forbidden
3954 registers will be at the end of this list. */
3955
3956 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3957 if (hard_reg_n_uses[i].uses != 0)
3958 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3959}
3960\f
a5339699 3961/* Used in reload_as_needed to sort the spilled regs. */
2f23a46d 3962
a5339699 3963static int
788a0818
RK
3964compare_spill_regs (r1p, r2p)
3965 const GENERIC_PTR r1p;
3966 const GENERIC_PTR r2p;
a5339699 3967{
788a0818
RK
3968 short r1 = *(short *)r1p, r2 = *(short *)r2p;
3969 return r1 - r2;
a5339699
RK
3970}
3971
32131a9c
RK
3972/* Reload pseudo-registers into hard regs around each insn as needed.
3973 Additional register load insns are output before the insn that needs it
3974 and perhaps store insns after insns that modify the reloaded pseudo reg.
3975
3976 reg_last_reload_reg and reg_reloaded_contents keep track of
d08ea79f 3977 which registers are already available in reload registers.
32131a9c
RK
3978 We update these for the reloads that we perform,
3979 as the insns are scanned. */
3980
3981static void
3982reload_as_needed (first, live_known)
3983 rtx first;
3984 int live_known;
3985{
3986 register rtx insn;
3987 register int i;
3988 int this_block = 0;
3989 rtx x;
3990 rtx after_call = 0;
3991
4c9a05bc
RK
3992 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3993 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
32131a9c 3994 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
4c9a05bc 3995 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
32131a9c 3996 reg_has_output_reload = (char *) alloca (max_regno);
e6e52be0 3997 CLEAR_HARD_REG_SET (reg_reloaded_valid);
32131a9c
RK
3998
3999 /* Reset all offsets on eliminable registers to their initial values. */
4000#ifdef ELIMINABLE_REGS
4001 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4002 {
4003 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
510dd77e 4004 reg_eliminate[i].initial_offset);
32131a9c
RK
4005 reg_eliminate[i].previous_offset
4006 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
4007 }
4008#else
4009 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
4010 reg_eliminate[0].previous_offset
4011 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
4012#endif
4013
4014 num_not_at_initial_offset = 0;
4015
a5339699
RK
4016 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
4017 pack registers with group needs. */
4018 if (n_spills > 1)
5f40cc2d
RK
4019 {
4020 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
4021 for (i = 0; i < n_spills; i++)
4022 spill_reg_order[spill_regs[i]] = i;
4023 }
a5339699 4024
32131a9c
RK
4025 for (insn = first; insn;)
4026 {
4027 register rtx next = NEXT_INSN (insn);
4028
4029 /* Notice when we move to a new basic block. */
aa2c50d6 4030 if (live_known && this_block + 1 < n_basic_blocks
32131a9c
RK
4031 && insn == basic_block_head[this_block+1])
4032 ++this_block;
4033
4034 /* If we pass a label, copy the offsets from the label information
4035 into the current offsets of each elimination. */
4036 if (GET_CODE (insn) == CODE_LABEL)
2a4b5f3b
RK
4037 {
4038 num_not_at_initial_offset = 0;
4039 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
4040 {
4041 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
4042 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
1d0d98f3
RK
4043 if (reg_eliminate[i].can_eliminate
4044 && (reg_eliminate[i].offset
4045 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
4046 num_not_at_initial_offset++;
4047 }
4048 }
32131a9c
RK
4049
4050 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4051 {
4052 rtx avoid_return_reg = 0;
0639444f 4053 rtx oldpat = PATTERN (insn);
32131a9c 4054
32131a9c
RK
4055 /* Set avoid_return_reg if this is an insn
4056 that might use the value of a function call. */
f95182a4 4057 if (SMALL_REGISTER_CLASSES && GET_CODE (insn) == CALL_INSN)
32131a9c
RK
4058 {
4059 if (GET_CODE (PATTERN (insn)) == SET)
4060 after_call = SET_DEST (PATTERN (insn));
4061 else if (GET_CODE (PATTERN (insn)) == PARALLEL
4062 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
4063 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
4064 else
4065 after_call = 0;
4066 }
e9a25f70 4067 else if (SMALL_REGISTER_CLASSES && after_call != 0
32131a9c 4068 && !(GET_CODE (PATTERN (insn)) == SET
b60a8416
R
4069 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx)
4070 && GET_CODE (PATTERN (insn)) != USE)
32131a9c 4071 {
2b979c57 4072 if (reg_referenced_p (after_call, PATTERN (insn)))
32131a9c
RK
4073 avoid_return_reg = after_call;
4074 after_call = 0;
4075 }
32131a9c 4076
2758481d
RS
4077 /* If this is a USE and CLOBBER of a MEM, ensure that any
4078 references to eliminable registers have been removed. */
4079
4080 if ((GET_CODE (PATTERN (insn)) == USE
4081 || GET_CODE (PATTERN (insn)) == CLOBBER)
4082 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
4083 XEXP (XEXP (PATTERN (insn), 0), 0)
4084 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
29ae5012 4085 GET_MODE (XEXP (PATTERN (insn), 0)),
1914f5da 4086 NULL_RTX);
2758481d 4087
32131a9c
RK
4088 /* If we need to do register elimination processing, do so.
4089 This might delete the insn, in which case we are done. */
4090 if (num_eliminable && GET_MODE (insn) == QImode)
4091 {
4092 eliminate_regs_in_insn (insn, 1);
4093 if (GET_CODE (insn) == NOTE)
4094 {
4095 insn = next;
4096 continue;
4097 }
4098 }
4099
4100 if (GET_MODE (insn) == VOIDmode)
4101 n_reloads = 0;
4102 /* First find the pseudo regs that must be reloaded for this insn.
4103 This info is returned in the tables reload_... (see reload.h).
4104 Also modify the body of INSN by substituting RELOAD
4105 rtx's for those pseudo regs. */
4106 else
4107 {
4108 bzero (reg_has_output_reload, max_regno);
4109 CLEAR_HARD_REG_SET (reg_is_output_reload);
4110
4111 find_reloads (insn, 1, spill_indirect_levels, live_known,
4112 spill_reg_order);
4113 }
4114
4115 if (n_reloads > 0)
4116 {
3c3eeea6
RK
4117 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
4118 rtx p;
32131a9c
RK
4119 int class;
4120
4121 /* If this block has not had spilling done for a
546b63fb
RK
4122 particular clas and we have any non-optionals that need a
4123 spill reg in that class, abort. */
32131a9c
RK
4124
4125 for (class = 0; class < N_REG_CLASSES; class++)
4126 if (basic_block_needs[class] != 0
4127 && basic_block_needs[class][this_block] == 0)
4128 for (i = 0; i < n_reloads; i++)
546b63fb
RK
4129 if (class == (int) reload_reg_class[i]
4130 && reload_reg_rtx[i] == 0
4131 && ! reload_optional[i]
4132 && (reload_in[i] != 0 || reload_out[i] != 0
4133 || reload_secondary_p[i] != 0))
a89b2cc4 4134 fatal_insn ("Non-optional registers need a spill register", insn);
32131a9c
RK
4135
4136 /* Now compute which reload regs to reload them into. Perhaps
4137 reusing reload regs from previous insns, or else output
4138 load insns to reload them. Maybe output store insns too.
4139 Record the choices of reload reg in reload_reg_rtx. */
4140 choose_reload_regs (insn, avoid_return_reg);
4141
546b63fb
RK
4142 /* Merge any reloads that we didn't combine for fear of
4143 increasing the number of spill registers needed but now
4144 discover can be safely merged. */
f95182a4
ILT
4145 if (SMALL_REGISTER_CLASSES)
4146 merge_assigned_reloads (insn);
546b63fb 4147
32131a9c
RK
4148 /* Generate the insns to reload operands into or out of
4149 their reload regs. */
4150 emit_reload_insns (insn);
4151
4152 /* Substitute the chosen reload regs from reload_reg_rtx
4153 into the insn's body (or perhaps into the bodies of other
4154 load and store insn that we just made for reloading
4155 and that we moved the structure into). */
4156 subst_reloads ();
3c3eeea6
RK
4157
4158 /* If this was an ASM, make sure that all the reload insns
4159 we have generated are valid. If not, give an error
4160 and delete them. */
4161
4162 if (asm_noperands (PATTERN (insn)) >= 0)
4163 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4164 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
4165 && (recog_memoized (p) < 0
4166 || (insn_extract (p),
4167 ! constrain_operands (INSN_CODE (p), 1))))
4168 {
4169 error_for_asm (insn,
4170 "`asm' operand requires impossible reload");
4171 PUT_CODE (p, NOTE);
4172 NOTE_SOURCE_FILE (p) = 0;
4173 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
4174 }
32131a9c
RK
4175 }
4176 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4177 is no longer validly lying around to save a future reload.
4178 Note that this does not detect pseudos that were reloaded
4179 for this insn in order to be stored in
4180 (obeying register constraints). That is correct; such reload
4181 registers ARE still valid. */
0639444f 4182 note_stores (oldpat, forget_old_reloads_1);
32131a9c
RK
4183
4184 /* There may have been CLOBBER insns placed after INSN. So scan
4185 between INSN and NEXT and use them to forget old reloads. */
4186 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
4187 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4188 note_stores (PATTERN (x), forget_old_reloads_1);
4189
4190#ifdef AUTO_INC_DEC
4191 /* Likewise for regs altered by auto-increment in this insn.
4192 But note that the reg-notes are not changed by reloading:
4193 they still contain the pseudo-regs, not the spill regs. */
4194 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4195 if (REG_NOTE_KIND (x) == REG_INC)
4196 {
4197 /* See if this pseudo reg was reloaded in this insn.
4198 If so, its last-reload info is still valid
4199 because it is based on this insn's reload. */
4200 for (i = 0; i < n_reloads; i++)
4201 if (reload_out[i] == XEXP (x, 0))
4202 break;
4203
08fb99fa 4204 if (i == n_reloads)
9a881562 4205 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
32131a9c
RK
4206 }
4207#endif
4208 }
4209 /* A reload reg's contents are unknown after a label. */
4210 if (GET_CODE (insn) == CODE_LABEL)
e6e52be0 4211 CLEAR_HARD_REG_SET (reg_reloaded_valid);
32131a9c
RK
4212
4213 /* Don't assume a reload reg is still good after a call insn
4214 if it is a call-used reg. */
546b63fb 4215 else if (GET_CODE (insn) == CALL_INSN)
e6e52be0 4216 AND_COMPL_HARD_REG_SET(reg_reloaded_valid, call_used_reg_set);
32131a9c
RK
4217
4218 /* In case registers overlap, allow certain insns to invalidate
4219 particular hard registers. */
4220
4221#ifdef INSN_CLOBBERS_REGNO_P
e6e52be0
R
4222 for (i = 0 ; i < FIRST_PSEUDO_REGISTER; i++)
4223 if (TEST_HARD_REG_BIT (reg_reloaded_valid, i)
4224 && INSN_CLOBBERS_REGNO_P (insn, i))
4225 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i);
32131a9c
RK
4226#endif
4227
4228 insn = next;
4229
4230#ifdef USE_C_ALLOCA
4231 alloca (0);
4232#endif
4233 }
4234}
4235
4236/* Discard all record of any value reloaded from X,
4237 or reloaded in X from someplace else;
4238 unless X is an output reload reg of the current insn.
4239
4240 X may be a hard reg (the reload reg)
4241 or it may be a pseudo reg that was reloaded from. */
4242
4243static void
9a881562 4244forget_old_reloads_1 (x, ignored)
32131a9c 4245 rtx x;
487a6e06 4246 rtx ignored ATTRIBUTE_UNUSED;
32131a9c
RK
4247{
4248 register int regno;
4249 int nr;
0a2e51a9
RS
4250 int offset = 0;
4251
4252 /* note_stores does give us subregs of hard regs. */
4253 while (GET_CODE (x) == SUBREG)
4254 {
4255 offset += SUBREG_WORD (x);
4256 x = SUBREG_REG (x);
4257 }
32131a9c
RK
4258
4259 if (GET_CODE (x) != REG)
4260 return;
4261
0a2e51a9 4262 regno = REGNO (x) + offset;
32131a9c
RK
4263
4264 if (regno >= FIRST_PSEUDO_REGISTER)
4265 nr = 1;
4266 else
4267 {
4268 int i;
4269 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4270 /* Storing into a spilled-reg invalidates its contents.
4271 This can happen if a block-local pseudo is allocated to that reg
4272 and it wasn't spilled because this block's total need is 0.
4273 Then some insn might have an optional reload and use this reg. */
4274 for (i = 0; i < nr; i++)
e6e52be0
R
4275 /* But don't do this if the reg actually serves as an output
4276 reload reg in the current instruction. */
4277 if (n_reloads == 0
4278 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4279 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
32131a9c
RK
4280 }
4281
4282 /* Since value of X has changed,
4283 forget any value previously copied from it. */
4284
4285 while (nr-- > 0)
4286 /* But don't forget a copy if this is the output reload
4287 that establishes the copy's validity. */
4288 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4289 reg_last_reload_reg[regno + nr] = 0;
4290}
4291\f
4292/* For each reload, the mode of the reload register. */
4293static enum machine_mode reload_mode[MAX_RELOADS];
4294
4295/* For each reload, the largest number of registers it will require. */
4296static int reload_nregs[MAX_RELOADS];
4297
4298/* Comparison function for qsort to decide which of two reloads
4299 should be handled first. *P1 and *P2 are the reload numbers. */
4300
4301static int
788a0818
RK
4302reload_reg_class_lower (r1p, r2p)
4303 const GENERIC_PTR r1p;
4304 const GENERIC_PTR r2p;
32131a9c 4305{
788a0818 4306 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
32131a9c 4307 register int t;
a8fdc208 4308
32131a9c
RK
4309 /* Consider required reloads before optional ones. */
4310 t = reload_optional[r1] - reload_optional[r2];
4311 if (t != 0)
4312 return t;
4313
4314 /* Count all solitary classes before non-solitary ones. */
4315 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4316 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4317 if (t != 0)
4318 return t;
4319
4320 /* Aside from solitaires, consider all multi-reg groups first. */
4321 t = reload_nregs[r2] - reload_nregs[r1];
4322 if (t != 0)
4323 return t;
4324
4325 /* Consider reloads in order of increasing reg-class number. */
4326 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4327 if (t != 0)
4328 return t;
4329
4330 /* If reloads are equally urgent, sort by reload number,
4331 so that the results of qsort leave nothing to chance. */
4332 return r1 - r2;
4333}
4334\f
4335/* The following HARD_REG_SETs indicate when each hard register is
4336 used for a reload of various parts of the current insn. */
4337
4338/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4339static HARD_REG_SET reload_reg_used;
546b63fb
RK
4340/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4341static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
47c8cf91
ILT
4342/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4343static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
546b63fb
RK
4344/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4345static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
47c8cf91
ILT
4346/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4347static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
546b63fb
RK
4348/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4349static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4350/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4351static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c
RK
4352/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4353static HARD_REG_SET reload_reg_used_in_op_addr;
893bc853
RK
4354/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4355static HARD_REG_SET reload_reg_used_in_op_addr_reload;
546b63fb
RK
4356/* If reg is in use for a RELOAD_FOR_INSN reload. */
4357static HARD_REG_SET reload_reg_used_in_insn;
4358/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4359static HARD_REG_SET reload_reg_used_in_other_addr;
32131a9c
RK
4360
4361/* If reg is in use as a reload reg for any sort of reload. */
4362static HARD_REG_SET reload_reg_used_at_all;
4363
be7ae2a4
RK
4364/* If reg is use as an inherited reload. We just mark the first register
4365 in the group. */
4366static HARD_REG_SET reload_reg_used_for_inherit;
4367
546b63fb
RK
4368/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4369 TYPE. MODE is used to indicate how many consecutive regs are
4370 actually used. */
32131a9c
RK
4371
4372static void
546b63fb 4373mark_reload_reg_in_use (regno, opnum, type, mode)
32131a9c 4374 int regno;
546b63fb
RK
4375 int opnum;
4376 enum reload_type type;
32131a9c
RK
4377 enum machine_mode mode;
4378{
4379 int nregs = HARD_REGNO_NREGS (regno, mode);
4380 int i;
4381
4382 for (i = regno; i < nregs + regno; i++)
4383 {
546b63fb 4384 switch (type)
32131a9c
RK
4385 {
4386 case RELOAD_OTHER:
4387 SET_HARD_REG_BIT (reload_reg_used, i);
4388 break;
4389
546b63fb
RK
4390 case RELOAD_FOR_INPUT_ADDRESS:
4391 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
32131a9c
RK
4392 break;
4393
47c8cf91
ILT
4394 case RELOAD_FOR_INPADDR_ADDRESS:
4395 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4396 break;
4397
546b63fb
RK
4398 case RELOAD_FOR_OUTPUT_ADDRESS:
4399 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
32131a9c
RK
4400 break;
4401
47c8cf91
ILT
4402 case RELOAD_FOR_OUTADDR_ADDRESS:
4403 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4404 break;
4405
32131a9c
RK
4406 case RELOAD_FOR_OPERAND_ADDRESS:
4407 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4408 break;
4409
893bc853
RK
4410 case RELOAD_FOR_OPADDR_ADDR:
4411 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4412 break;
4413
546b63fb
RK
4414 case RELOAD_FOR_OTHER_ADDRESS:
4415 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4416 break;
4417
32131a9c 4418 case RELOAD_FOR_INPUT:
546b63fb 4419 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
32131a9c
RK
4420 break;
4421
4422 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4423 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4424 break;
4425
4426 case RELOAD_FOR_INSN:
4427 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
32131a9c
RK
4428 break;
4429 }
4430
4431 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4432 }
4433}
4434
be7ae2a4
RK
4435/* Similarly, but show REGNO is no longer in use for a reload. */
4436
4437static void
4438clear_reload_reg_in_use (regno, opnum, type, mode)
4439 int regno;
4440 int opnum;
4441 enum reload_type type;
4442 enum machine_mode mode;
4443{
4444 int nregs = HARD_REGNO_NREGS (regno, mode);
4445 int i;
4446
4447 for (i = regno; i < nregs + regno; i++)
4448 {
4449 switch (type)
4450 {
4451 case RELOAD_OTHER:
4452 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4453 break;
4454
4455 case RELOAD_FOR_INPUT_ADDRESS:
4456 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4457 break;
4458
47c8cf91
ILT
4459 case RELOAD_FOR_INPADDR_ADDRESS:
4460 CLEAR_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4461 break;
4462
be7ae2a4
RK
4463 case RELOAD_FOR_OUTPUT_ADDRESS:
4464 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4465 break;
4466
47c8cf91
ILT
4467 case RELOAD_FOR_OUTADDR_ADDRESS:
4468 CLEAR_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4469 break;
4470
be7ae2a4
RK
4471 case RELOAD_FOR_OPERAND_ADDRESS:
4472 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4473 break;
4474
893bc853
RK
4475 case RELOAD_FOR_OPADDR_ADDR:
4476 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4477 break;
4478
be7ae2a4
RK
4479 case RELOAD_FOR_OTHER_ADDRESS:
4480 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4481 break;
4482
4483 case RELOAD_FOR_INPUT:
4484 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4485 break;
4486
4487 case RELOAD_FOR_OUTPUT:
4488 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4489 break;
4490
4491 case RELOAD_FOR_INSN:
4492 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4493 break;
4494 }
4495 }
4496}
4497
32131a9c 4498/* 1 if reg REGNO is free as a reload reg for a reload of the sort
546b63fb 4499 specified by OPNUM and TYPE. */
32131a9c
RK
4500
4501static int
546b63fb 4502reload_reg_free_p (regno, opnum, type)
32131a9c 4503 int regno;
546b63fb
RK
4504 int opnum;
4505 enum reload_type type;
32131a9c 4506{
546b63fb
RK
4507 int i;
4508
2edc8d65
RK
4509 /* In use for a RELOAD_OTHER means it's not available for anything. */
4510 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
32131a9c 4511 return 0;
546b63fb
RK
4512
4513 switch (type)
32131a9c
RK
4514 {
4515 case RELOAD_OTHER:
2edc8d65
RK
4516 /* In use for anything means we can't use it for RELOAD_OTHER. */
4517 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
224f1d71
RK
4518 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4519 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4520 return 0;
4521
4522 for (i = 0; i < reload_n_operands; i++)
4523 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
47c8cf91 4524 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
224f1d71 4525 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 4526 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
224f1d71
RK
4527 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4528 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4529 return 0;
4530
4531 return 1;
32131a9c 4532
32131a9c 4533 case RELOAD_FOR_INPUT:
546b63fb
RK
4534 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4535 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4536 return 0;
4537
893bc853
RK
4538 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4539 return 0;
4540
546b63fb
RK
4541 /* If it is used for some other input, can't use it. */
4542 for (i = 0; i < reload_n_operands; i++)
4543 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4544 return 0;
4545
4546 /* If it is used in a later operand's address, can't use it. */
4547 for (i = opnum + 1; i < reload_n_operands; i++)
47c8cf91
ILT
4548 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4549 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
546b63fb
RK
4550 return 0;
4551
4552 return 1;
4553
4554 case RELOAD_FOR_INPUT_ADDRESS:
4555 /* Can't use a register if it is used for an input address for this
4556 operand or used as an input in an earlier one. */
47c8cf91
ILT
4557 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4558 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4559 return 0;
4560
4561 for (i = 0; i < opnum; i++)
4562 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4563 return 0;
4564
4565 return 1;
4566
4567 case RELOAD_FOR_INPADDR_ADDRESS:
4568 /* Can't use a register if it is used for an input address
38e01259 4569 for this operand or used as an input in an earlier
47c8cf91
ILT
4570 one. */
4571 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
546b63fb
RK
4572 return 0;
4573
4574 for (i = 0; i < opnum; i++)
4575 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4576 return 0;
4577
4578 return 1;
4579
4580 case RELOAD_FOR_OUTPUT_ADDRESS:
4581 /* Can't use a register if it is used for an output address for this
4582 operand or used as an output in this or a later operand. */
4583 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4584 return 0;
4585
4586 for (i = opnum; i < reload_n_operands; i++)
4587 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4588 return 0;
4589
4590 return 1;
4591
47c8cf91
ILT
4592 case RELOAD_FOR_OUTADDR_ADDRESS:
4593 /* Can't use a register if it is used for an output address
38e01259 4594 for this operand or used as an output in this or a
47c8cf91
ILT
4595 later operand. */
4596 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4597 return 0;
4598
4599 for (i = opnum; i < reload_n_operands; i++)
4600 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4601 return 0;
4602
4603 return 1;
4604
32131a9c 4605 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4606 for (i = 0; i < reload_n_operands; i++)
4607 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4608 return 0;
4609
4610 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4611 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4612
893bc853
RK
4613 case RELOAD_FOR_OPADDR_ADDR:
4614 for (i = 0; i < reload_n_operands; i++)
4615 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4616 return 0;
4617
a94ce333 4618 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
893bc853 4619
32131a9c 4620 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4621 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4622 outputs, or an operand address for this or an earlier output. */
4623 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4624 return 0;
4625
4626 for (i = 0; i < reload_n_operands; i++)
4627 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4628 return 0;
4629
4630 for (i = 0; i <= opnum; i++)
47c8cf91
ILT
4631 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4632 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
546b63fb
RK
4633 return 0;
4634
4635 return 1;
4636
4637 case RELOAD_FOR_INSN:
4638 for (i = 0; i < reload_n_operands; i++)
4639 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4640 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4641 return 0;
4642
4643 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4644 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4645
4646 case RELOAD_FOR_OTHER_ADDRESS:
4647 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4648 }
4649 abort ();
4650}
4651
4652/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4653 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4654 is not in use for a reload in any prior part of the insn.
4655
4656 We can assume that the reload reg was already tested for availability
4657 at the time it is needed, and we should not check this again,
4658 in case the reg has already been marked in use. */
4659
4660static int
546b63fb 4661reload_reg_free_before_p (regno, opnum, type)
32131a9c 4662 int regno;
546b63fb
RK
4663 int opnum;
4664 enum reload_type type;
32131a9c 4665{
546b63fb
RK
4666 int i;
4667
4668 switch (type)
32131a9c 4669 {
546b63fb
RK
4670 case RELOAD_FOR_OTHER_ADDRESS:
4671 /* These always come first. */
32131a9c
RK
4672 return 1;
4673
546b63fb
RK
4674 case RELOAD_OTHER:
4675 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4676
32131a9c 4677 /* If this use is for part of the insn,
546b63fb
RK
4678 check the reg is not in use for any prior part. It is tempting
4679 to try to do this by falling through from objecs that occur
4680 later in the insn to ones that occur earlier, but that will not
4681 correctly take into account the fact that here we MUST ignore
4682 things that would prevent the register from being allocated in
4683 the first place, since we know that it was allocated. */
4684
4685 case RELOAD_FOR_OUTPUT_ADDRESS:
d7921434 4686 /* Earlier reloads include RELOAD_FOR_OUTADDR_ADDRESS reloads. */
75528b80
R
4687 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4688 return 0;
4689 /* ... fall through ... */
47c8cf91 4690 case RELOAD_FOR_OUTADDR_ADDRESS:
546b63fb
RK
4691 /* Earlier reloads are for earlier outputs or their addresses,
4692 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4693 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4694 RELOAD_OTHER).. */
4695 for (i = 0; i < opnum; i++)
4696 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 4697 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
546b63fb
RK
4698 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4699 return 0;
4700
4701 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
32131a9c 4702 return 0;
546b63fb
RK
4703
4704 for (i = 0; i < reload_n_operands; i++)
4705 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
47c8cf91 4706 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
546b63fb
RK
4707 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4708 return 0;
4709
4710 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4711 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
979e20e9 4712 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
546b63fb
RK
4713 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4714
32131a9c 4715 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4716 /* This can't be used in the output address for this operand and
4717 anything that can't be used for it, except that we've already
4718 tested for RELOAD_FOR_INSN objects. */
4719
47c8cf91
ILT
4720 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno)
4721 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
32131a9c 4722 return 0;
546b63fb
RK
4723
4724 for (i = 0; i < opnum; i++)
4725 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 4726 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
546b63fb
RK
4727 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4728 return 0;
4729
4730 for (i = 0; i < reload_n_operands; i++)
4731 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
47c8cf91 4732 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
546b63fb
RK
4733 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4734 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4735 return 0;
4736
4737 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4738
32131a9c 4739 case RELOAD_FOR_OPERAND_ADDRESS:
a94ce333
JW
4740 /* Earlier reloads include RELOAD_FOR_OPADDR_ADDR reloads. */
4741 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4742 return 0;
4743
4744 /* ... fall through ... */
4745
893bc853 4746 case RELOAD_FOR_OPADDR_ADDR:
546b63fb
RK
4747 case RELOAD_FOR_INSN:
4748 /* These can't conflict with inputs, or each other, so all we have to
4749 test is input addresses and the addresses of OTHER items. */
4750
4751 for (i = 0; i < reload_n_operands; i++)
47c8cf91
ILT
4752 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4753 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
546b63fb
RK
4754 return 0;
4755
4756 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4757
32131a9c 4758 case RELOAD_FOR_INPUT:
5bc80b30
JL
4759 /* The only things earlier are the address for this and
4760 earlier inputs, other inputs (which we know we don't conflict
4761 with), and addresses of RELOAD_OTHER objects. */
546b63fb 4762
5bc80b30 4763 for (i = 0; i <= opnum; i++)
47c8cf91 4764 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5bc80b30 4765 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
546b63fb
RK
4766 return 0;
4767
4768 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4769
4770 case RELOAD_FOR_INPUT_ADDRESS:
75528b80
R
4771 /* Earlier reloads include RELOAD_FOR_INPADDR_ADDRESS reloads. */
4772 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4773 return 0;
4774 /* ... fall through ... */
47c8cf91 4775 case RELOAD_FOR_INPADDR_ADDRESS:
546b63fb
RK
4776 /* Similarly, all we have to check is for use in earlier inputs'
4777 addresses. */
4778 for (i = 0; i < opnum; i++)
47c8cf91
ILT
4779 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4780 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
546b63fb
RK
4781 return 0;
4782
4783 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4784 }
4785 abort ();
4786}
4787
4788/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4789 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4790 is still available in REGNO at the end of the insn.
4791
4792 We can assume that the reload reg was already tested for availability
4793 at the time it is needed, and we should not check this again,
4794 in case the reg has already been marked in use. */
4795
4796static int
546b63fb 4797reload_reg_reaches_end_p (regno, opnum, type)
32131a9c 4798 int regno;
546b63fb
RK
4799 int opnum;
4800 enum reload_type type;
32131a9c 4801{
546b63fb
RK
4802 int i;
4803
4804 switch (type)
32131a9c
RK
4805 {
4806 case RELOAD_OTHER:
4807 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4808 its value must reach the end. */
4809 return 1;
4810
4811 /* If this use is for part of the insn,
546b63fb
RK
4812 its value reaches if no subsequent part uses the same register.
4813 Just like the above function, don't try to do this with lots
4814 of fallthroughs. */
4815
4816 case RELOAD_FOR_OTHER_ADDRESS:
4817 /* Here we check for everything else, since these don't conflict
4818 with anything else and everything comes later. */
4819
4820 for (i = 0; i < reload_n_operands; i++)
4821 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 4822 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
546b63fb
RK
4823 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4824 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
47c8cf91 4825 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
546b63fb
RK
4826 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4827 return 0;
4828
4829 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4830 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4831 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4832
4833 case RELOAD_FOR_INPUT_ADDRESS:
47c8cf91 4834 case RELOAD_FOR_INPADDR_ADDRESS:
546b63fb
RK
4835 /* Similar, except that we check only for this and subsequent inputs
4836 and the address of only subsequent inputs and we do not need
4837 to check for RELOAD_OTHER objects since they are known not to
4838 conflict. */
4839
4840 for (i = opnum; i < reload_n_operands; i++)
4841 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4842 return 0;
4843
4844 for (i = opnum + 1; i < reload_n_operands; i++)
47c8cf91
ILT
4845 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4846 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
546b63fb
RK
4847 return 0;
4848
4849 for (i = 0; i < reload_n_operands; i++)
4850 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 4851 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
546b63fb
RK
4852 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4853 return 0;
4854
893bc853
RK
4855 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4856 return 0;
4857
546b63fb
RK
4858 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4859 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4860
32131a9c 4861 case RELOAD_FOR_INPUT:
546b63fb
RK
4862 /* Similar to input address, except we start at the next operand for
4863 both input and input address and we do not check for
4864 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4865 would conflict. */
4866
4867 for (i = opnum + 1; i < reload_n_operands; i++)
4868 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
47c8cf91 4869 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
546b63fb
RK
4870 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4871 return 0;
4872
0f41302f 4873 /* ... fall through ... */
546b63fb 4874
32131a9c 4875 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4876 /* Check outputs and their addresses. */
4877
4878 for (i = 0; i < reload_n_operands; i++)
4879 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 4880 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
546b63fb
RK
4881 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4882 return 0;
4883
4884 return 1;
4885
893bc853
RK
4886 case RELOAD_FOR_OPADDR_ADDR:
4887 for (i = 0; i < reload_n_operands; i++)
4888 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 4889 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
893bc853
RK
4890 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4891 return 0;
4892
a94ce333
JW
4893 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4894 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
893bc853 4895
546b63fb 4896 case RELOAD_FOR_INSN:
893bc853 4897 /* These conflict with other outputs with RELOAD_OTHER. So
546b63fb
RK
4898 we need only check for output addresses. */
4899
4900 opnum = -1;
4901
0f41302f 4902 /* ... fall through ... */
546b63fb 4903
32131a9c 4904 case RELOAD_FOR_OUTPUT:
546b63fb 4905 case RELOAD_FOR_OUTPUT_ADDRESS:
47c8cf91 4906 case RELOAD_FOR_OUTADDR_ADDRESS:
546b63fb
RK
4907 /* We already know these can't conflict with a later output. So the
4908 only thing to check are later output addresses. */
4909 for (i = opnum + 1; i < reload_n_operands; i++)
47c8cf91
ILT
4910 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4911 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
546b63fb
RK
4912 return 0;
4913
32131a9c
RK
4914 return 1;
4915 }
546b63fb 4916
32131a9c
RK
4917 abort ();
4918}
4919\f
351aa1c1
RK
4920/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4921 Return 0 otherwise.
4922
4923 This function uses the same algorithm as reload_reg_free_p above. */
4924
f5963e61 4925int
351aa1c1
RK
4926reloads_conflict (r1, r2)
4927 int r1, r2;
4928{
4929 enum reload_type r1_type = reload_when_needed[r1];
4930 enum reload_type r2_type = reload_when_needed[r2];
4931 int r1_opnum = reload_opnum[r1];
4932 int r2_opnum = reload_opnum[r2];
4933
2edc8d65
RK
4934 /* RELOAD_OTHER conflicts with everything. */
4935 if (r2_type == RELOAD_OTHER)
351aa1c1
RK
4936 return 1;
4937
4938 /* Otherwise, check conflicts differently for each type. */
4939
4940 switch (r1_type)
4941 {
4942 case RELOAD_FOR_INPUT:
4943 return (r2_type == RELOAD_FOR_INSN
4944 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
893bc853 4945 || r2_type == RELOAD_FOR_OPADDR_ADDR
351aa1c1 4946 || r2_type == RELOAD_FOR_INPUT
47c8cf91
ILT
4947 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4948 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4949 && r2_opnum > r1_opnum));
351aa1c1
RK
4950
4951 case RELOAD_FOR_INPUT_ADDRESS:
4952 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4953 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4954
47c8cf91
ILT
4955 case RELOAD_FOR_INPADDR_ADDRESS:
4956 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4957 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4958
351aa1c1
RK
4959 case RELOAD_FOR_OUTPUT_ADDRESS:
4960 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4961 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4962
47c8cf91
ILT
4963 case RELOAD_FOR_OUTADDR_ADDRESS:
4964 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4965 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4966
351aa1c1
RK
4967 case RELOAD_FOR_OPERAND_ADDRESS:
4968 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
a94ce333 4969 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
351aa1c1 4970
893bc853
RK
4971 case RELOAD_FOR_OPADDR_ADDR:
4972 return (r2_type == RELOAD_FOR_INPUT
a94ce333 4973 || r2_type == RELOAD_FOR_OPADDR_ADDR);
893bc853 4974
351aa1c1
RK
4975 case RELOAD_FOR_OUTPUT:
4976 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
47c8cf91
ILT
4977 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4978 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
351aa1c1
RK
4979 && r2_opnum >= r1_opnum));
4980
4981 case RELOAD_FOR_INSN:
4982 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4983 || r2_type == RELOAD_FOR_INSN
4984 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4985
4986 case RELOAD_FOR_OTHER_ADDRESS:
4987 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4988
adab4fc5 4989 case RELOAD_OTHER:
2edc8d65 4990 return 1;
adab4fc5 4991
351aa1c1
RK
4992 default:
4993 abort ();
4994 }
4995}
4996\f
32131a9c
RK
4997/* Vector of reload-numbers showing the order in which the reloads should
4998 be processed. */
4999short reload_order[MAX_RELOADS];
5000
5001/* Indexed by reload number, 1 if incoming value
5002 inherited from previous insns. */
5003char reload_inherited[MAX_RELOADS];
5004
5005/* For an inherited reload, this is the insn the reload was inherited from,
5006 if we know it. Otherwise, this is 0. */
5007rtx reload_inheritance_insn[MAX_RELOADS];
5008
5009/* If non-zero, this is a place to get the value of the reload,
5010 rather than using reload_in. */
5011rtx reload_override_in[MAX_RELOADS];
5012
e6e52be0
R
5013/* For each reload, the hard register number of the register used,
5014 or -1 if we did not need a register for this reload. */
32131a9c
RK
5015int reload_spill_index[MAX_RELOADS];
5016
6e684430
R
5017/* Return 1 if the value in reload reg REGNO, as used by a reload
5018 needed for the part of the insn specified by OPNUM and TYPE,
5019 may be used to load VALUE into it.
f5470689
R
5020
5021 Other read-only reloads with the same value do not conflict
5022 unless OUT is non-zero and these other reloads have to live while
5023 output reloads live.
5024
5025 RELOADNUM is the number of the reload we want to load this value for;
5026 a reload does not conflict with itself.
5027
6e684430
R
5028 The caller has to make sure that there is no conflict with the return
5029 register. */
5030static int
f5470689 5031reload_reg_free_for_value_p (regno, opnum, type, value, out, reloadnum)
6e684430
R
5032 int regno;
5033 int opnum;
5034 enum reload_type type;
f5470689
R
5035 rtx value, out;
5036 int reloadnum;
6e684430
R
5037{
5038 int time1;
5039 int i;
5040
5041 /* We use some pseudo 'time' value to check if the lifetimes of the
5042 new register use would overlap with the one of a previous reload
5043 that is not read-only or uses a different value.
5044 The 'time' used doesn't have to be linear in any shape or form, just
5045 monotonic.
5046 Some reload types use different 'buckets' for each operand.
5047 So there are MAX_RECOG_OPERANDS different time values for each
cecbf6e2
R
5048 such reload type.
5049 We compute TIME1 as the time when the register for the prospective
5050 new reload ceases to be live, and TIME2 for each existing
5051 reload as the time when that the reload register of that reload
5052 becomes live.
5053 Where there is little to be gained by exact lifetime calculations,
5054 we just make conservative assumptions, i.e. a longer lifetime;
5055 this is done in the 'default:' cases. */
6e684430
R
5056 switch (type)
5057 {
5058 case RELOAD_FOR_OTHER_ADDRESS:
5059 time1 = 0;
5060 break;
5061 /* For each input, we might have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5062 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2 ,
5063 respectively, to the time values for these, we get distinct time
5064 values. To get distinct time values for each operand, we have to
5065 multiply opnum by at least three. We round that up to four because
5066 multiply by four is often cheaper. */
5067 case RELOAD_FOR_INPADDR_ADDRESS:
5068 time1 = opnum * 4 + 1;
5069 break;
5070 case RELOAD_FOR_INPUT_ADDRESS:
5071 time1 = opnum * 4 + 2;
5072 break;
5073 case RELOAD_FOR_INPUT:
cecbf6e2
R
5074 /* All RELOAD_FOR_INPUT reloads remain live till just before the
5075 instruction is executed. */
5076 time1 = (MAX_RECOG_OPERANDS - 1) * 4 + 3;
6e684430
R
5077 break;
5078 /* opnum * 4 + 3 < opnum * 4 + 4
cecbf6e2 5079 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
6e684430
R
5080 case RELOAD_FOR_OUTPUT_ADDRESS:
5081 time1 = MAX_RECOG_OPERANDS * 4 + opnum;
5082 break;
5083 default:
5084 time1 = MAX_RECOG_OPERANDS * 5;
5085 }
5086
5087 for (i = 0; i < n_reloads; i++)
5088 {
5089 rtx reg = reload_reg_rtx[i];
5090 if (reg && GET_CODE (reg) == REG
5091 && ((unsigned) regno - true_regnum (reg)
83e0821b 5092 <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned)1)
f5470689 5093 && i != reloadnum)
6e684430 5094 {
f5470689
R
5095 if (out
5096 && reload_when_needed[i] != RELOAD_FOR_INPUT
5097 && reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
5098 && reload_when_needed[i] != RELOAD_FOR_INPADDR_ADDRESS)
5099 return 0;
5100 if (! reload_in[i] || ! rtx_equal_p (reload_in[i], value)
5101 || reload_out[i])
6e684430 5102 {
f5470689
R
5103 int time2;
5104 switch (reload_when_needed[i])
5105 {
5106 case RELOAD_FOR_OTHER_ADDRESS:
5107 time2 = 0;
5108 break;
5109 case RELOAD_FOR_INPADDR_ADDRESS:
5110 time2 = reload_opnum[i] * 4 + 1;
5111 break;
5112 case RELOAD_FOR_INPUT_ADDRESS:
5113 time2 = reload_opnum[i] * 4 + 2;
5114 break;
5115 case RELOAD_FOR_INPUT:
5116 time2 = reload_opnum[i] * 4 + 3;
5117 break;
5118 case RELOAD_FOR_OUTPUT:
5119 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5120 instruction is executed. */
5121 time2 = MAX_RECOG_OPERANDS * 4;
5122 break;
5123 /* The first RELOAD_FOR_OUTPUT_ADDRESS reload conflicts with the
5124 RELOAD_FOR_OUTPUT reloads, so assign it the same time value. */
5125 case RELOAD_FOR_OUTPUT_ADDRESS:
5126 time2 = MAX_RECOG_OPERANDS * 4 + reload_opnum[i];
5127 break;
5128 case RELOAD_OTHER:
5129 if (! reload_in[i] || rtx_equal_p (reload_in[i], value))
5130 {
5131 time2 = MAX_RECOG_OPERANDS * 4;
5132 break;
5133 }
5134 default:
5135 time2 = 0;
5136 }
5137 if (time1 >= time2)
5138 return 0;
6e684430 5139 }
6e684430
R
5140 }
5141 }
5142 return 1;
5143}
5144
32131a9c
RK
5145/* Find a spill register to use as a reload register for reload R.
5146 LAST_RELOAD is non-zero if this is the last reload for the insn being
5147 processed.
5148
5149 Set reload_reg_rtx[R] to the register allocated.
5150
5151 If NOERROR is nonzero, we return 1 if successful,
5152 or 0 if we couldn't find a spill reg and we didn't change anything. */
5153
5154static int
5155allocate_reload_reg (r, insn, last_reload, noerror)
5156 int r;
5157 rtx insn;
5158 int last_reload;
5159 int noerror;
5160{
5161 int i;
5162 int pass;
5163 int count;
5164 rtx new;
5165 int regno;
5166
5167 /* If we put this reload ahead, thinking it is a group,
5168 then insist on finding a group. Otherwise we can grab a
a8fdc208 5169 reg that some other reload needs.
32131a9c
RK
5170 (That can happen when we have a 68000 DATA_OR_FP_REG
5171 which is a group of data regs or one fp reg.)
5172 We need not be so restrictive if there are no more reloads
5173 for this insn.
5174
5175 ??? Really it would be nicer to have smarter handling
5176 for that kind of reg class, where a problem like this is normal.
5177 Perhaps those classes should be avoided for reloading
5178 by use of more alternatives. */
5179
5180 int force_group = reload_nregs[r] > 1 && ! last_reload;
5181
5182 /* If we want a single register and haven't yet found one,
5183 take any reg in the right class and not in use.
5184 If we want a consecutive group, here is where we look for it.
5185
5186 We use two passes so we can first look for reload regs to
5187 reuse, which are already in use for other reloads in this insn,
5188 and only then use additional registers.
5189 I think that maximizing reuse is needed to make sure we don't
5190 run out of reload regs. Suppose we have three reloads, and
5191 reloads A and B can share regs. These need two regs.
5192 Suppose A and B are given different regs.
5193 That leaves none for C. */
5194 for (pass = 0; pass < 2; pass++)
5195 {
5196 /* I is the index in spill_regs.
5197 We advance it round-robin between insns to use all spill regs
5198 equally, so that inherited reloads have a chance
a5339699
RK
5199 of leapfrogging each other. Don't do this, however, when we have
5200 group needs and failure would be fatal; if we only have a relatively
5201 small number of spill registers, and more than one of them has
5202 group needs, then by starting in the middle, we may end up
5203 allocating the first one in such a way that we are not left with
5204 sufficient groups to handle the rest. */
5205
5206 if (noerror || ! force_group)
5207 i = last_spill_reg;
5208 else
5209 i = -1;
5210
5211 for (count = 0; count < n_spills; count++)
32131a9c
RK
5212 {
5213 int class = (int) reload_reg_class[r];
5214
5215 i = (i + 1) % n_spills;
5216
6e684430
R
5217 if ((reload_reg_free_p (spill_regs[i], reload_opnum[r],
5218 reload_when_needed[r])
f5470689 5219 || (reload_in[r]
6e684430
R
5220 /* We check reload_reg_used to make sure we
5221 don't clobber the return register. */
5222 && ! TEST_HARD_REG_BIT (reload_reg_used, spill_regs[i])
5223 && reload_reg_free_for_value_p (spill_regs[i],
5224 reload_opnum[r],
5225 reload_when_needed[r],
f5470689
R
5226 reload_in[r],
5227 reload_out[r], r)))
32131a9c
RK
5228 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
5229 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
be7ae2a4
RK
5230 /* Look first for regs to share, then for unshared. But
5231 don't share regs used for inherited reloads; they are
5232 the ones we want to preserve. */
5233 && (pass
5234 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5235 spill_regs[i])
5236 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5237 spill_regs[i]))))
32131a9c
RK
5238 {
5239 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5240 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5241 (on 68000) got us two FP regs. If NR is 1,
5242 we would reject both of them. */
5243 if (force_group)
5244 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
5245 /* If we need only one reg, we have already won. */
5246 if (nr == 1)
5247 {
5248 /* But reject a single reg if we demand a group. */
5249 if (force_group)
5250 continue;
5251 break;
5252 }
5253 /* Otherwise check that as many consecutive regs as we need
5254 are available here.
5255 Also, don't use for a group registers that are
5256 needed for nongroups. */
5257 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
5258 while (nr > 1)
5259 {
5260 regno = spill_regs[i] + nr - 1;
5261 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5262 && spill_reg_order[regno] >= 0
546b63fb
RK
5263 && reload_reg_free_p (regno, reload_opnum[r],
5264 reload_when_needed[r])
32131a9c
RK
5265 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
5266 regno)))
5267 break;
5268 nr--;
5269 }
5270 if (nr == 1)
5271 break;
5272 }
5273 }
5274
5275 /* If we found something on pass 1, omit pass 2. */
5276 if (count < n_spills)
5277 break;
5278 }
5279
5280 /* We should have found a spill register by now. */
5281 if (count == n_spills)
5282 {
5283 if (noerror)
5284 return 0;
139fc12e 5285 goto failure;
32131a9c
RK
5286 }
5287
be7ae2a4
RK
5288 /* I is the index in SPILL_REG_RTX of the reload register we are to
5289 allocate. Get an rtx for it and find its register number. */
32131a9c
RK
5290
5291 new = spill_reg_rtx[i];
5292
5293 if (new == 0 || GET_MODE (new) != reload_mode[r])
be7ae2a4 5294 spill_reg_rtx[i] = new
38a448ca 5295 = gen_rtx_REG (reload_mode[r], spill_regs[i]);
be7ae2a4 5296
32131a9c
RK
5297 regno = true_regnum (new);
5298
5299 /* Detect when the reload reg can't hold the reload mode.
5300 This used to be one `if', but Sequent compiler can't handle that. */
5301 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5302 {
5303 enum machine_mode test_mode = VOIDmode;
5304 if (reload_in[r])
5305 test_mode = GET_MODE (reload_in[r]);
5306 /* If reload_in[r] has VOIDmode, it means we will load it
5307 in whatever mode the reload reg has: to wit, reload_mode[r].
5308 We have already tested that for validity. */
5309 /* Aside from that, we need to test that the expressions
5310 to reload from or into have modes which are valid for this
5311 reload register. Otherwise the reload insns would be invalid. */
5312 if (! (reload_in[r] != 0 && test_mode != VOIDmode
5313 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5314 if (! (reload_out[r] != 0
5315 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
be7ae2a4
RK
5316 {
5317 /* The reg is OK. */
5318 last_spill_reg = i;
5319
5320 /* Mark as in use for this insn the reload regs we use
5321 for this. */
5322 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
5323 reload_when_needed[r], reload_mode[r]);
5324
5325 reload_reg_rtx[r] = new;
e6e52be0 5326 reload_spill_index[r] = spill_regs[i];
be7ae2a4
RK
5327 return 1;
5328 }
32131a9c
RK
5329 }
5330
5331 /* The reg is not OK. */
5332 if (noerror)
5333 return 0;
5334
139fc12e 5335 failure:
32131a9c
RK
5336 if (asm_noperands (PATTERN (insn)) < 0)
5337 /* It's the compiler's fault. */
a89b2cc4 5338 fatal_insn ("Could not find a spill register", insn);
32131a9c
RK
5339
5340 /* It's the user's fault; the operand's mode and constraint
5341 don't match. Disable this reload so we don't crash in final. */
5342 error_for_asm (insn,
5343 "`asm' operand constraint incompatible with operand size");
5344 reload_in[r] = 0;
5345 reload_out[r] = 0;
5346 reload_reg_rtx[r] = 0;
5347 reload_optional[r] = 1;
5348 reload_secondary_p[r] = 1;
5349
5350 return 1;
5351}
5352\f
5353/* Assign hard reg targets for the pseudo-registers we must reload
5354 into hard regs for this insn.
5355 Also output the instructions to copy them in and out of the hard regs.
5356
5357 For machines with register classes, we are responsible for
5358 finding a reload reg in the proper class. */
5359
5360static void
5361choose_reload_regs (insn, avoid_return_reg)
5362 rtx insn;
32131a9c
RK
5363 rtx avoid_return_reg;
5364{
5365 register int i, j;
5366 int max_group_size = 1;
5367 enum reg_class group_class = NO_REGS;
5368 int inheritance;
5369
5370 rtx save_reload_reg_rtx[MAX_RELOADS];
5371 char save_reload_inherited[MAX_RELOADS];
5372 rtx save_reload_inheritance_insn[MAX_RELOADS];
5373 rtx save_reload_override_in[MAX_RELOADS];
5374 int save_reload_spill_index[MAX_RELOADS];
5375 HARD_REG_SET save_reload_reg_used;
546b63fb 5376 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
47c8cf91 5377 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
546b63fb 5378 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
47c8cf91 5379 HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
546b63fb
RK
5380 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5381 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c 5382 HARD_REG_SET save_reload_reg_used_in_op_addr;
893bc853 5383 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
546b63fb
RK
5384 HARD_REG_SET save_reload_reg_used_in_insn;
5385 HARD_REG_SET save_reload_reg_used_in_other_addr;
32131a9c
RK
5386 HARD_REG_SET save_reload_reg_used_at_all;
5387
5388 bzero (reload_inherited, MAX_RELOADS);
4c9a05bc
RK
5389 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5390 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
32131a9c
RK
5391
5392 CLEAR_HARD_REG_SET (reload_reg_used);
5393 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
32131a9c 5394 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
893bc853 5395 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
546b63fb
RK
5396 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5397 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
32131a9c 5398
546b63fb
RK
5399 for (i = 0; i < reload_n_operands; i++)
5400 {
5401 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5402 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5403 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
47c8cf91 5404 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
546b63fb 5405 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
47c8cf91 5406 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
546b63fb 5407 }
32131a9c 5408
32131a9c
RK
5409 /* Don't bother with avoiding the return reg
5410 if we have no mandatory reload that could use it. */
f95182a4 5411 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
32131a9c
RK
5412 {
5413 int do_avoid = 0;
5414 int regno = REGNO (avoid_return_reg);
5415 int nregs
5416 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5417 int r;
5418
5419 for (r = regno; r < regno + nregs; r++)
5420 if (spill_reg_order[r] >= 0)
5421 for (j = 0; j < n_reloads; j++)
5422 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5423 && (reload_in[j] != 0 || reload_out[j] != 0
5424 || reload_secondary_p[j])
5425 &&
5426 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5427 do_avoid = 1;
5428 if (!do_avoid)
5429 avoid_return_reg = 0;
5430 }
32131a9c
RK
5431
5432#if 0 /* Not needed, now that we can always retry without inheritance. */
5433 /* See if we have more mandatory reloads than spill regs.
5434 If so, then we cannot risk optimizations that could prevent
a8fdc208 5435 reloads from sharing one spill register.
32131a9c
RK
5436
5437 Since we will try finding a better register than reload_reg_rtx
5438 unless it is equal to reload_in or reload_out, count such reloads. */
5439
5440 {
e9a25f70 5441 int tem = SMALL_REGISTER_CLASSES? (avoid_return_reg != 0): 0;
32131a9c
RK
5442 for (j = 0; j < n_reloads; j++)
5443 if (! reload_optional[j]
5444 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5445 && (reload_reg_rtx[j] == 0
5446 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5447 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5448 tem++;
5449 if (tem > n_spills)
5450 must_reuse = 1;
5451 }
5452#endif
5453
32131a9c
RK
5454 /* Don't use the subroutine call return reg for a reload
5455 if we are supposed to avoid it. */
f95182a4 5456 if (SMALL_REGISTER_CLASSES && avoid_return_reg)
32131a9c
RK
5457 {
5458 int regno = REGNO (avoid_return_reg);
5459 int nregs
5460 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5461 int r;
5462
5463 for (r = regno; r < regno + nregs; r++)
5464 if (spill_reg_order[r] >= 0)
5465 SET_HARD_REG_BIT (reload_reg_used, r);
5466 }
32131a9c
RK
5467
5468 /* In order to be certain of getting the registers we need,
5469 we must sort the reloads into order of increasing register class.
5470 Then our grabbing of reload registers will parallel the process
a8fdc208 5471 that provided the reload registers.
32131a9c
RK
5472
5473 Also note whether any of the reloads wants a consecutive group of regs.
5474 If so, record the maximum size of the group desired and what
5475 register class contains all the groups needed by this insn. */
5476
5477 for (j = 0; j < n_reloads; j++)
5478 {
5479 reload_order[j] = j;
5480 reload_spill_index[j] = -1;
5481
5482 reload_mode[j]
546b63fb
RK
5483 = (reload_inmode[j] == VOIDmode
5484 || (GET_MODE_SIZE (reload_outmode[j])
5485 > GET_MODE_SIZE (reload_inmode[j])))
5486 ? reload_outmode[j] : reload_inmode[j];
32131a9c
RK
5487
5488 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5489
5490 if (reload_nregs[j] > 1)
5491 {
5492 max_group_size = MAX (reload_nregs[j], max_group_size);
5493 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5494 }
5495
5496 /* If we have already decided to use a certain register,
5497 don't use it in another way. */
5498 if (reload_reg_rtx[j])
546b63fb 5499 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
32131a9c
RK
5500 reload_when_needed[j], reload_mode[j]);
5501 }
5502
5503 if (n_reloads > 1)
5504 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5505
4c9a05bc
RK
5506 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5507 sizeof reload_reg_rtx);
32131a9c 5508 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4c9a05bc
RK
5509 bcopy ((char *) reload_inheritance_insn,
5510 (char *) save_reload_inheritance_insn,
32131a9c 5511 sizeof reload_inheritance_insn);
4c9a05bc 5512 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
32131a9c 5513 sizeof reload_override_in);
4c9a05bc 5514 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
32131a9c
RK
5515 sizeof reload_spill_index);
5516 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5517 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
32131a9c
RK
5518 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5519 reload_reg_used_in_op_addr);
893bc853
RK
5520
5521 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5522 reload_reg_used_in_op_addr_reload);
5523
546b63fb
RK
5524 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5525 reload_reg_used_in_insn);
5526 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5527 reload_reg_used_in_other_addr);
5528
5529 for (i = 0; i < reload_n_operands; i++)
5530 {
5531 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5532 reload_reg_used_in_output[i]);
5533 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5534 reload_reg_used_in_input[i]);
5535 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5536 reload_reg_used_in_input_addr[i]);
47c8cf91
ILT
5537 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
5538 reload_reg_used_in_inpaddr_addr[i]);
546b63fb
RK
5539 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5540 reload_reg_used_in_output_addr[i]);
47c8cf91
ILT
5541 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
5542 reload_reg_used_in_outaddr_addr[i]);
546b63fb 5543 }
32131a9c 5544
58b1581b
RS
5545 /* If -O, try first with inheritance, then turning it off.
5546 If not -O, don't do inheritance.
5547 Using inheritance when not optimizing leads to paradoxes
5548 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5549 because one side of the comparison might be inherited. */
32131a9c 5550
58b1581b 5551 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
32131a9c
RK
5552 {
5553 /* Process the reloads in order of preference just found.
5554 Beyond this point, subregs can be found in reload_reg_rtx.
5555
5556 This used to look for an existing reloaded home for all
5557 of the reloads, and only then perform any new reloads.
5558 But that could lose if the reloads were done out of reg-class order
5559 because a later reload with a looser constraint might have an old
5560 home in a register needed by an earlier reload with a tighter constraint.
5561
5562 To solve this, we make two passes over the reloads, in the order
5563 described above. In the first pass we try to inherit a reload
5564 from a previous insn. If there is a later reload that needs a
5565 class that is a proper subset of the class being processed, we must
5566 also allocate a spill register during the first pass.
5567
5568 Then make a second pass over the reloads to allocate any reloads
5569 that haven't been given registers yet. */
5570
be7ae2a4
RK
5571 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5572
32131a9c
RK
5573 for (j = 0; j < n_reloads; j++)
5574 {
5575 register int r = reload_order[j];
5576
5577 /* Ignore reloads that got marked inoperative. */
b080c137
RK
5578 if (reload_out[r] == 0 && reload_in[r] == 0
5579 && ! reload_secondary_p[r])
32131a9c
RK
5580 continue;
5581
5582 /* If find_reloads chose a to use reload_in or reload_out as a reload
b080c137
RK
5583 register, we don't need to chose one. Otherwise, try even if it
5584 found one since we might save an insn if we find the value lying
5585 around. */
32131a9c
RK
5586 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5587 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5588 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5589 continue;
5590
5591#if 0 /* No longer needed for correct operation.
5592 It might give better code, or might not; worth an experiment? */
5593 /* If this is an optional reload, we can't inherit from earlier insns
5594 until we are sure that any non-optional reloads have been allocated.
5595 The following code takes advantage of the fact that optional reloads
5596 are at the end of reload_order. */
5597 if (reload_optional[r] != 0)
5598 for (i = 0; i < j; i++)
5599 if ((reload_out[reload_order[i]] != 0
5600 || reload_in[reload_order[i]] != 0
5601 || reload_secondary_p[reload_order[i]])
5602 && ! reload_optional[reload_order[i]]
5603 && reload_reg_rtx[reload_order[i]] == 0)
5604 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5605#endif
5606
5607 /* First see if this pseudo is already available as reloaded
5608 for a previous insn. We cannot try to inherit for reloads
5609 that are smaller than the maximum number of registers needed
5610 for groups unless the register we would allocate cannot be used
5611 for the groups.
5612
5613 We could check here to see if this is a secondary reload for
5614 an object that is already in a register of the desired class.
5615 This would avoid the need for the secondary reload register.
5616 But this is complex because we can't easily determine what
b080c137
RK
5617 objects might want to be loaded via this reload. So let a
5618 register be allocated here. In `emit_reload_insns' we suppress
5619 one of the loads in the case described above. */
32131a9c
RK
5620
5621 if (inheritance)
5622 {
5623 register int regno = -1;
db660765 5624 enum machine_mode mode;
32131a9c
RK
5625
5626 if (reload_in[r] == 0)
5627 ;
5628 else if (GET_CODE (reload_in[r]) == REG)
db660765
TW
5629 {
5630 regno = REGNO (reload_in[r]);
5631 mode = GET_MODE (reload_in[r]);
5632 }
32131a9c 5633 else if (GET_CODE (reload_in_reg[r]) == REG)
db660765
TW
5634 {
5635 regno = REGNO (reload_in_reg[r]);
5636 mode = GET_MODE (reload_in_reg[r]);
5637 }
b60a8416
R
5638 else if (GET_CODE (reload_in[r]) == MEM)
5639 {
5640 rtx prev = prev_nonnote_insn (insn), note;
5641
5642 if (prev && GET_CODE (prev) == INSN
5643 && GET_CODE (PATTERN (prev)) == USE
5644 && GET_CODE (XEXP (PATTERN (prev), 0)) == REG
5645 && (REGNO (XEXP (PATTERN (prev), 0))
5646 >= FIRST_PSEUDO_REGISTER)
5647 && (note = find_reg_note (prev, REG_EQUAL, NULL_RTX))
5648 && GET_CODE (XEXP (note, 0)) == MEM)
5649 {
5650 rtx addr = XEXP (XEXP (note, 0), 0);
5651 int size_diff
5652 = (GET_MODE_SIZE (GET_MODE (addr))
5653 - GET_MODE_SIZE (GET_MODE (reload_in[r])));
5654 if (size_diff >= 0
5655 && rtx_equal_p ((BYTES_BIG_ENDIAN
5656 ? plus_constant (addr, size_diff)
5657 : addr),
5658 XEXP (reload_in[r], 0)))
5659 {
5660 regno = REGNO (XEXP (PATTERN (prev), 0));
5661 mode = GET_MODE (reload_in[r]);
b60a8416
R
5662 }
5663 }
5664 }
32131a9c
RK
5665#if 0
5666 /* This won't work, since REGNO can be a pseudo reg number.
5667 Also, it takes much more hair to keep track of all the things
5668 that can invalidate an inherited reload of part of a pseudoreg. */
5669 else if (GET_CODE (reload_in[r]) == SUBREG
5670 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5671 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5672#endif
5673
5674 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5675 {
e6e52be0 5676 i = REGNO (reg_last_reload_reg[regno]);
32131a9c
RK
5677
5678 if (reg_reloaded_contents[i] == regno
e6e52be0 5679 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
db660765
TW
5680 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5681 >= GET_MODE_SIZE (mode))
e6e52be0 5682 && HARD_REGNO_MODE_OK (i, reload_mode[r])
32131a9c 5683 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
e6e52be0 5684 i)
32131a9c
RK
5685 && (reload_nregs[r] == max_group_size
5686 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
e6e52be0 5687 i))
6e684430
R
5688 && ((reload_reg_free_p (i, reload_opnum[r],
5689 reload_when_needed[r])
5690 && reload_reg_free_before_p (i, reload_opnum[r],
5691 reload_when_needed[r]))
5692 || reload_reg_free_for_value_p (i, reload_opnum[r],
5693 reload_when_needed[r],
f5470689
R
5694 reload_in[r],
5695 reload_out[r], r)))
32131a9c
RK
5696 {
5697 /* If a group is needed, verify that all the subsequent
0f41302f 5698 registers still have their values intact. */
32131a9c 5699 int nr
e6e52be0 5700 = HARD_REGNO_NREGS (i, reload_mode[r]);
32131a9c
RK
5701 int k;
5702
5703 for (k = 1; k < nr; k++)
e6e52be0
R
5704 if (reg_reloaded_contents[i + k] != regno
5705 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
32131a9c
RK
5706 break;
5707
5708 if (k == nr)
5709 {
c74fa651
RS
5710 int i1;
5711
5712 /* We found a register that contains the
5713 value we need. If this register is the
5714 same as an `earlyclobber' operand of the
5715 current insn, just mark it as a place to
5716 reload from since we can't use it as the
5717 reload register itself. */
5718
5719 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5720 if (reg_overlap_mentioned_for_reload_p
5721 (reg_last_reload_reg[regno],
5722 reload_earlyclobbers[i1]))
5723 break;
5724
8908158d 5725 if (i1 != n_earlyclobbers
e6e52be0
R
5726 /* Don't use it if we'd clobber a pseudo reg. */
5727 || (spill_reg_order[i] < 0
5728 && reload_out[r]
5729 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
8908158d
RS
5730 /* Don't really use the inherited spill reg
5731 if we need it wider than we've got it. */
5732 || (GET_MODE_SIZE (reload_mode[r])
5733 > GET_MODE_SIZE (mode)))
c74fa651
RS
5734 reload_override_in[r] = reg_last_reload_reg[regno];
5735 else
5736 {
54c40e68 5737 int k;
c74fa651
RS
5738 /* We can use this as a reload reg. */
5739 /* Mark the register as in use for this part of
5740 the insn. */
e6e52be0 5741 mark_reload_reg_in_use (i,
c74fa651
RS
5742 reload_opnum[r],
5743 reload_when_needed[r],
5744 reload_mode[r]);
5745 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5746 reload_inherited[r] = 1;
5747 reload_inheritance_insn[r]
5748 = reg_reloaded_insn[i];
5749 reload_spill_index[r] = i;
54c40e68
RS
5750 for (k = 0; k < nr; k++)
5751 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
e6e52be0 5752 i + k);
c74fa651 5753 }
32131a9c
RK
5754 }
5755 }
5756 }
5757 }
5758
5759 /* Here's another way to see if the value is already lying around. */
5760 if (inheritance
5761 && reload_in[r] != 0
5762 && ! reload_inherited[r]
5763 && reload_out[r] == 0
5764 && (CONSTANT_P (reload_in[r])
5765 || GET_CODE (reload_in[r]) == PLUS
5766 || GET_CODE (reload_in[r]) == REG
5767 || GET_CODE (reload_in[r]) == MEM)
5768 && (reload_nregs[r] == max_group_size
5769 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5770 {
5771 register rtx equiv
5772 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
fb3821f7 5773 -1, NULL_PTR, 0, reload_mode[r]);
32131a9c
RK
5774 int regno;
5775
5776 if (equiv != 0)
5777 {
5778 if (GET_CODE (equiv) == REG)
5779 regno = REGNO (equiv);
5780 else if (GET_CODE (equiv) == SUBREG)
5781 {
f8a9e02b
RK
5782 /* This must be a SUBREG of a hard register.
5783 Make a new REG since this might be used in an
5784 address and not all machines support SUBREGs
5785 there. */
5786 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
38a448ca 5787 equiv = gen_rtx_REG (reload_mode[r], regno);
32131a9c
RK
5788 }
5789 else
5790 abort ();
5791 }
5792
5793 /* If we found a spill reg, reject it unless it is free
5794 and of the desired class. */
5795 if (equiv != 0
5796 && ((spill_reg_order[regno] >= 0
6e684430
R
5797 && ! (reload_reg_free_before_p (regno, reload_opnum[r],
5798 reload_when_needed[r])
5799 || reload_reg_free_for_value_p (regno,
5800 reload_opnum[r],
5801 reload_when_needed[r],
f5470689
R
5802 reload_in[r],
5803 reload_out[r], r)))
32131a9c
RK
5804 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5805 regno)))
5806 equiv = 0;
5807
5808 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5809 equiv = 0;
5810
5811 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5812 equiv = 0;
5813
5814 /* We found a register that contains the value we need.
5815 If this register is the same as an `earlyclobber' operand
5816 of the current insn, just mark it as a place to reload from
5817 since we can't use it as the reload register itself. */
5818
5819 if (equiv != 0)
5820 for (i = 0; i < n_earlyclobbers; i++)
bfa30b22
RK
5821 if (reg_overlap_mentioned_for_reload_p (equiv,
5822 reload_earlyclobbers[i]))
32131a9c
RK
5823 {
5824 reload_override_in[r] = equiv;
5825 equiv = 0;
5826 break;
5827 }
5828
3c785e47
R
5829 /* If the equiv register we have found is explicitly clobbered
5830 in the current insn, it depends on the reload type if we
5831 can use it, use it for reload_override_in, or not at all.
5832 In particular, we then can't use EQUIV for a
5833 RELOAD_FOR_OUTPUT_ADDRESS reload. */
32131a9c
RK
5834
5835 if (equiv != 0 && regno_clobbered_p (regno, insn))
5836 {
3c785e47
R
5837 switch (reload_when_needed[r])
5838 {
5839 case RELOAD_FOR_OTHER_ADDRESS:
5840 case RELOAD_FOR_INPADDR_ADDRESS:
5841 case RELOAD_FOR_INPUT_ADDRESS:
5842 case RELOAD_FOR_OPADDR_ADDR:
5843 break;
5844 case RELOAD_OTHER:
5845 case RELOAD_FOR_INPUT:
5846 case RELOAD_FOR_OPERAND_ADDRESS:
5847 reload_override_in[r] = equiv;
5848 /* Fall through. */
5849 default:
5850 equiv = 0;
5851 break;
5852 }
32131a9c
RK
5853 }
5854
5855 /* If we found an equivalent reg, say no code need be generated
5856 to load it, and use it as our reload reg. */
3ec2ea3e 5857 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
32131a9c 5858 {
100338df
JL
5859 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5860 int k;
32131a9c
RK
5861 reload_reg_rtx[r] = equiv;
5862 reload_inherited[r] = 1;
100338df
JL
5863
5864 /* If any of the hard registers in EQUIV are spill
5865 registers, mark them as in use for this insn. */
5866 for (k = 0; k < nr; k++)
be7ae2a4 5867 {
100338df
JL
5868 i = spill_reg_order[regno + k];
5869 if (i >= 0)
5870 {
5871 mark_reload_reg_in_use (regno, reload_opnum[r],
5872 reload_when_needed[r],
5873 reload_mode[r]);
5874 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5875 regno + k);
5876 }
be7ae2a4 5877 }
32131a9c
RK
5878 }
5879 }
5880
5881 /* If we found a register to use already, or if this is an optional
5882 reload, we are done. */
5883 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5884 continue;
5885
5886#if 0 /* No longer needed for correct operation. Might or might not
5887 give better code on the average. Want to experiment? */
5888
5889 /* See if there is a later reload that has a class different from our
5890 class that intersects our class or that requires less register
5891 than our reload. If so, we must allocate a register to this
5892 reload now, since that reload might inherit a previous reload
5893 and take the only available register in our class. Don't do this
5894 for optional reloads since they will force all previous reloads
5895 to be allocated. Also don't do this for reloads that have been
5896 turned off. */
5897
5898 for (i = j + 1; i < n_reloads; i++)
5899 {
5900 int s = reload_order[i];
5901
d45cf215
RS
5902 if ((reload_in[s] == 0 && reload_out[s] == 0
5903 && ! reload_secondary_p[s])
32131a9c
RK
5904 || reload_optional[s])
5905 continue;
5906
5907 if ((reload_reg_class[s] != reload_reg_class[r]
5908 && reg_classes_intersect_p (reload_reg_class[r],
5909 reload_reg_class[s]))
5910 || reload_nregs[s] < reload_nregs[r])
5911 break;
5912 }
5913
5914 if (i == n_reloads)
5915 continue;
5916
5917 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5918#endif
5919 }
5920
5921 /* Now allocate reload registers for anything non-optional that
5922 didn't get one yet. */
5923 for (j = 0; j < n_reloads; j++)
5924 {
5925 register int r = reload_order[j];
5926
5927 /* Ignore reloads that got marked inoperative. */
5928 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5929 continue;
5930
5931 /* Skip reloads that already have a register allocated or are
0f41302f 5932 optional. */
32131a9c
RK
5933 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5934 continue;
5935
5936 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5937 break;
5938 }
5939
5940 /* If that loop got all the way, we have won. */
5941 if (j == n_reloads)
5942 break;
5943
5944 fail:
5945 /* Loop around and try without any inheritance. */
5946 /* First undo everything done by the failed attempt
5947 to allocate with inheritance. */
4c9a05bc
RK
5948 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5949 sizeof reload_reg_rtx);
5950 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5951 sizeof reload_inherited);
5952 bcopy ((char *) save_reload_inheritance_insn,
5953 (char *) reload_inheritance_insn,
32131a9c 5954 sizeof reload_inheritance_insn);
4c9a05bc 5955 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
32131a9c 5956 sizeof reload_override_in);
4c9a05bc 5957 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
32131a9c
RK
5958 sizeof reload_spill_index);
5959 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5960 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
32131a9c
RK
5961 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5962 save_reload_reg_used_in_op_addr);
893bc853
RK
5963 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5964 save_reload_reg_used_in_op_addr_reload);
546b63fb
RK
5965 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5966 save_reload_reg_used_in_insn);
5967 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5968 save_reload_reg_used_in_other_addr);
5969
5970 for (i = 0; i < reload_n_operands; i++)
5971 {
5972 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5973 save_reload_reg_used_in_input[i]);
5974 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5975 save_reload_reg_used_in_output[i]);
5976 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5977 save_reload_reg_used_in_input_addr[i]);
47c8cf91
ILT
5978 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
5979 save_reload_reg_used_in_inpaddr_addr[i]);
546b63fb
RK
5980 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5981 save_reload_reg_used_in_output_addr[i]);
47c8cf91
ILT
5982 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
5983 save_reload_reg_used_in_outaddr_addr[i]);
546b63fb 5984 }
32131a9c
RK
5985 }
5986
5987 /* If we thought we could inherit a reload, because it seemed that
5988 nothing else wanted the same reload register earlier in the insn,
5989 verify that assumption, now that all reloads have been assigned. */
5990
5991 for (j = 0; j < n_reloads; j++)
5992 {
5993 register int r = reload_order[j];
5994
5995 if (reload_inherited[r] && reload_reg_rtx[r] != 0
6e684430
R
5996 && ! (reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5997 reload_opnum[r],
5998 reload_when_needed[r])
5999 || reload_reg_free_for_value_p (true_regnum (reload_reg_rtx[r]),
6000 reload_opnum[r],
6001 reload_when_needed[r],
f5470689
R
6002 reload_in[r],
6003 reload_out[r], r)))
32131a9c 6004 reload_inherited[r] = 0;
029b38ff
R
6005 /* If we can inherit a RELOAD_FOR_INPUT, then we do not need its related
6006 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads.
6007 ??? This could be extended to other reload types, but these are
6008 more tricky to handle:
6009 RELOAD_FOR_OTHER_ADDRESS reloads might have been merged, so we
6010 can't eliminate them without a check that *all* references are
6011 now unused due to inheritance.
6012 While RELOAD_FOR_INPADDR_ADDRESS and RELOAD_FOR_OUTADDR_ADDRESS are
6013 not merged, we can't be sure that we have eliminated the use of
6014 that particular reload if we have seen just one
6015 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_OUTPUT_ADDRESS being inherited,
6016 since there might be multiple of the latter two reloads for a single
6017 operand.
6018 RELOAD_FOR_OPADDR_ADDR reloads for different operands are not
6019 merged, but might share the same register by courtesy of
6020 reload_reg_free_for_value_p. reload_reg_used_in_op_addr_reload
6021 does not differentiate by opnum, thus calling clear_reload_reg_in_use
6022 for one of these reloads would mark the register as free even though
6023 another RELOAD_FOR_OPADDR_ADDR reload might still use it. */
6024 else if (reload_inherited[r] && reload_when_needed[r] == RELOAD_FOR_INPUT)
6025 {
6026 for (i = 0; i < n_reloads; i++)
6027 {
6028 if ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
6029 || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS)
6030 && reload_opnum[i] == reload_opnum[r]
6031 && reload_in[i] && reload_reg_rtx[i])
6032 {
6033 int regno = true_regnum (reload_reg_rtx[i]);
6034
6035 reload_in[i] = 0;
6036 if (spill_reg_order[regno] >= 0)
6037 clear_reload_reg_in_use (regno, reload_opnum[i],
6038 reload_when_needed[i],
6039 reload_mode[i]);
6040 reload_reg_rtx[i] = 0;
6041 reload_spill_index[i] = -1;
6042 remove_replacements (i);
6043 }
6044 }
6045 }
32131a9c
RK
6046
6047 /* If we found a better place to reload from,
6048 validate it in the same fashion, if it is a reload reg. */
6049 if (reload_override_in[r]
6050 && (GET_CODE (reload_override_in[r]) == REG
6051 || GET_CODE (reload_override_in[r]) == SUBREG))
6052 {
6053 int regno = true_regnum (reload_override_in[r]);
6054 if (spill_reg_order[regno] >= 0
546b63fb
RK
6055 && ! reload_reg_free_before_p (regno, reload_opnum[r],
6056 reload_when_needed[r]))
32131a9c
RK
6057 reload_override_in[r] = 0;
6058 }
6059 }
6060
6061 /* Now that reload_override_in is known valid,
6062 actually override reload_in. */
6063 for (j = 0; j < n_reloads; j++)
6064 if (reload_override_in[j])
6065 reload_in[j] = reload_override_in[j];
6066
6067 /* If this reload won't be done because it has been cancelled or is
6068 optional and not inherited, clear reload_reg_rtx so other
6069 routines (such as subst_reloads) don't get confused. */
6070 for (j = 0; j < n_reloads; j++)
be7ae2a4
RK
6071 if (reload_reg_rtx[j] != 0
6072 && ((reload_optional[j] && ! reload_inherited[j])
6073 || (reload_in[j] == 0 && reload_out[j] == 0
6074 && ! reload_secondary_p[j])))
6075 {
6076 int regno = true_regnum (reload_reg_rtx[j]);
6077
6078 if (spill_reg_order[regno] >= 0)
6079 clear_reload_reg_in_use (regno, reload_opnum[j],
6080 reload_when_needed[j], reload_mode[j]);
6081 reload_reg_rtx[j] = 0;
6082 }
32131a9c
RK
6083
6084 /* Record which pseudos and which spill regs have output reloads. */
6085 for (j = 0; j < n_reloads; j++)
6086 {
6087 register int r = reload_order[j];
6088
6089 i = reload_spill_index[r];
6090
e6e52be0 6091 /* I is nonneg if this reload uses a register.
32131a9c
RK
6092 If reload_reg_rtx[r] is 0, this is an optional reload
6093 that we opted to ignore. */
6094 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
6095 && reload_reg_rtx[r] != 0)
6096 {
6097 register int nregno = REGNO (reload_out[r]);
372e033b
RS
6098 int nr = 1;
6099
6100 if (nregno < FIRST_PSEUDO_REGISTER)
6101 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
32131a9c
RK
6102
6103 while (--nr >= 0)
372e033b
RS
6104 reg_has_output_reload[nregno + nr] = 1;
6105
6106 if (i >= 0)
32131a9c 6107 {
e6e52be0 6108 nr = HARD_REGNO_NREGS (i, reload_mode[r]);
372e033b 6109 while (--nr >= 0)
e6e52be0 6110 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
32131a9c
RK
6111 }
6112
6113 if (reload_when_needed[r] != RELOAD_OTHER
546b63fb
RK
6114 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
6115 && reload_when_needed[r] != RELOAD_FOR_INSN)
32131a9c
RK
6116 abort ();
6117 }
6118 }
6119}
6120\f
e9a25f70 6121/* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
546b63fb
RK
6122 reloads of the same item for fear that we might not have enough reload
6123 registers. However, normally they will get the same reload register
6124 and hence actually need not be loaded twice.
6125
6126 Here we check for the most common case of this phenomenon: when we have
6127 a number of reloads for the same object, each of which were allocated
6128 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6129 reload, and is not modified in the insn itself. If we find such,
6130 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6131 This will not increase the number of spill registers needed and will
6132 prevent redundant code. */
6133
546b63fb
RK
6134static void
6135merge_assigned_reloads (insn)
6136 rtx insn;
6137{
6138 int i, j;
6139
6140 /* Scan all the reloads looking for ones that only load values and
6141 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6142 assigned and not modified by INSN. */
6143
6144 for (i = 0; i < n_reloads; i++)
6145 {
d668e863
R
6146 int conflicting_input = 0;
6147 int max_input_address_opnum = -1;
6148 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6149
546b63fb
RK
6150 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
6151 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
6152 || reg_set_p (reload_reg_rtx[i], insn))
6153 continue;
6154
6155 /* Look at all other reloads. Ensure that the only use of this
6156 reload_reg_rtx is in a reload that just loads the same value
6157 as we do. Note that any secondary reloads must be of the identical
6158 class since the values, modes, and result registers are the
6159 same, so we need not do anything with any secondary reloads. */
6160
6161 for (j = 0; j < n_reloads; j++)
6162 {
6163 if (i == j || reload_reg_rtx[j] == 0
6164 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
6165 reload_reg_rtx[i]))
6166 continue;
6167
d668e863
R
6168 if (reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6169 && reload_opnum[j] > max_input_address_opnum)
6170 max_input_address_opnum = reload_opnum[j];
6171
546b63fb 6172 /* If the reload regs aren't exactly the same (e.g, different modes)
d668e863
R
6173 or if the values are different, we can't merge this reload.
6174 But if it is an input reload, we might still merge
6175 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
546b63fb
RK
6176
6177 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
6178 || reload_out[j] != 0 || reload_in[j] == 0
6179 || ! rtx_equal_p (reload_in[i], reload_in[j]))
d668e863
R
6180 {
6181 if (reload_when_needed[j] != RELOAD_FOR_INPUT
6182 || ((reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
6183 || reload_opnum[i] > reload_opnum[j])
6184 && reload_when_needed[i] != RELOAD_FOR_OTHER_ADDRESS))
6185 break;
6186 conflicting_input = 1;
6187 if (min_conflicting_input_opnum > reload_opnum[j])
6188 min_conflicting_input_opnum = reload_opnum[j];
6189 }
546b63fb
RK
6190 }
6191
6192 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6193 we, in fact, found any matching reloads. */
6194
d668e863
R
6195 if (j == n_reloads
6196 && max_input_address_opnum <= min_conflicting_input_opnum)
546b63fb
RK
6197 {
6198 for (j = 0; j < n_reloads; j++)
6199 if (i != j && reload_reg_rtx[j] != 0
d668e863
R
6200 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
6201 && (! conflicting_input
6202 || reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6203 || reload_when_needed[j] == RELOAD_FOR_OTHER_ADDRESS))
546b63fb
RK
6204 {
6205 reload_when_needed[i] = RELOAD_OTHER;
6206 reload_in[j] = 0;
efdb3590 6207 reload_spill_index[j] = -1;
546b63fb
RK
6208 transfer_replacements (i, j);
6209 }
6210
6211 /* If this is now RELOAD_OTHER, look for any reloads that load
6212 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6213 if they were for inputs, RELOAD_OTHER for outputs. Note that
6214 this test is equivalent to looking for reloads for this operand
6215 number. */
6216
6217 if (reload_when_needed[i] == RELOAD_OTHER)
6218 for (j = 0; j < n_reloads; j++)
6219 if (reload_in[j] != 0
6220 && reload_when_needed[i] != RELOAD_OTHER
6221 && reg_overlap_mentioned_for_reload_p (reload_in[j],
6222 reload_in[i]))
6223 reload_when_needed[j]
47c8cf91
ILT
6224 = ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
6225 || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS)
6226 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
546b63fb
RK
6227 }
6228 }
6229}
e9a25f70 6230
546b63fb 6231\f
32131a9c
RK
6232/* Output insns to reload values in and out of the chosen reload regs. */
6233
6234static void
6235emit_reload_insns (insn)
6236 rtx insn;
6237{
6238 register int j;
546b63fb
RK
6239 rtx input_reload_insns[MAX_RECOG_OPERANDS];
6240 rtx other_input_address_reload_insns = 0;
6241 rtx other_input_reload_insns = 0;
6242 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
47c8cf91 6243 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
546b63fb
RK
6244 rtx output_reload_insns[MAX_RECOG_OPERANDS];
6245 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
47c8cf91 6246 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
546b63fb 6247 rtx operand_reload_insns = 0;
893bc853 6248 rtx other_operand_reload_insns = 0;
befa01b9 6249 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
32131a9c 6250 rtx following_insn = NEXT_INSN (insn);
a8efe40d 6251 rtx before_insn = insn;
32131a9c
RK
6252 int special;
6253 /* Values to be put in spill_reg_store are put here first. */
6254 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
e6e52be0
R
6255 HARD_REG_SET reg_reloaded_died;
6256
6257 CLEAR_HARD_REG_SET (reg_reloaded_died);
32131a9c 6258
546b63fb
RK
6259 for (j = 0; j < reload_n_operands; j++)
6260 input_reload_insns[j] = input_address_reload_insns[j]
47c8cf91 6261 = inpaddr_address_reload_insns[j]
befa01b9 6262 = output_reload_insns[j] = output_address_reload_insns[j]
47c8cf91 6263 = outaddr_address_reload_insns[j]
befa01b9 6264 = other_output_reload_insns[j] = 0;
546b63fb 6265
32131a9c
RK
6266 /* Now output the instructions to copy the data into and out of the
6267 reload registers. Do these in the order that the reloads were reported,
6268 since reloads of base and index registers precede reloads of operands
6269 and the operands may need the base and index registers reloaded. */
6270
6271 for (j = 0; j < n_reloads; j++)
6272 {
6273 register rtx old;
6274 rtx oldequiv_reg = 0;
80d92002 6275 rtx this_reload_insn = 0;
b60a8416 6276 int expect_occurrences = 1;
73b2ad9e
RK
6277
6278 if (reload_spill_index[j] >= 0)
6279 new_spill_reg_store[reload_spill_index[j]] = 0;
32131a9c
RK
6280
6281 old = reload_in[j];
6282 if (old != 0 && ! reload_inherited[j]
6283 && ! rtx_equal_p (reload_reg_rtx[j], old)
6284 && reload_reg_rtx[j] != 0)
6285 {
6286 register rtx reloadreg = reload_reg_rtx[j];
6287 rtx oldequiv = 0;
6288 enum machine_mode mode;
546b63fb 6289 rtx *where;
32131a9c
RK
6290
6291 /* Determine the mode to reload in.
6292 This is very tricky because we have three to choose from.
6293 There is the mode the insn operand wants (reload_inmode[J]).
6294 There is the mode of the reload register RELOADREG.
6295 There is the intrinsic mode of the operand, which we could find
6296 by stripping some SUBREGs.
6297 It turns out that RELOADREG's mode is irrelevant:
6298 we can change that arbitrarily.
6299
6300 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6301 then the reload reg may not support QImode moves, so use SImode.
6302 If foo is in memory due to spilling a pseudo reg, this is safe,
6303 because the QImode value is in the least significant part of a
6304 slot big enough for a SImode. If foo is some other sort of
6305 memory reference, then it is impossible to reload this case,
6306 so previous passes had better make sure this never happens.
6307
6308 Then consider a one-word union which has SImode and one of its
6309 members is a float, being fetched as (SUBREG:SF union:SI).
6310 We must fetch that as SFmode because we could be loading into
6311 a float-only register. In this case OLD's mode is correct.
6312
6313 Consider an immediate integer: it has VOIDmode. Here we need
6314 to get a mode from something else.
6315
6316 In some cases, there is a fourth mode, the operand's
6317 containing mode. If the insn specifies a containing mode for
6318 this operand, it overrides all others.
6319
6320 I am not sure whether the algorithm here is always right,
6321 but it does the right things in those cases. */
6322
6323 mode = GET_MODE (old);
6324 if (mode == VOIDmode)
6325 mode = reload_inmode[j];
32131a9c
RK
6326
6327#ifdef SECONDARY_INPUT_RELOAD_CLASS
6328 /* If we need a secondary register for this operation, see if
6329 the value is already in a register in that class. Don't
6330 do this if the secondary register will be used as a scratch
6331 register. */
6332
b80bba27
RK
6333 if (reload_secondary_in_reload[j] >= 0
6334 && reload_secondary_in_icode[j] == CODE_FOR_nothing
58b1581b 6335 && optimize)
32131a9c
RK
6336 oldequiv
6337 = find_equiv_reg (old, insn,
b80bba27 6338 reload_reg_class[reload_secondary_in_reload[j]],
fb3821f7 6339 -1, NULL_PTR, 0, mode);
32131a9c
RK
6340#endif
6341
6342 /* If reloading from memory, see if there is a register
6343 that already holds the same value. If so, reload from there.
6344 We can pass 0 as the reload_reg_p argument because
6345 any other reload has either already been emitted,
6346 in which case find_equiv_reg will see the reload-insn,
6347 or has yet to be emitted, in which case it doesn't matter
6348 because we will use this equiv reg right away. */
6349
58b1581b 6350 if (oldequiv == 0 && optimize
32131a9c
RK
6351 && (GET_CODE (old) == MEM
6352 || (GET_CODE (old) == REG
6353 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6354 && reg_renumber[REGNO (old)] < 0)))
546b63fb 6355 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
fb3821f7 6356 -1, NULL_PTR, 0, mode);
32131a9c
RK
6357
6358 if (oldequiv)
6359 {
6360 int regno = true_regnum (oldequiv);
6361
6362 /* If OLDEQUIV is a spill register, don't use it for this
6363 if any other reload needs it at an earlier stage of this insn
a8fdc208 6364 or at this stage. */
32131a9c 6365 if (spill_reg_order[regno] >= 0
546b63fb
RK
6366 && (! reload_reg_free_p (regno, reload_opnum[j],
6367 reload_when_needed[j])
6368 || ! reload_reg_free_before_p (regno, reload_opnum[j],
32131a9c
RK
6369 reload_when_needed[j])))
6370 oldequiv = 0;
6371
6372 /* If OLDEQUIV is not a spill register,
6373 don't use it if any other reload wants it. */
6374 if (spill_reg_order[regno] < 0)
6375 {
6376 int k;
6377 for (k = 0; k < n_reloads; k++)
6378 if (reload_reg_rtx[k] != 0 && k != j
bfa30b22
RK
6379 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
6380 oldequiv))
32131a9c
RK
6381 {
6382 oldequiv = 0;
6383 break;
6384 }
6385 }
546b63fb
RK
6386
6387 /* If it is no cheaper to copy from OLDEQUIV into the
6388 reload register than it would be to move from memory,
6389 don't use it. Likewise, if we need a secondary register
6390 or memory. */
6391
6392 if (oldequiv != 0
6393 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
6394 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6395 reload_reg_class[j])
370b1b83 6396 >= MEMORY_MOVE_COST (mode, reload_reg_class[j], 1)))
546b63fb
RK
6397#ifdef SECONDARY_INPUT_RELOAD_CLASS
6398 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6399 mode, oldequiv)
6400 != NO_REGS)
6401#endif
6402#ifdef SECONDARY_MEMORY_NEEDED
370b1b83
R
6403 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno),
6404 reload_reg_class[j],
546b63fb
RK
6405 mode)
6406#endif
6407 ))
6408 oldequiv = 0;
32131a9c
RK
6409 }
6410
6411 if (oldequiv == 0)
6412 oldequiv = old;
6413 else if (GET_CODE (oldequiv) == REG)
6414 oldequiv_reg = oldequiv;
6415 else if (GET_CODE (oldequiv) == SUBREG)
6416 oldequiv_reg = SUBREG_REG (oldequiv);
6417
76182796
RK
6418 /* If we are reloading from a register that was recently stored in
6419 with an output-reload, see if we can prove there was
6420 actually no need to store the old value in it. */
6421
6422 if (optimize && GET_CODE (oldequiv) == REG
6423 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
e6e52be0
R
6424 && spill_reg_store[REGNO (oldequiv)]
6425 && GET_CODE (old) == REG && dead_or_set_p (insn, old)
76182796 6426 /* This is unsafe if operand occurs more than once in current
b87b7ecd 6427 insn. Perhaps some occurrences weren't reloaded. */
e6e52be0
R
6428 && count_occurrences (PATTERN (insn), old) == 1)
6429 delete_output_reload (insn, j, spill_reg_store[REGNO (oldequiv)]);
76182796 6430
32131a9c 6431 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
3abe6f90
RK
6432 then load RELOADREG from OLDEQUIV. Note that we cannot use
6433 gen_lowpart_common since it can do the wrong thing when
6434 RELOADREG has a multi-word mode. Note that RELOADREG
6435 must always be a REG here. */
32131a9c
RK
6436
6437 if (GET_MODE (reloadreg) != mode)
38a448ca 6438 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
32131a9c
RK
6439 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6440 oldequiv = SUBREG_REG (oldequiv);
6441 if (GET_MODE (oldequiv) != VOIDmode
6442 && mode != GET_MODE (oldequiv))
38a448ca 6443 oldequiv = gen_rtx_SUBREG (mode, oldequiv, 0);
32131a9c 6444
546b63fb 6445 /* Switch to the right place to emit the reload insns. */
32131a9c
RK
6446 switch (reload_when_needed[j])
6447 {
32131a9c 6448 case RELOAD_OTHER:
546b63fb
RK
6449 where = &other_input_reload_insns;
6450 break;
6451 case RELOAD_FOR_INPUT:
6452 where = &input_reload_insns[reload_opnum[j]];
32131a9c 6453 break;
546b63fb
RK
6454 case RELOAD_FOR_INPUT_ADDRESS:
6455 where = &input_address_reload_insns[reload_opnum[j]];
32131a9c 6456 break;
47c8cf91
ILT
6457 case RELOAD_FOR_INPADDR_ADDRESS:
6458 where = &inpaddr_address_reload_insns[reload_opnum[j]];
6459 break;
546b63fb
RK
6460 case RELOAD_FOR_OUTPUT_ADDRESS:
6461 where = &output_address_reload_insns[reload_opnum[j]];
32131a9c 6462 break;
47c8cf91
ILT
6463 case RELOAD_FOR_OUTADDR_ADDRESS:
6464 where = &outaddr_address_reload_insns[reload_opnum[j]];
6465 break;
32131a9c 6466 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
6467 where = &operand_reload_insns;
6468 break;
893bc853
RK
6469 case RELOAD_FOR_OPADDR_ADDR:
6470 where = &other_operand_reload_insns;
6471 break;
546b63fb
RK
6472 case RELOAD_FOR_OTHER_ADDRESS:
6473 where = &other_input_address_reload_insns;
6474 break;
6475 default:
6476 abort ();
32131a9c
RK
6477 }
6478
546b63fb 6479 push_to_sequence (*where);
32131a9c
RK
6480 special = 0;
6481
6482 /* Auto-increment addresses must be reloaded in a special way. */
6483 if (GET_CODE (oldequiv) == POST_INC
6484 || GET_CODE (oldequiv) == POST_DEC
6485 || GET_CODE (oldequiv) == PRE_INC
6486 || GET_CODE (oldequiv) == PRE_DEC)
6487 {
6488 /* We are not going to bother supporting the case where a
6489 incremented register can't be copied directly from
6490 OLDEQUIV since this seems highly unlikely. */
b80bba27 6491 if (reload_secondary_in_reload[j] >= 0)
32131a9c
RK
6492 abort ();
6493 /* Prevent normal processing of this reload. */
6494 special = 1;
6495 /* Output a special code sequence for this case. */
546b63fb 6496 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
32131a9c
RK
6497 }
6498
6499 /* If we are reloading a pseudo-register that was set by the previous
6500 insn, see if we can get rid of that pseudo-register entirely
6501 by redirecting the previous insn into our reload register. */
6502
6503 else if (optimize && GET_CODE (old) == REG
6504 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6505 && dead_or_set_p (insn, old)
6506 /* This is unsafe if some other reload
6507 uses the same reg first. */
546b63fb
RK
6508 && reload_reg_free_before_p (REGNO (reloadreg),
6509 reload_opnum[j],
6510 reload_when_needed[j]))
32131a9c
RK
6511 {
6512 rtx temp = PREV_INSN (insn);
6513 while (temp && GET_CODE (temp) == NOTE)
6514 temp = PREV_INSN (temp);
6515 if (temp
6516 && GET_CODE (temp) == INSN
6517 && GET_CODE (PATTERN (temp)) == SET
6518 && SET_DEST (PATTERN (temp)) == old
6519 /* Make sure we can access insn_operand_constraint. */
6520 && asm_noperands (PATTERN (temp)) < 0
6521 /* This is unsafe if prev insn rejects our reload reg. */
6522 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
6523 reloadreg)
6524 /* This is unsafe if operand occurs more than once in current
6525 insn. Perhaps some occurrences aren't reloaded. */
6526 && count_occurrences (PATTERN (insn), old) == 1
6527 /* Don't risk splitting a matching pair of operands. */
6528 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6529 {
6530 /* Store into the reload register instead of the pseudo. */
6531 SET_DEST (PATTERN (temp)) = reloadreg;
6532 /* If these are the only uses of the pseudo reg,
6533 pretend for GDB it lives in the reload reg we used. */
b1f21e0a
MM
6534 if (REG_N_DEATHS (REGNO (old)) == 1
6535 && REG_N_SETS (REGNO (old)) == 1)
32131a9c
RK
6536 {
6537 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
6538 alter_reg (REGNO (old), -1);
6539 }
6540 special = 1;
6541 }
6542 }
6543
546b63fb
RK
6544 /* We can't do that, so output an insn to load RELOADREG. */
6545
32131a9c
RK
6546 if (! special)
6547 {
6548#ifdef SECONDARY_INPUT_RELOAD_CLASS
6549 rtx second_reload_reg = 0;
6550 enum insn_code icode;
6551
6552 /* If we have a secondary reload, pick up the secondary register
d445b551
RK
6553 and icode, if any. If OLDEQUIV and OLD are different or
6554 if this is an in-out reload, recompute whether or not we
6555 still need a secondary register and what the icode should
6556 be. If we still need a secondary register and the class or
6557 icode is different, go back to reloading from OLD if using
6558 OLDEQUIV means that we got the wrong type of register. We
6559 cannot have different class or icode due to an in-out reload
6560 because we don't make such reloads when both the input and
6561 output need secondary reload registers. */
32131a9c 6562
b80bba27 6563 if (reload_secondary_in_reload[j] >= 0)
32131a9c 6564 {
b80bba27 6565 int secondary_reload = reload_secondary_in_reload[j];
1554c2c6
RK
6566 rtx real_oldequiv = oldequiv;
6567 rtx real_old = old;
6568
6569 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6570 and similarly for OLD.
b80bba27 6571 See comments in get_secondary_reload in reload.c. */
1554c2c6
RK
6572 if (GET_CODE (oldequiv) == REG
6573 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6574 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6575 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6576
6577 if (GET_CODE (old) == REG
6578 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6579 && reg_equiv_mem[REGNO (old)] != 0)
6580 real_old = reg_equiv_mem[REGNO (old)];
6581
32131a9c 6582 second_reload_reg = reload_reg_rtx[secondary_reload];
b80bba27 6583 icode = reload_secondary_in_icode[j];
32131a9c 6584
d445b551
RK
6585 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6586 || (reload_in[j] != 0 && reload_out[j] != 0))
32131a9c
RK
6587 {
6588 enum reg_class new_class
6589 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
1554c2c6 6590 mode, real_oldequiv);
32131a9c
RK
6591
6592 if (new_class == NO_REGS)
6593 second_reload_reg = 0;
6594 else
6595 {
6596 enum insn_code new_icode;
6597 enum machine_mode new_mode;
6598
6599 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6600 REGNO (second_reload_reg)))
1554c2c6 6601 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
6602 else
6603 {
6604 new_icode = reload_in_optab[(int) mode];
6605 if (new_icode != CODE_FOR_nothing
6606 && ((insn_operand_predicate[(int) new_icode][0]
a8fdc208 6607 && ! ((*insn_operand_predicate[(int) new_icode][0])
32131a9c 6608 (reloadreg, mode)))
a8fdc208
RS
6609 || (insn_operand_predicate[(int) new_icode][1]
6610 && ! ((*insn_operand_predicate[(int) new_icode][1])
1554c2c6 6611 (real_oldequiv, mode)))))
32131a9c
RK
6612 new_icode = CODE_FOR_nothing;
6613
6614 if (new_icode == CODE_FOR_nothing)
6615 new_mode = mode;
6616 else
196ddf8a 6617 new_mode = insn_operand_mode[(int) new_icode][2];
32131a9c
RK
6618
6619 if (GET_MODE (second_reload_reg) != new_mode)
6620 {
6621 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6622 new_mode))
1554c2c6 6623 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
6624 else
6625 second_reload_reg
38a448ca
RH
6626 = gen_rtx_REG (new_mode,
6627 REGNO (second_reload_reg));
32131a9c
RK
6628 }
6629 }
6630 }
6631 }
6632
6633 /* If we still need a secondary reload register, check
6634 to see if it is being used as a scratch or intermediate
1554c2c6
RK
6635 register and generate code appropriately. If we need
6636 a scratch register, use REAL_OLDEQUIV since the form of
6637 the insn may depend on the actual address if it is
6638 a MEM. */
32131a9c
RK
6639
6640 if (second_reload_reg)
6641 {
6642 if (icode != CODE_FOR_nothing)
6643 {
5e03c156
RK
6644 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6645 second_reload_reg));
32131a9c
RK
6646 special = 1;
6647 }
6648 else
6649 {
6650 /* See if we need a scratch register to load the
6651 intermediate register (a tertiary reload). */
6652 enum insn_code tertiary_icode
b80bba27 6653 = reload_secondary_in_icode[secondary_reload];
32131a9c
RK
6654
6655 if (tertiary_icode != CODE_FOR_nothing)
6656 {
6657 rtx third_reload_reg
b80bba27 6658 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
32131a9c 6659
546b63fb
RK
6660 emit_insn ((GEN_FCN (tertiary_icode)
6661 (second_reload_reg, real_oldequiv,
6662 third_reload_reg)));
32131a9c
RK
6663 }
6664 else
5e03c156
RK
6665 gen_reload (second_reload_reg, oldequiv,
6666 reload_opnum[j],
6667 reload_when_needed[j]);
546b63fb
RK
6668
6669 oldequiv = second_reload_reg;
32131a9c
RK
6670 }
6671 }
6672 }
6673#endif
6674
2d182c6f 6675 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
5e03c156
RK
6676 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6677 reload_when_needed[j]);
32131a9c
RK
6678
6679#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6680 /* We may have to make a REG_DEAD note for the secondary reload
6681 register in the insns we just made. Find the last insn that
6682 mentioned the register. */
6683 if (! special && second_reload_reg
6684 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6685 {
6686 rtx prev;
6687
546b63fb 6688 for (prev = get_last_insn (); prev;
32131a9c
RK
6689 prev = PREV_INSN (prev))
6690 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
bfa30b22
RK
6691 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6692 PATTERN (prev)))
32131a9c 6693 {
38a448ca
RH
6694 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_DEAD,
6695 second_reload_reg,
6696 REG_NOTES (prev));
32131a9c
RK
6697 break;
6698 }
6699 }
6700#endif
6701 }
6702
80d92002 6703 this_reload_insn = get_last_insn ();
546b63fb
RK
6704 /* End this sequence. */
6705 *where = get_insns ();
6706 end_sequence ();
32131a9c
RK
6707 }
6708
b60a8416
R
6709 /* When inheriting a wider reload, we have a MEM in reload_in[j],
6710 e.g. inheriting a SImode output reload for
6711 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
6712 if (optimize && reload_inherited[j] && reload_in[j]
6713 && GET_CODE (reload_in[j]) == MEM
6714 && reload_spill_index[j] >= 0
6715 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
6716 {
6717 expect_occurrences
6718 = count_occurrences (PATTERN (insn), reload_in[j]) == 1 ? 0 : -1;
6719 reload_in[j]
6720 = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
6721 }
32131a9c
RK
6722 /* Add a note saying the input reload reg
6723 dies in this insn, if anyone cares. */
6724#ifdef PRESERVE_DEATH_INFO_REGNO_P
6725 if (old != 0
6726 && reload_reg_rtx[j] != old
6727 && reload_reg_rtx[j] != 0
6728 && reload_out[j] == 0
6729 && ! reload_inherited[j]
6730 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6731 {
6732 register rtx reloadreg = reload_reg_rtx[j];
6733
a8fdc208 6734#if 0
32131a9c
RK
6735 /* We can't abort here because we need to support this for sched.c.
6736 It's not terrible to miss a REG_DEAD note, but we should try
6737 to figure out how to do this correctly. */
6738 /* The code below is incorrect for address-only reloads. */
6739 if (reload_when_needed[j] != RELOAD_OTHER
6740 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6741 abort ();
6742#endif
6743
6744 /* Add a death note to this insn, for an input reload. */
6745
6746 if ((reload_when_needed[j] == RELOAD_OTHER
6747 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6748 && ! dead_or_set_p (insn, reloadreg))
6749 REG_NOTES (insn)
38a448ca
RH
6750 = gen_rtx_EXPR_LIST (REG_DEAD,
6751 reloadreg, REG_NOTES (insn));
32131a9c
RK
6752 }
6753
6754 /* When we inherit a reload, the last marked death of the reload reg
6755 may no longer really be a death. */
6756 if (reload_reg_rtx[j] != 0
6757 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6758 && reload_inherited[j])
6759 {
6760 /* Handle inheriting an output reload.
6761 Remove the death note from the output reload insn. */
6762 if (reload_spill_index[j] >= 0
6763 && GET_CODE (reload_in[j]) == REG
6764 && spill_reg_store[reload_spill_index[j]] != 0
6765 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6766 REG_DEAD, REGNO (reload_reg_rtx[j])))
6767 remove_death (REGNO (reload_reg_rtx[j]),
6768 spill_reg_store[reload_spill_index[j]]);
6769 /* Likewise for input reloads that were inherited. */
6770 else if (reload_spill_index[j] >= 0
6771 && GET_CODE (reload_in[j]) == REG
6772 && spill_reg_store[reload_spill_index[j]] == 0
6773 && reload_inheritance_insn[j] != 0
a8fdc208 6774 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
32131a9c
RK
6775 REGNO (reload_reg_rtx[j])))
6776 remove_death (REGNO (reload_reg_rtx[j]),
6777 reload_inheritance_insn[j]);
6778 else
6779 {
6780 rtx prev;
6781
6782 /* We got this register from find_equiv_reg.
6783 Search back for its last death note and get rid of it.
6784 But don't search back too far.
6785 Don't go past a place where this reg is set,
6786 since a death note before that remains valid. */
6787 for (prev = PREV_INSN (insn);
6788 prev && GET_CODE (prev) != CODE_LABEL;
6789 prev = PREV_INSN (prev))
6790 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6791 && dead_or_set_p (prev, reload_reg_rtx[j]))
6792 {
6793 if (find_regno_note (prev, REG_DEAD,
6794 REGNO (reload_reg_rtx[j])))
6795 remove_death (REGNO (reload_reg_rtx[j]), prev);
6796 break;
6797 }
6798 }
6799 }
6800
6801 /* We might have used find_equiv_reg above to choose an alternate
6802 place from which to reload. If so, and it died, we need to remove
6803 that death and move it to one of the insns we just made. */
6804
6805 if (oldequiv_reg != 0
6806 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6807 {
6808 rtx prev, prev1;
6809
6810 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6811 prev = PREV_INSN (prev))
6812 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6813 && dead_or_set_p (prev, oldequiv_reg))
6814 {
6815 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6816 {
6817 for (prev1 = this_reload_insn;
6818 prev1; prev1 = PREV_INSN (prev1))
6819 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
bfa30b22
RK
6820 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6821 PATTERN (prev1)))
32131a9c 6822 {
38a448ca
RH
6823 REG_NOTES (prev1) = gen_rtx_EXPR_LIST (REG_DEAD,
6824 oldequiv_reg,
6825 REG_NOTES (prev1));
32131a9c
RK
6826 break;
6827 }
6828 remove_death (REGNO (oldequiv_reg), prev);
6829 }
6830 break;
6831 }
6832 }
6833#endif
6834
6835 /* If we are reloading a register that was recently stored in with an
6836 output-reload, see if we can prove there was
6837 actually no need to store the old value in it. */
6838
6839 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
546b63fb 6840 && reload_in[j] != 0
32131a9c
RK
6841 && GET_CODE (reload_in[j]) == REG
6842#if 0
6843 /* There doesn't seem to be any reason to restrict this to pseudos
6844 and doing so loses in the case where we are copying from a
6845 register of the wrong class. */
6846 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6847#endif
6848 && spill_reg_store[reload_spill_index[j]] != 0
546b63fb 6849 /* This is unsafe if some other reload uses the same reg first. */
e6e52be0 6850 && reload_reg_free_before_p (reload_spill_index[j],
546b63fb 6851 reload_opnum[j], reload_when_needed[j])
32131a9c
RK
6852 && dead_or_set_p (insn, reload_in[j])
6853 /* This is unsafe if operand occurs more than once in current
6854 insn. Perhaps some occurrences weren't reloaded. */
b60a8416
R
6855 && (count_occurrences (PATTERN (insn), reload_in[j])
6856 == expect_occurrences))
32131a9c
RK
6857 delete_output_reload (insn, j,
6858 spill_reg_store[reload_spill_index[j]]);
6859
6860 /* Input-reloading is done. Now do output-reloading,
6861 storing the value from the reload-register after the main insn
6862 if reload_out[j] is nonzero.
6863
6864 ??? At some point we need to support handling output reloads of
6865 JUMP_INSNs or insns that set cc0. */
6866 old = reload_out[j];
6867 if (old != 0
6868 && reload_reg_rtx[j] != old
6869 && reload_reg_rtx[j] != 0)
6870 {
6871 register rtx reloadreg = reload_reg_rtx[j];
29a82058 6872#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
32131a9c 6873 register rtx second_reloadreg = 0;
29a82058 6874#endif
32131a9c
RK
6875 rtx note, p;
6876 enum machine_mode mode;
6877 int special = 0;
6878
6879 /* An output operand that dies right away does need a reload,
6880 but need not be copied from it. Show the new location in the
6881 REG_UNUSED note. */
6882 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6883 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6884 {
6885 XEXP (note, 0) = reload_reg_rtx[j];
6886 continue;
6887 }
a7911cd2
RK
6888 /* Likewise for a SUBREG of an operand that dies. */
6889 else if (GET_CODE (old) == SUBREG
6890 && GET_CODE (SUBREG_REG (old)) == REG
6891 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6892 SUBREG_REG (old))))
6893 {
6894 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6895 reload_reg_rtx[j]);
6896 continue;
6897 }
32131a9c
RK
6898 else if (GET_CODE (old) == SCRATCH)
6899 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6900 but we don't want to make an output reload. */
6901 continue;
6902
6903#if 0
6904 /* Strip off of OLD any size-increasing SUBREGs such as
6905 (SUBREG:SI foo:QI 0). */
6906
6907 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6908 && (GET_MODE_SIZE (GET_MODE (old))
6909 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6910 old = SUBREG_REG (old);
6911#endif
6912
6913 /* If is a JUMP_INSN, we can't support output reloads yet. */
6914 if (GET_CODE (insn) == JUMP_INSN)
6915 abort ();
6916
d7e0324f 6917 if (reload_when_needed[j] == RELOAD_OTHER)
5ca582cf 6918 start_sequence ();
d7e0324f
RK
6919 else
6920 push_to_sequence (output_reload_insns[reload_opnum[j]]);
546b63fb 6921
32131a9c
RK
6922 /* Determine the mode to reload in.
6923 See comments above (for input reloading). */
6924
6925 mode = GET_MODE (old);
6926 if (mode == VOIDmode)
79a365a7
RS
6927 {
6928 /* VOIDmode should never happen for an output. */
6929 if (asm_noperands (PATTERN (insn)) < 0)
6930 /* It's the compiler's fault. */
a89b2cc4 6931 fatal_insn ("VOIDmode on an output", insn);
79a365a7
RS
6932 error_for_asm (insn, "output operand is constant in `asm'");
6933 /* Prevent crash--use something we know is valid. */
6934 mode = word_mode;
38a448ca 6935 old = gen_rtx_REG (mode, REGNO (reloadreg));
79a365a7 6936 }
32131a9c 6937
32131a9c 6938 if (GET_MODE (reloadreg) != mode)
38a448ca 6939 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
32131a9c
RK
6940
6941#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6942
6943 /* If we need two reload regs, set RELOADREG to the intermediate
5e03c156 6944 one, since it will be stored into OLD. We might need a secondary
32131a9c
RK
6945 register only for an input reload, so check again here. */
6946
b80bba27 6947 if (reload_secondary_out_reload[j] >= 0)
32131a9c 6948 {
1554c2c6 6949 rtx real_old = old;
32131a9c 6950
1554c2c6
RK
6951 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6952 && reg_equiv_mem[REGNO (old)] != 0)
6953 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 6954
1554c2c6
RK
6955 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6956 mode, real_old)
6957 != NO_REGS))
6958 {
6959 second_reloadreg = reloadreg;
b80bba27 6960 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
32131a9c 6961
1554c2c6
RK
6962 /* See if RELOADREG is to be used as a scratch register
6963 or as an intermediate register. */
b80bba27 6964 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
32131a9c 6965 {
b80bba27 6966 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
546b63fb 6967 (real_old, second_reloadreg, reloadreg)));
1554c2c6 6968 special = 1;
32131a9c
RK
6969 }
6970 else
1554c2c6
RK
6971 {
6972 /* See if we need both a scratch and intermediate reload
6973 register. */
5e03c156 6974
b80bba27 6975 int secondary_reload = reload_secondary_out_reload[j];
1554c2c6 6976 enum insn_code tertiary_icode
b80bba27 6977 = reload_secondary_out_icode[secondary_reload];
32131a9c 6978
1554c2c6 6979 if (GET_MODE (reloadreg) != mode)
38a448ca 6980 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
1554c2c6
RK
6981
6982 if (tertiary_icode != CODE_FOR_nothing)
6983 {
6984 rtx third_reloadreg
b80bba27 6985 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
a7911cd2 6986 rtx tem;
5e03c156
RK
6987
6988 /* Copy primary reload reg to secondary reload reg.
6989 (Note that these have been swapped above, then
6990 secondary reload reg to OLD using our insn. */
6991
a7911cd2
RK
6992 /* If REAL_OLD is a paradoxical SUBREG, remove it
6993 and try to put the opposite SUBREG on
6994 RELOADREG. */
6995 if (GET_CODE (real_old) == SUBREG
6996 && (GET_MODE_SIZE (GET_MODE (real_old))
6997 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6998 && 0 != (tem = gen_lowpart_common
6999 (GET_MODE (SUBREG_REG (real_old)),
7000 reloadreg)))
7001 real_old = SUBREG_REG (real_old), reloadreg = tem;
7002
5e03c156
RK
7003 gen_reload (reloadreg, second_reloadreg,
7004 reload_opnum[j], reload_when_needed[j]);
7005 emit_insn ((GEN_FCN (tertiary_icode)
7006 (real_old, reloadreg, third_reloadreg)));
7007 special = 1;
9ad5f9f6 7008 }
5e03c156 7009
1554c2c6 7010 else
5e03c156
RK
7011 /* Copy between the reload regs here and then to
7012 OUT later. */
1554c2c6 7013
5e03c156
RK
7014 gen_reload (reloadreg, second_reloadreg,
7015 reload_opnum[j], reload_when_needed[j]);
1554c2c6 7016 }
32131a9c
RK
7017 }
7018 }
7019#endif
7020
7021 /* Output the last reload insn. */
7022 if (! special)
d7c2e385
L
7023 {
7024 rtx set;
7025
7026 /* Don't output the last reload if OLD is not the dest of
7027 INSN and is in the src and is clobbered by INSN. */
7028 if (! flag_expensive_optimizations
7029 || GET_CODE (old) != REG
7030 || !(set = single_set (insn))
7031 || rtx_equal_p (old, SET_DEST (set))
7032 || !reg_mentioned_p (old, SET_SRC (set))
7033 || !regno_clobbered_p (REGNO (old), insn))
7034 gen_reload (old, reloadreg, reload_opnum[j],
7035 reload_when_needed[j]);
7036 }
32131a9c
RK
7037
7038#ifdef PRESERVE_DEATH_INFO_REGNO_P
7039 /* If final will look at death notes for this reg,
7040 put one on the last output-reload insn to use it. Similarly
7041 for any secondary register. */
7042 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
546b63fb 7043 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 7044 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
7045 && reg_overlap_mentioned_for_reload_p (reloadreg,
7046 PATTERN (p)))
38a448ca
RH
7047 REG_NOTES (p) = gen_rtx_EXPR_LIST (REG_DEAD,
7048 reloadreg, REG_NOTES (p));
32131a9c
RK
7049
7050#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
32051ff5 7051 if (! special && second_reloadreg
32131a9c 7052 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
546b63fb 7053 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 7054 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
7055 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
7056 PATTERN (p)))
38a448ca
RH
7057 REG_NOTES (p) = gen_rtx_EXPR_LIST (REG_DEAD,
7058 second_reloadreg,
7059 REG_NOTES (p));
32131a9c
RK
7060#endif
7061#endif
7062 /* Look at all insns we emitted, just to be safe. */
546b63fb 7063 for (p = get_insns (); p; p = NEXT_INSN (p))
32131a9c
RK
7064 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
7065 {
e6e52be0
R
7066 rtx pat = PATTERN (p);
7067
32131a9c
RK
7068 /* If this output reload doesn't come from a spill reg,
7069 clear any memory of reloaded copies of the pseudo reg.
7070 If this output reload comes from a spill reg,
7071 reg_has_output_reload will make this do nothing. */
e6e52be0
R
7072 note_stores (pat, forget_old_reloads_1);
7073
7074 if (reg_mentioned_p (reload_reg_rtx[j], pat))
7075 {
7076 if (reload_spill_index[j] < 0
7077 && GET_CODE (pat) == SET
7078 && SET_SRC (pat) == reload_reg_rtx[j])
7079 {
7080 int src = REGNO (SET_SRC (pat));
32131a9c 7081
e6e52be0
R
7082 reload_spill_index[j] = src;
7083 SET_HARD_REG_BIT (reg_is_output_reload, src);
7084 if (find_regno_note (insn, REG_DEAD, src))
7085 SET_HARD_REG_BIT (reg_reloaded_died, src);
7086 }
7087 if (reload_spill_index[j] >= 0)
7088 new_spill_reg_store[reload_spill_index[j]] = p;
7089 }
32131a9c
RK
7090 }
7091
d7e0324f 7092 if (reload_when_needed[j] == RELOAD_OTHER)
befa01b9
JW
7093 {
7094 emit_insns (other_output_reload_insns[reload_opnum[j]]);
7095 other_output_reload_insns[reload_opnum[j]] = get_insns ();
7096 }
7097 else
7098 output_reload_insns[reload_opnum[j]] = get_insns ();
d7e0324f 7099
546b63fb 7100 end_sequence ();
32131a9c 7101 }
32131a9c
RK
7102 }
7103
546b63fb
RK
7104 /* Now write all the insns we made for reloads in the order expected by
7105 the allocation functions. Prior to the insn being reloaded, we write
7106 the following reloads:
7107
7108 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7109
2edc8d65 7110 RELOAD_OTHER reloads.
546b63fb 7111
47c8cf91
ILT
7112 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7113 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7114 RELOAD_FOR_INPUT reload for the operand.
546b63fb 7115
893bc853
RK
7116 RELOAD_FOR_OPADDR_ADDRS reloads.
7117
546b63fb
RK
7118 RELOAD_FOR_OPERAND_ADDRESS reloads.
7119
7120 After the insn being reloaded, we write the following:
7121
47c8cf91
ILT
7122 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7123 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7124 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7125 reloads for the operand. The RELOAD_OTHER output reloads are
7126 output in descending order by reload number. */
546b63fb
RK
7127
7128 emit_insns_before (other_input_address_reload_insns, before_insn);
7129 emit_insns_before (other_input_reload_insns, before_insn);
7130
7131 for (j = 0; j < reload_n_operands; j++)
7132 {
47c8cf91 7133 emit_insns_before (inpaddr_address_reload_insns[j], before_insn);
546b63fb
RK
7134 emit_insns_before (input_address_reload_insns[j], before_insn);
7135 emit_insns_before (input_reload_insns[j], before_insn);
7136 }
7137
893bc853 7138 emit_insns_before (other_operand_reload_insns, before_insn);
546b63fb
RK
7139 emit_insns_before (operand_reload_insns, before_insn);
7140
7141 for (j = 0; j < reload_n_operands; j++)
7142 {
47c8cf91 7143 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
546b63fb
RK
7144 emit_insns_before (output_address_reload_insns[j], following_insn);
7145 emit_insns_before (output_reload_insns[j], following_insn);
befa01b9 7146 emit_insns_before (other_output_reload_insns[j], following_insn);
546b63fb
RK
7147 }
7148
32131a9c
RK
7149 /* Move death notes from INSN
7150 to output-operand-address and output reload insns. */
7151#ifdef PRESERVE_DEATH_INFO_REGNO_P
7152 {
7153 rtx insn1;
7154 /* Loop over those insns, last ones first. */
7155 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
7156 insn1 = PREV_INSN (insn1))
7157 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
7158 {
7159 rtx source = SET_SRC (PATTERN (insn1));
7160 rtx dest = SET_DEST (PATTERN (insn1));
7161
7162 /* The note we will examine next. */
7163 rtx reg_notes = REG_NOTES (insn);
7164 /* The place that pointed to this note. */
7165 rtx *prev_reg_note = &REG_NOTES (insn);
7166
7167 /* If the note is for something used in the source of this
7168 reload insn, or in the output address, move the note. */
7169 while (reg_notes)
7170 {
7171 rtx next_reg_notes = XEXP (reg_notes, 1);
7172 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
7173 && GET_CODE (XEXP (reg_notes, 0)) == REG
7174 && ((GET_CODE (dest) != REG
bfa30b22
RK
7175 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
7176 dest))
7177 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
7178 source)))
32131a9c
RK
7179 {
7180 *prev_reg_note = next_reg_notes;
7181 XEXP (reg_notes, 1) = REG_NOTES (insn1);
7182 REG_NOTES (insn1) = reg_notes;
7183 }
7184 else
7185 prev_reg_note = &XEXP (reg_notes, 1);
7186
7187 reg_notes = next_reg_notes;
7188 }
7189 }
7190 }
7191#endif
7192
7193 /* For all the spill regs newly reloaded in this instruction,
7194 record what they were reloaded from, so subsequent instructions
d445b551
RK
7195 can inherit the reloads.
7196
7197 Update spill_reg_store for the reloads of this insn.
e9e79d69 7198 Copy the elements that were updated in the loop above. */
32131a9c
RK
7199
7200 for (j = 0; j < n_reloads; j++)
7201 {
7202 register int r = reload_order[j];
7203 register int i = reload_spill_index[r];
7204
e6e52be0 7205 /* I is nonneg if this reload used a register.
32131a9c 7206 If reload_reg_rtx[r] is 0, this is an optional reload
51f0c3b7 7207 that we opted to ignore. */
d445b551 7208
51f0c3b7 7209 if (i >= 0 && reload_reg_rtx[r] != 0)
32131a9c 7210 {
32131a9c 7211 int nr
e6e52be0 7212 = HARD_REGNO_NREGS (i, GET_MODE (reload_reg_rtx[r]));
32131a9c 7213 int k;
51f0c3b7
JW
7214 int part_reaches_end = 0;
7215 int all_reaches_end = 1;
32131a9c 7216
51f0c3b7
JW
7217 /* For a multi register reload, we need to check if all or part
7218 of the value lives to the end. */
32131a9c
RK
7219 for (k = 0; k < nr; k++)
7220 {
e6e52be0 7221 if (reload_reg_reaches_end_p (i + k, reload_opnum[r],
51f0c3b7
JW
7222 reload_when_needed[r]))
7223 part_reaches_end = 1;
7224 else
7225 all_reaches_end = 0;
32131a9c
RK
7226 }
7227
51f0c3b7
JW
7228 /* Ignore reloads that don't reach the end of the insn in
7229 entirety. */
7230 if (all_reaches_end)
32131a9c 7231 {
51f0c3b7
JW
7232 /* First, clear out memory of what used to be in this spill reg.
7233 If consecutive registers are used, clear them all. */
d08ea79f 7234
32131a9c 7235 for (k = 0; k < nr; k++)
e6e52be0 7236 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
d08ea79f 7237
51f0c3b7
JW
7238 /* Maybe the spill reg contains a copy of reload_out. */
7239 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
7240 {
7241 register int nregno = REGNO (reload_out[r]);
7242 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7243 : HARD_REGNO_NREGS (nregno,
7244 GET_MODE (reload_reg_rtx[r])));
7245
7246 spill_reg_store[i] = new_spill_reg_store[i];
7247 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
7248
7249 /* If NREGNO is a hard register, it may occupy more than
7250 one register. If it does, say what is in the
7251 rest of the registers assuming that both registers
7252 agree on how many words the object takes. If not,
7253 invalidate the subsequent registers. */
7254
7255 if (nregno < FIRST_PSEUDO_REGISTER)
7256 for (k = 1; k < nnr; k++)
7257 reg_last_reload_reg[nregno + k]
7258 = (nr == nnr
38a448ca
RH
7259 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7260 REGNO (reload_reg_rtx[r]) + k)
51f0c3b7
JW
7261 : 0);
7262
7263 /* Now do the inverse operation. */
7264 for (k = 0; k < nr; k++)
7265 {
e6e52be0
R
7266 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7267 reg_reloaded_contents[i + k]
51f0c3b7
JW
7268 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7269 ? nregno
7270 : nregno + k);
e6e52be0
R
7271 reg_reloaded_insn[i + k] = insn;
7272 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
51f0c3b7
JW
7273 }
7274 }
d08ea79f 7275
51f0c3b7
JW
7276 /* Maybe the spill reg contains a copy of reload_in. Only do
7277 something if there will not be an output reload for
7278 the register being reloaded. */
7279 else if (reload_out[r] == 0
7280 && reload_in[r] != 0
e6e52be0 7281 && spill_reg_order[i] >= 0
51f0c3b7
JW
7282 && ((GET_CODE (reload_in[r]) == REG
7283 && ! reg_has_output_reload[REGNO (reload_in[r])])
7284 || (GET_CODE (reload_in_reg[r]) == REG
7285 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
7286 {
7287 register int nregno;
7288 int nnr;
d445b551 7289
51f0c3b7
JW
7290 if (GET_CODE (reload_in[r]) == REG)
7291 nregno = REGNO (reload_in[r]);
7292 else
7293 nregno = REGNO (reload_in_reg[r]);
d08ea79f 7294
51f0c3b7
JW
7295 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7296 : HARD_REGNO_NREGS (nregno,
7297 GET_MODE (reload_reg_rtx[r])));
7298
7299 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
7300
7301 if (nregno < FIRST_PSEUDO_REGISTER)
7302 for (k = 1; k < nnr; k++)
7303 reg_last_reload_reg[nregno + k]
7304 = (nr == nnr
38a448ca
RH
7305 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7306 REGNO (reload_reg_rtx[r]) + k)
51f0c3b7
JW
7307 : 0);
7308
7309 /* Unless we inherited this reload, show we haven't
7310 recently done a store. */
7311 if (! reload_inherited[r])
7312 spill_reg_store[i] = 0;
7313
7314 for (k = 0; k < nr; k++)
7315 {
e6e52be0
R
7316 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7317 reg_reloaded_contents[i + k]
51f0c3b7
JW
7318 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7319 ? nregno
7320 : nregno + k);
e6e52be0
R
7321 reg_reloaded_insn[i + k] = insn;
7322 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
51f0c3b7
JW
7323 }
7324 }
7325 }
d445b551 7326
51f0c3b7
JW
7327 /* However, if part of the reload reaches the end, then we must
7328 invalidate the old info for the part that survives to the end. */
7329 else if (part_reaches_end)
7330 {
546b63fb 7331 for (k = 0; k < nr; k++)
e6e52be0 7332 if (reload_reg_reaches_end_p (i + k,
51f0c3b7
JW
7333 reload_opnum[r],
7334 reload_when_needed[r]))
e6e52be0 7335 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
32131a9c
RK
7336 }
7337 }
7338
7339 /* The following if-statement was #if 0'd in 1.34 (or before...).
7340 It's reenabled in 1.35 because supposedly nothing else
7341 deals with this problem. */
7342
7343 /* If a register gets output-reloaded from a non-spill register,
7344 that invalidates any previous reloaded copy of it.
7345 But forget_old_reloads_1 won't get to see it, because
7346 it thinks only about the original insn. So invalidate it here. */
7347 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
7348 {
7349 register int nregno = REGNO (reload_out[r]);
c7093272
RK
7350 if (nregno >= FIRST_PSEUDO_REGISTER)
7351 reg_last_reload_reg[nregno] = 0;
7352 else
7353 {
7354 int num_regs = HARD_REGNO_NREGS (nregno,GET_MODE (reload_out[r]));
36281332 7355
c7093272
RK
7356 while (num_regs-- > 0)
7357 reg_last_reload_reg[nregno + num_regs] = 0;
7358 }
32131a9c
RK
7359 }
7360 }
e6e52be0 7361 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
32131a9c
RK
7362}
7363\f
5e03c156
RK
7364/* Emit code to perform a reload from IN (which may be a reload register) to
7365 OUT (which may also be a reload register). IN or OUT is from operand
7366 OPNUM with reload type TYPE.
546b63fb 7367
3c3eeea6 7368 Returns first insn emitted. */
32131a9c
RK
7369
7370rtx
5e03c156
RK
7371gen_reload (out, in, opnum, type)
7372 rtx out;
32131a9c 7373 rtx in;
546b63fb
RK
7374 int opnum;
7375 enum reload_type type;
32131a9c 7376{
546b63fb 7377 rtx last = get_last_insn ();
7a5b18b0
RK
7378 rtx tem;
7379
7380 /* If IN is a paradoxical SUBREG, remove it and try to put the
7381 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7382 if (GET_CODE (in) == SUBREG
7383 && (GET_MODE_SIZE (GET_MODE (in))
7384 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7385 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7386 in = SUBREG_REG (in), out = tem;
7387 else if (GET_CODE (out) == SUBREG
7388 && (GET_MODE_SIZE (GET_MODE (out))
7389 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7390 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7391 out = SUBREG_REG (out), in = tem;
32131a9c 7392
a8fdc208 7393 /* How to do this reload can get quite tricky. Normally, we are being
32131a9c
RK
7394 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7395 register that didn't get a hard register. In that case we can just
7396 call emit_move_insn.
7397
a7fd196c
JW
7398 We can also be asked to reload a PLUS that adds a register or a MEM to
7399 another register, constant or MEM. This can occur during frame pointer
7400 elimination and while reloading addresses. This case is handled by
7401 trying to emit a single insn to perform the add. If it is not valid,
7402 we use a two insn sequence.
32131a9c
RK
7403
7404 Finally, we could be called to handle an 'o' constraint by putting
7405 an address into a register. In that case, we first try to do this
7406 with a named pattern of "reload_load_address". If no such pattern
7407 exists, we just emit a SET insn and hope for the best (it will normally
7408 be valid on machines that use 'o').
7409
7410 This entire process is made complex because reload will never
7411 process the insns we generate here and so we must ensure that
7412 they will fit their constraints and also by the fact that parts of
7413 IN might be being reloaded separately and replaced with spill registers.
7414 Because of this, we are, in some sense, just guessing the right approach
7415 here. The one listed above seems to work.
7416
7417 ??? At some point, this whole thing needs to be rethought. */
7418
7419 if (GET_CODE (in) == PLUS
a7fd196c 7420 && (GET_CODE (XEXP (in, 0)) == REG
5c6b1bd2 7421 || GET_CODE (XEXP (in, 0)) == SUBREG
a7fd196c
JW
7422 || GET_CODE (XEXP (in, 0)) == MEM)
7423 && (GET_CODE (XEXP (in, 1)) == REG
5c6b1bd2 7424 || GET_CODE (XEXP (in, 1)) == SUBREG
a7fd196c
JW
7425 || CONSTANT_P (XEXP (in, 1))
7426 || GET_CODE (XEXP (in, 1)) == MEM))
32131a9c 7427 {
a7fd196c
JW
7428 /* We need to compute the sum of a register or a MEM and another
7429 register, constant, or MEM, and put it into the reload
3002e160
JW
7430 register. The best possible way of doing this is if the machine
7431 has a three-operand ADD insn that accepts the required operands.
32131a9c
RK
7432
7433 The simplest approach is to try to generate such an insn and see if it
7434 is recognized and matches its constraints. If so, it can be used.
7435
7436 It might be better not to actually emit the insn unless it is valid,
0009eff2 7437 but we need to pass the insn as an operand to `recog' and
b36d7dd7 7438 `insn_extract' and it is simpler to emit and then delete the insn if
0009eff2 7439 not valid than to dummy things up. */
a8fdc208 7440
af929c62 7441 rtx op0, op1, tem, insn;
32131a9c 7442 int code;
a8fdc208 7443
af929c62
RK
7444 op0 = find_replacement (&XEXP (in, 0));
7445 op1 = find_replacement (&XEXP (in, 1));
7446
32131a9c
RK
7447 /* Since constraint checking is strict, commutativity won't be
7448 checked, so we need to do that here to avoid spurious failure
7449 if the add instruction is two-address and the second operand
7450 of the add is the same as the reload reg, which is frequently
7451 the case. If the insn would be A = B + A, rearrange it so
0f41302f 7452 it will be A = A + B as constrain_operands expects. */
a8fdc208 7453
32131a9c 7454 if (GET_CODE (XEXP (in, 1)) == REG
5e03c156 7455 && REGNO (out) == REGNO (XEXP (in, 1)))
af929c62
RK
7456 tem = op0, op0 = op1, op1 = tem;
7457
7458 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
38a448ca 7459 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
32131a9c 7460
38a448ca 7461 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
32131a9c
RK
7462 code = recog_memoized (insn);
7463
7464 if (code >= 0)
7465 {
7466 insn_extract (insn);
7467 /* We want constrain operands to treat this insn strictly in
7468 its validity determination, i.e., the way it would after reload
7469 has completed. */
7470 if (constrain_operands (code, 1))
7471 return insn;
7472 }
7473
546b63fb 7474 delete_insns_since (last);
32131a9c
RK
7475
7476 /* If that failed, we must use a conservative two-insn sequence.
7477 use move to copy constant, MEM, or pseudo register to the reload
af929c62
RK
7478 register since "move" will be able to handle an arbitrary operand,
7479 unlike add which can't, in general. Then add the registers.
32131a9c
RK
7480
7481 If there is another way to do this for a specific machine, a
7482 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7483 we emit below. */
7484
5c6b1bd2 7485 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
af929c62
RK
7486 || (GET_CODE (op1) == REG
7487 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
7488 tem = op0, op0 = op1, op1 = tem;
32131a9c 7489
5c6b1bd2 7490 gen_reload (out, op0, opnum, type);
39b56c2a 7491
5e03c156 7492 /* If OP0 and OP1 are the same, we can use OUT for OP1.
39b56c2a
RK
7493 This fixes a problem on the 32K where the stack pointer cannot
7494 be used as an operand of an add insn. */
7495
7496 if (rtx_equal_p (op0, op1))
5e03c156 7497 op1 = out;
39b56c2a 7498
5e03c156 7499 insn = emit_insn (gen_add2_insn (out, op1));
c77c9766
RK
7500
7501 /* If that failed, copy the address register to the reload register.
0f41302f 7502 Then add the constant to the reload register. */
c77c9766
RK
7503
7504 code = recog_memoized (insn);
7505
7506 if (code >= 0)
7507 {
7508 insn_extract (insn);
7509 /* We want constrain operands to treat this insn strictly in
7510 its validity determination, i.e., the way it would after reload
7511 has completed. */
7512 if (constrain_operands (code, 1))
4117a96b
R
7513 {
7514 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
7515 REG_NOTES (insn)
9e6a5703 7516 = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
4117a96b
R
7517 return insn;
7518 }
c77c9766
RK
7519 }
7520
7521 delete_insns_since (last);
7522
5c6b1bd2 7523 gen_reload (out, op1, opnum, type);
4117a96b 7524 insn = emit_insn (gen_add2_insn (out, op0));
9e6a5703 7525 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
32131a9c
RK
7526 }
7527
0dadecf6
RK
7528#ifdef SECONDARY_MEMORY_NEEDED
7529 /* If we need a memory location to do the move, do it that way. */
7530 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
5e03c156 7531 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
0dadecf6 7532 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
5e03c156
RK
7533 REGNO_REG_CLASS (REGNO (out)),
7534 GET_MODE (out)))
0dadecf6
RK
7535 {
7536 /* Get the memory to use and rewrite both registers to its mode. */
5e03c156 7537 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
0dadecf6 7538
5e03c156 7539 if (GET_MODE (loc) != GET_MODE (out))
38a448ca 7540 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
0dadecf6
RK
7541
7542 if (GET_MODE (loc) != GET_MODE (in))
38a448ca 7543 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
0dadecf6 7544
5c6b1bd2
RK
7545 gen_reload (loc, in, opnum, type);
7546 gen_reload (out, loc, opnum, type);
0dadecf6
RK
7547 }
7548#endif
7549
32131a9c
RK
7550 /* If IN is a simple operand, use gen_move_insn. */
7551 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
5e03c156 7552 emit_insn (gen_move_insn (out, in));
32131a9c
RK
7553
7554#ifdef HAVE_reload_load_address
7555 else if (HAVE_reload_load_address)
5e03c156 7556 emit_insn (gen_reload_load_address (out, in));
32131a9c
RK
7557#endif
7558
5e03c156 7559 /* Otherwise, just write (set OUT IN) and hope for the best. */
32131a9c 7560 else
38a448ca 7561 emit_insn (gen_rtx_SET (VOIDmode, out, in));
32131a9c
RK
7562
7563 /* Return the first insn emitted.
546b63fb 7564 We can not just return get_last_insn, because there may have
32131a9c
RK
7565 been multiple instructions emitted. Also note that gen_move_insn may
7566 emit more than one insn itself, so we can not assume that there is one
7567 insn emitted per emit_insn_before call. */
7568
546b63fb 7569 return last ? NEXT_INSN (last) : get_insns ();
32131a9c
RK
7570}
7571\f
7572/* Delete a previously made output-reload
7573 whose result we now believe is not needed.
7574 First we double-check.
7575
7576 INSN is the insn now being processed.
7577 OUTPUT_RELOAD_INSN is the insn of the output reload.
7578 J is the reload-number for this insn. */
7579
7580static void
7581delete_output_reload (insn, j, output_reload_insn)
7582 rtx insn;
7583 int j;
7584 rtx output_reload_insn;
7585{
7586 register rtx i1;
7587
7588 /* Get the raw pseudo-register referred to. */
7589
7590 rtx reg = reload_in[j];
7591 while (GET_CODE (reg) == SUBREG)
7592 reg = SUBREG_REG (reg);
7593
7594 /* If the pseudo-reg we are reloading is no longer referenced
7595 anywhere between the store into it and here,
7596 and no jumps or labels intervene, then the value can get
7597 here through the reload reg alone.
7598 Otherwise, give up--return. */
7599 for (i1 = NEXT_INSN (output_reload_insn);
7600 i1 != insn; i1 = NEXT_INSN (i1))
7601 {
7602 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7603 return;
7604 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7605 && reg_mentioned_p (reg, PATTERN (i1)))
aa6498c2
R
7606 {
7607 /* If this is just a single USE with an REG_EQUAL note in front
7608 of INSN, this is no problem, because this mentions just the
7609 address that we are using here.
7610 But if there is more than one such USE, the insn might use
7611 the operand directly, or another reload might do that.
7612 This is analogous to the count_occurences check in the callers. */
7613 int num_occurences = 0;
7614
7615 while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE
7616 && find_reg_note (i1, REG_EQUAL, NULL_RTX))
7617 {
7618 num_occurences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
7619 i1 = NEXT_INSN (i1);
7620 }
7621 if (num_occurences == 1 && i1 == insn)
7622 break;
7623 return;
7624 }
32131a9c
RK
7625 }
7626
aa6498c2
R
7627 /* The caller has already checked that REG dies or is set in INSN.
7628 It has also checked that we are optimizing, and thus some inaccurancies
7629 in the debugging information are acceptable.
7630 So we could just delete output_reload_insn.
7631 But in some cases we can improve the debugging information without
7632 sacrificing optimization - maybe even improving the code:
7633 See if the pseudo reg has been completely replaced
32131a9c
RK
7634 with reload regs. If so, delete the store insn
7635 and forget we had a stack slot for the pseudo. */
aa6498c2
R
7636 if (reload_out[j] != reload_in[j]
7637 && REG_N_DEATHS (REGNO (reg)) == 1
7638 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7639 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
32131a9c
RK
7640 {
7641 rtx i2;
7642
7643 /* We know that it was used only between here
7644 and the beginning of the current basic block.
7645 (We also know that the last use before INSN was
7646 the output reload we are thinking of deleting, but never mind that.)
7647 Search that range; see if any ref remains. */
7648 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7649 {
d445b551
RK
7650 rtx set = single_set (i2);
7651
32131a9c
RK
7652 /* Uses which just store in the pseudo don't count,
7653 since if they are the only uses, they are dead. */
d445b551 7654 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
7655 continue;
7656 if (GET_CODE (i2) == CODE_LABEL
7657 || GET_CODE (i2) == JUMP_INSN)
7658 break;
7659 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7660 && reg_mentioned_p (reg, PATTERN (i2)))
aa6498c2
R
7661 {
7662 /* Some other ref remains; just delete the output reload we
7663 know to be dead. */
7664 delete_insn (output_reload_insn);
7665 return;
7666 }
32131a9c
RK
7667 }
7668
7669 /* Delete the now-dead stores into this pseudo. */
7670 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7671 {
d445b551
RK
7672 rtx set = single_set (i2);
7673
7674 if (set != 0 && SET_DEST (set) == reg)
5507b94b
RK
7675 {
7676 /* This might be a basic block head,
7677 thus don't use delete_insn. */
7678 PUT_CODE (i2, NOTE);
7679 NOTE_SOURCE_FILE (i2) = 0;
7680 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
7681 }
32131a9c
RK
7682 if (GET_CODE (i2) == CODE_LABEL
7683 || GET_CODE (i2) == JUMP_INSN)
7684 break;
7685 }
7686
7687 /* For the debugging info,
7688 say the pseudo lives in this reload reg. */
7689 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
7690 alter_reg (REGNO (reg), -1);
7691 }
aa6498c2
R
7692 delete_insn (output_reload_insn);
7693
32131a9c 7694}
32131a9c 7695\f
a8fdc208 7696/* Output reload-insns to reload VALUE into RELOADREG.
858a47b1 7697 VALUE is an autoincrement or autodecrement RTX whose operand
32131a9c
RK
7698 is a register or memory location;
7699 so reloading involves incrementing that location.
7700
7701 INC_AMOUNT is the number to increment or decrement by (always positive).
546b63fb 7702 This cannot be deduced from VALUE. */
32131a9c 7703
546b63fb
RK
7704static void
7705inc_for_reload (reloadreg, value, inc_amount)
32131a9c
RK
7706 rtx reloadreg;
7707 rtx value;
7708 int inc_amount;
32131a9c
RK
7709{
7710 /* REG or MEM to be copied and incremented. */
7711 rtx incloc = XEXP (value, 0);
7712 /* Nonzero if increment after copying. */
7713 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
546b63fb 7714 rtx last;
0009eff2
RK
7715 rtx inc;
7716 rtx add_insn;
7717 int code;
32131a9c
RK
7718
7719 /* No hard register is equivalent to this register after
7720 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7721 we could inc/dec that register as well (maybe even using it for
7722 the source), but I'm not sure it's worth worrying about. */
7723 if (GET_CODE (incloc) == REG)
7724 reg_last_reload_reg[REGNO (incloc)] = 0;
7725
7726 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7727 inc_amount = - inc_amount;
7728
fb3821f7 7729 inc = GEN_INT (inc_amount);
0009eff2
RK
7730
7731 /* If this is post-increment, first copy the location to the reload reg. */
7732 if (post)
546b63fb 7733 emit_insn (gen_move_insn (reloadreg, incloc));
0009eff2
RK
7734
7735 /* See if we can directly increment INCLOC. Use a method similar to that
5e03c156 7736 in gen_reload. */
0009eff2 7737
546b63fb 7738 last = get_last_insn ();
38a448ca
RH
7739 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
7740 gen_rtx_PLUS (GET_MODE (incloc),
7741 incloc, inc)));
0009eff2
RK
7742
7743 code = recog_memoized (add_insn);
7744 if (code >= 0)
32131a9c 7745 {
0009eff2
RK
7746 insn_extract (add_insn);
7747 if (constrain_operands (code, 1))
32131a9c 7748 {
0009eff2
RK
7749 /* If this is a pre-increment and we have incremented the value
7750 where it lives, copy the incremented value to RELOADREG to
7751 be used as an address. */
7752
7753 if (! post)
546b63fb
RK
7754 emit_insn (gen_move_insn (reloadreg, incloc));
7755
7756 return;
32131a9c
RK
7757 }
7758 }
0009eff2 7759
546b63fb 7760 delete_insns_since (last);
0009eff2
RK
7761
7762 /* If couldn't do the increment directly, must increment in RELOADREG.
7763 The way we do this depends on whether this is pre- or post-increment.
7764 For pre-increment, copy INCLOC to the reload register, increment it
7765 there, then save back. */
7766
7767 if (! post)
7768 {
546b63fb
RK
7769 emit_insn (gen_move_insn (reloadreg, incloc));
7770 emit_insn (gen_add2_insn (reloadreg, inc));
7771 emit_insn (gen_move_insn (incloc, reloadreg));
0009eff2 7772 }
32131a9c
RK
7773 else
7774 {
0009eff2
RK
7775 /* Postincrement.
7776 Because this might be a jump insn or a compare, and because RELOADREG
7777 may not be available after the insn in an input reload, we must do
7778 the incrementation before the insn being reloaded for.
7779
7780 We have already copied INCLOC to RELOADREG. Increment the copy in
7781 RELOADREG, save that back, then decrement RELOADREG so it has
7782 the original value. */
7783
546b63fb
RK
7784 emit_insn (gen_add2_insn (reloadreg, inc));
7785 emit_insn (gen_move_insn (incloc, reloadreg));
7786 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
32131a9c 7787 }
0009eff2 7788
546b63fb 7789 return;
32131a9c
RK
7790}
7791\f
7792/* Return 1 if we are certain that the constraint-string STRING allows
7793 the hard register REG. Return 0 if we can't be sure of this. */
7794
7795static int
7796constraint_accepts_reg_p (string, reg)
7797 char *string;
7798 rtx reg;
7799{
7800 int value = 0;
7801 int regno = true_regnum (reg);
7802 int c;
7803
7804 /* Initialize for first alternative. */
7805 value = 0;
7806 /* Check that each alternative contains `g' or `r'. */
7807 while (1)
7808 switch (c = *string++)
7809 {
7810 case 0:
7811 /* If an alternative lacks `g' or `r', we lose. */
7812 return value;
7813 case ',':
7814 /* If an alternative lacks `g' or `r', we lose. */
7815 if (value == 0)
7816 return 0;
7817 /* Initialize for next alternative. */
7818 value = 0;
7819 break;
7820 case 'g':
7821 case 'r':
7822 /* Any general reg wins for this alternative. */
7823 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7824 value = 1;
7825 break;
7826 default:
7827 /* Any reg in specified class wins for this alternative. */
7828 {
0009eff2 7829 enum reg_class class = REG_CLASS_FROM_LETTER (c);
32131a9c 7830
0009eff2 7831 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
32131a9c
RK
7832 value = 1;
7833 }
7834 }
7835}
7836\f
d445b551
RK
7837/* Return the number of places FIND appears within X, but don't count
7838 an occurrence if some SET_DEST is FIND. */
32131a9c 7839
184bb750 7840int
32131a9c
RK
7841count_occurrences (x, find)
7842 register rtx x, find;
7843{
7844 register int i, j;
7845 register enum rtx_code code;
7846 register char *format_ptr;
7847 int count;
7848
7849 if (x == find)
7850 return 1;
7851 if (x == 0)
7852 return 0;
7853
7854 code = GET_CODE (x);
7855
7856 switch (code)
7857 {
7858 case REG:
7859 case QUEUED:
7860 case CONST_INT:
7861 case CONST_DOUBLE:
7862 case SYMBOL_REF:
7863 case CODE_LABEL:
7864 case PC:
7865 case CC0:
7866 return 0;
d445b551
RK
7867
7868 case SET:
7869 if (SET_DEST (x) == find)
7870 return count_occurrences (SET_SRC (x), find);
7871 break;
e9a25f70
JL
7872
7873 default:
7874 break;
32131a9c
RK
7875 }
7876
7877 format_ptr = GET_RTX_FORMAT (code);
7878 count = 0;
7879
7880 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7881 {
7882 switch (*format_ptr++)
7883 {
7884 case 'e':
7885 count += count_occurrences (XEXP (x, i), find);
7886 break;
7887
7888 case 'E':
7889 if (XVEC (x, i) != NULL)
7890 {
7891 for (j = 0; j < XVECLEN (x, i); j++)
7892 count += count_occurrences (XVECEXP (x, i, j), find);
7893 }
7894 break;
7895 }
7896 }
7897 return count;
7898}
2a9fb548
ILT
7899\f
7900/* This array holds values which are equivalent to a hard register
7901 during reload_cse_regs. Each array element is an EXPR_LIST of
7902 values. Each time a hard register is set, we set the corresponding
7903 array element to the value. Each time a hard register is copied
7904 into memory, we add the memory location to the corresponding array
7905 element. We don't store values or memory addresses with side
7906 effects in this array.
7907
7908 If the value is a CONST_INT, then the mode of the containing
7909 EXPR_LIST is the mode in which that CONST_INT was referenced.
7910
7911 We sometimes clobber a specific entry in a list. In that case, we
7912 just set XEXP (list-entry, 0) to 0. */
7913
7914static rtx *reg_values;
7915
ba325eba
ILT
7916/* This is a preallocated REG rtx which we use as a temporary in
7917 reload_cse_invalidate_regno, so that we don't need to allocate a
7918 new one each time through a loop in that function. */
7919
7920static rtx invalidate_regno_rtx;
7921
e9a25f70
JL
7922/* This is a set of registers for which we must remove REG_DEAD notes in
7923 previous insns, because our modifications made them invalid. That can
7924 happen if we introduced the register into the current insn, or we deleted
7925 the current insn which used to set the register. */
7926
7927static HARD_REG_SET no_longer_dead_regs;
7928
2a9fb548
ILT
7929/* Invalidate any entries in reg_values which depend on REGNO,
7930 including those for REGNO itself. This is called if REGNO is
7931 changing. If CLOBBER is true, then always forget anything we
7932 currently know about REGNO. MODE is the mode of the assignment to
7933 REGNO, which is used to determine how many hard registers are being
7934 changed. If MODE is VOIDmode, then only REGNO is being changed;
7935 this is used when invalidating call clobbered registers across a
7936 call. */
7937
7938static void
7939reload_cse_invalidate_regno (regno, mode, clobber)
7940 int regno;
7941 enum machine_mode mode;
7942 int clobber;
7943{
7944 int endregno;
7945 register int i;
7946
7947 /* Our callers don't always go through true_regnum; we may see a
7948 pseudo-register here from a CLOBBER or the like. We probably
7949 won't ever see a pseudo-register that has a real register number,
7950 for we check anyhow for safety. */
7951 if (regno >= FIRST_PSEUDO_REGISTER)
7952 regno = reg_renumber[regno];
7953 if (regno < 0)
7954 return;
7955
7956 if (mode == VOIDmode)
7957 endregno = regno + 1;
7958 else
7959 endregno = regno + HARD_REGNO_NREGS (regno, mode);
7960
7961 if (clobber)
7962 for (i = regno; i < endregno; i++)
7963 reg_values[i] = 0;
7964
7965 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7966 {
7967 rtx x;
7968
7969 for (x = reg_values[i]; x; x = XEXP (x, 1))
7970 {
7971 if (XEXP (x, 0) != 0
9e148ceb 7972 && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR))
2a9fb548
ILT
7973 {
7974 /* If this is the only entry on the list, clear
7975 reg_values[i]. Otherwise, just clear this entry on
7976 the list. */
7977 if (XEXP (x, 1) == 0 && x == reg_values[i])
7978 {
7979 reg_values[i] = 0;
7980 break;
7981 }
7982 XEXP (x, 0) = 0;
7983 }
7984 }
7985 }
ba325eba
ILT
7986
7987 /* We must look at earlier registers, in case REGNO is part of a
7988 multi word value but is not the first register. If an earlier
7989 register has a value in a mode which overlaps REGNO, then we must
7990 invalidate that earlier register. Note that we do not need to
7991 check REGNO or later registers (we must not check REGNO itself,
7992 because we would incorrectly conclude that there was a conflict). */
7993
7994 for (i = 0; i < regno; i++)
7995 {
7996 rtx x;
7997
7998 for (x = reg_values[i]; x; x = XEXP (x, 1))
7999 {
8000 if (XEXP (x, 0) != 0)
8001 {
dbd7556e 8002 PUT_MODE (invalidate_regno_rtx, GET_MODE (x));
ba325eba
ILT
8003 REGNO (invalidate_regno_rtx) = i;
8004 if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx,
8005 NULL_PTR))
8006 {
8007 reload_cse_invalidate_regno (i, VOIDmode, 1);
8008 break;
8009 }
8010 }
8011 }
8012 }
2a9fb548
ILT
8013}
8014
866aa3b6
DE
8015/* The memory at address MEM_BASE is being changed.
8016 Return whether this change will invalidate VAL. */
2a9fb548
ILT
8017
8018static int
cbfc3ad3 8019reload_cse_mem_conflict_p (mem_base, val)
2a9fb548 8020 rtx mem_base;
2a9fb548
ILT
8021 rtx val;
8022{
8023 enum rtx_code code;
8024 char *fmt;
8025 int i;
8026
8027 code = GET_CODE (val);
8028 switch (code)
8029 {
8030 /* Get rid of a few simple cases quickly. */
8031 case REG:
2a9fb548
ILT
8032 case PC:
8033 case CC0:
8034 case SCRATCH:
8035 case CONST:
8036 case CONST_INT:
8037 case CONST_DOUBLE:
8038 case SYMBOL_REF:
8039 case LABEL_REF:
8040 return 0;
8041
8042 case MEM:
866aa3b6
DE
8043 if (GET_MODE (mem_base) == BLKmode
8044 || GET_MODE (val) == BLKmode)
8045 return 1;
e9a25f70
JL
8046 if (anti_dependence (val, mem_base))
8047 return 1;
8048 /* The address may contain nested MEMs. */
8049 break;
2a9fb548
ILT
8050
8051 default:
8052 break;
8053 }
8054
8055 fmt = GET_RTX_FORMAT (code);
8056
8057 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8058 {
8059 if (fmt[i] == 'e')
8060 {
cbfc3ad3 8061 if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i)))
2a9fb548
ILT
8062 return 1;
8063 }
8064 else if (fmt[i] == 'E')
8065 {
8066 int j;
8067
8068 for (j = 0; j < XVECLEN (val, i); j++)
cbfc3ad3 8069 if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
2a9fb548
ILT
8070 return 1;
8071 }
8072 }
8073
8074 return 0;
8075}
8076
8077/* Invalidate any entries in reg_values which are changed because of a
8078 store to MEM_RTX. If this is called because of a non-const call
8079 instruction, MEM_RTX is (mem:BLK const0_rtx). */
8080
8081static void
8082reload_cse_invalidate_mem (mem_rtx)
8083 rtx mem_rtx;
8084{
8085 register int i;
2a9fb548
ILT
8086
8087 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8088 {
8089 rtx x;
8090
8091 for (x = reg_values[i]; x; x = XEXP (x, 1))
8092 {
8093 if (XEXP (x, 0) != 0
cbfc3ad3 8094 && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0)))
2a9fb548
ILT
8095 {
8096 /* If this is the only entry on the list, clear
8097 reg_values[i]. Otherwise, just clear this entry on
8098 the list. */
8099 if (XEXP (x, 1) == 0 && x == reg_values[i])
8100 {
8101 reg_values[i] = 0;
8102 break;
8103 }
8104 XEXP (x, 0) = 0;
8105 }
8106 }
8107 }
8108}
8109
8110/* Invalidate DEST, which is being assigned to or clobbered. The
8111 second parameter exists so that this function can be passed to
8112 note_stores; it is ignored. */
8113
8114static void
8115reload_cse_invalidate_rtx (dest, ignore)
8116 rtx dest;
487a6e06 8117 rtx ignore ATTRIBUTE_UNUSED;
2a9fb548
ILT
8118{
8119 while (GET_CODE (dest) == STRICT_LOW_PART
8120 || GET_CODE (dest) == SIGN_EXTRACT
8121 || GET_CODE (dest) == ZERO_EXTRACT
8122 || GET_CODE (dest) == SUBREG)
8123 dest = XEXP (dest, 0);
8124
8125 if (GET_CODE (dest) == REG)
8126 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
8127 else if (GET_CODE (dest) == MEM)
8128 reload_cse_invalidate_mem (dest);
8129}
8130
e9a25f70
JL
8131/* Possibly delete death notes on the insns before INSN if modifying INSN
8132 extended the lifespan of the registers. */
8133
8134static void
8135reload_cse_delete_death_notes (insn)
8136 rtx insn;
8137{
8138 int dreg;
8139
8140 for (dreg = 0; dreg < FIRST_PSEUDO_REGISTER; dreg++)
8141 {
8142 rtx trial;
8143
8144 if (! TEST_HARD_REG_BIT (no_longer_dead_regs, dreg))
8145 continue;
8146
8147 for (trial = prev_nonnote_insn (insn);
8148 (trial
8149 && GET_CODE (trial) != CODE_LABEL
8150 && GET_CODE (trial) != BARRIER);
8151 trial = prev_nonnote_insn (trial))
8152 {
8153 if (find_regno_note (trial, REG_DEAD, dreg))
8154 {
8155 remove_death (dreg, trial);
8156 break;
8157 }
8158 }
8159 }
8160}
8161
8162/* Record that the current insn uses hard reg REGNO in mode MODE. This
8163 will be used in reload_cse_delete_death_notes to delete prior REG_DEAD
8164 notes for this register. */
8165
8166static void
8167reload_cse_no_longer_dead (regno, mode)
8168 int regno;
8169 enum machine_mode mode;
8170{
8171 int nregs = HARD_REGNO_NREGS (regno, mode);
8172 while (nregs-- > 0)
8173 {
8174 SET_HARD_REG_BIT (no_longer_dead_regs, regno);
8175 regno++;
8176 }
8177}
8178
8179
2a9fb548
ILT
8180/* Do a very simple CSE pass over the hard registers.
8181
8182 This function detects no-op moves where we happened to assign two
8183 different pseudo-registers to the same hard register, and then
8184 copied one to the other. Reload will generate a useless
8185 instruction copying a register to itself.
8186
8187 This function also detects cases where we load a value from memory
8188 into two different registers, and (if memory is more expensive than
8189 registers) changes it to simply copy the first register into the
e9a25f70
JL
8190 second register.
8191
8192 Another optimization is performed that scans the operands of each
8193 instruction to see whether the value is already available in a
8194 hard register. It then replaces the operand with the hard register
8195 if possible, much like an optional reload would. */
2a9fb548 8196
cbfc3ad3 8197void
2a9fb548
ILT
8198reload_cse_regs (first)
8199 rtx first;
8200{
8201 char *firstobj;
8202 rtx callmem;
8203 register int i;
8204 rtx insn;
8205
cbfc3ad3
RK
8206 init_alias_analysis ();
8207
2a9fb548 8208 reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
e016950d 8209 bzero ((char *)reg_values, FIRST_PSEUDO_REGISTER * sizeof (rtx));
2a9fb548
ILT
8210
8211 /* Create our EXPR_LIST structures on reload_obstack, so that we can
8212 free them when we are done. */
8213 push_obstacks (&reload_obstack, &reload_obstack);
8214 firstobj = (char *) obstack_alloc (&reload_obstack, 0);
8215
8216 /* We pass this to reload_cse_invalidate_mem to invalidate all of
8217 memory for a non-const call instruction. */
38a448ca 8218 callmem = gen_rtx_MEM (BLKmode, const0_rtx);
2a9fb548 8219
ba325eba
ILT
8220 /* This is used in reload_cse_invalidate_regno to avoid consing a
8221 new REG in a loop in that function. */
38a448ca 8222 invalidate_regno_rtx = gen_rtx_REG (VOIDmode, 0);
ba325eba 8223
2a9fb548
ILT
8224 for (insn = first; insn; insn = NEXT_INSN (insn))
8225 {
8226 rtx body;
8227
8228 if (GET_CODE (insn) == CODE_LABEL)
8229 {
8230 /* Forget all the register values at a code label. We don't
8231 try to do anything clever around jumps. */
8232 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8233 reg_values[i] = 0;
8234
8235 continue;
8236 }
8237
8238#ifdef NON_SAVING_SETJMP
8239 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
8240 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
8241 {
8242 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8243 reg_values[i] = 0;
8244
8245 continue;
8246 }
8247#endif
8248
8249 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8250 continue;
8251
e9a25f70
JL
8252 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8253
2a9fb548
ILT
8254 /* If this is a call instruction, forget anything stored in a
8255 call clobbered register, or, if this is not a const call, in
8256 memory. */
8257 if (GET_CODE (insn) == CALL_INSN)
8258 {
8259 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8260 if (call_used_regs[i])
8261 reload_cse_invalidate_regno (i, VOIDmode, 1);
8262
8263 if (! CONST_CALL_P (insn))
8264 reload_cse_invalidate_mem (callmem);
8265 }
8266
8267 body = PATTERN (insn);
8268 if (GET_CODE (body) == SET)
8269 {
e9a25f70 8270 int count = 0;
31418d35 8271 if (reload_cse_noop_set_p (body, insn))
2a9fb548 8272 {
2a9fb548
ILT
8273 PUT_CODE (insn, NOTE);
8274 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8275 NOTE_SOURCE_FILE (insn) = 0;
e9a25f70 8276 reload_cse_delete_death_notes (insn);
2a9fb548
ILT
8277
8278 /* We're done with this insn. */
8279 continue;
8280 }
8281
e9a25f70
JL
8282 /* It's not a no-op, but we can try to simplify it. */
8283 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8284 count += reload_cse_simplify_set (body, insn);
8285
8286 if (count > 0 && apply_change_group ())
8287 reload_cse_delete_death_notes (insn);
8288 else if (reload_cse_simplify_operands (insn))
8289 reload_cse_delete_death_notes (insn);
8290
2a9fb548
ILT
8291 reload_cse_record_set (body, body);
8292 }
8293 else if (GET_CODE (body) == PARALLEL)
8294 {
e9a25f70 8295 int count = 0;
2a9fb548
ILT
8296
8297 /* If every action in a PARALLEL is a noop, we can delete
8298 the entire PARALLEL. */
8299 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
cbfc3ad3
RK
8300 if ((GET_CODE (XVECEXP (body, 0, i)) != SET
8301 || ! reload_cse_noop_set_p (XVECEXP (body, 0, i), insn))
8302 && GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
2a9fb548
ILT
8303 break;
8304 if (i < 0)
8305 {
2a9fb548
ILT
8306 PUT_CODE (insn, NOTE);
8307 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8308 NOTE_SOURCE_FILE (insn) = 0;
e9a25f70 8309 reload_cse_delete_death_notes (insn);
2a9fb548
ILT
8310
8311 /* We're done with this insn. */
8312 continue;
8313 }
e9a25f70
JL
8314
8315 /* It's not a no-op, but we can try to simplify it. */
8316 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8317 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8318 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
8319 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
8320
8321 if (count > 0 && apply_change_group ())
8322 reload_cse_delete_death_notes (insn);
8323 else if (reload_cse_simplify_operands (insn))
8324 reload_cse_delete_death_notes (insn);
2a9fb548
ILT
8325
8326 /* Look through the PARALLEL and record the values being
8327 set, if possible. Also handle any CLOBBERs. */
8328 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8329 {
8330 rtx x = XVECEXP (body, 0, i);
8331
8332 if (GET_CODE (x) == SET)
8333 reload_cse_record_set (x, body);
8334 else
8335 note_stores (x, reload_cse_invalidate_rtx);
8336 }
8337 }
8338 else
8339 note_stores (body, reload_cse_invalidate_rtx);
8340
8341#ifdef AUTO_INC_DEC
8342 /* Clobber any registers which appear in REG_INC notes. We
8343 could keep track of the changes to their values, but it is
8344 unlikely to help. */
8345 {
8346 rtx x;
8347
8348 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
8349 if (REG_NOTE_KIND (x) == REG_INC)
8350 reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX);
8351 }
8352#endif
8353
8354 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
8355 after we have processed the insn. */
8356 if (GET_CODE (insn) == CALL_INSN)
8357 {
8358 rtx x;
8359
8360 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
8361 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
8362 reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX);
8363 }
8364 }
8365
8366 /* Free all the temporary structures we created, and go back to the
8367 regular obstacks. */
8368 obstack_free (&reload_obstack, firstobj);
8369 pop_obstacks ();
8370}
8371
8372/* Return whether the values known for REGNO are equal to VAL. MODE
8373 is the mode of the object that VAL is being copied to; this matters
8374 if VAL is a CONST_INT. */
8375
8376static int
8377reload_cse_regno_equal_p (regno, val, mode)
8378 int regno;
8379 rtx val;
8380 enum machine_mode mode;
8381{
8382 rtx x;
8383
8384 if (val == 0)
8385 return 0;
8386
8387 for (x = reg_values[regno]; x; x = XEXP (x, 1))
8388 if (XEXP (x, 0) != 0
8389 && rtx_equal_p (XEXP (x, 0), val)
bb173ade
RK
8390 && (! flag_float_store || GET_CODE (XEXP (x, 0)) != MEM
8391 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2a9fb548
ILT
8392 && (GET_CODE (val) != CONST_INT
8393 || mode == GET_MODE (x)
8394 || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
6e848450
RK
8395 /* On a big endian machine if the value spans more than
8396 one register then this register holds the high part of
8397 it and we can't use it.
8398
8399 ??? We should also compare with the high part of the
8400 value. */
8401 && !(WORDS_BIG_ENDIAN
8402 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
2a9fb548
ILT
8403 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
8404 GET_MODE_BITSIZE (GET_MODE (x))))))
8405 return 1;
8406
8407 return 0;
8408}
8409
31418d35
ILT
8410/* See whether a single set is a noop. SET is the set instruction we
8411 are should check, and INSN is the instruction from which it came. */
2a9fb548
ILT
8412
8413static int
31418d35 8414reload_cse_noop_set_p (set, insn)
2a9fb548 8415 rtx set;
31418d35 8416 rtx insn;
2a9fb548
ILT
8417{
8418 rtx src, dest;
8419 enum machine_mode dest_mode;
8420 int dreg, sreg;
31418d35 8421 int ret;
2a9fb548
ILT
8422
8423 src = SET_SRC (set);
8424 dest = SET_DEST (set);
8425 dest_mode = GET_MODE (dest);
8426
8427 if (side_effects_p (src))
8428 return 0;
8429
8430 dreg = true_regnum (dest);
8431 sreg = true_regnum (src);
8432
31418d35
ILT
8433 /* Check for setting a register to itself. In this case, we don't
8434 have to worry about REG_DEAD notes. */
8435 if (dreg >= 0 && dreg == sreg)
8436 return 1;
8437
8438 ret = 0;
2a9fb548
ILT
8439 if (dreg >= 0)
8440 {
8441 /* Check for setting a register to itself. */
8442 if (dreg == sreg)
31418d35 8443 ret = 1;
2a9fb548
ILT
8444
8445 /* Check for setting a register to a value which we already know
8446 is in the register. */
31418d35
ILT
8447 else if (reload_cse_regno_equal_p (dreg, src, dest_mode))
8448 ret = 1;
2a9fb548
ILT
8449
8450 /* Check for setting a register DREG to another register SREG
8451 where SREG is equal to a value which is already in DREG. */
31418d35 8452 else if (sreg >= 0)
2a9fb548
ILT
8453 {
8454 rtx x;
8455
8456 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
31418d35 8457 {
99c2b71f
ILT
8458 rtx tmp;
8459
8460 if (XEXP (x, 0) == 0)
8461 continue;
8462
8463 if (dest_mode == GET_MODE (x))
8464 tmp = XEXP (x, 0);
8465 else if (GET_MODE_BITSIZE (dest_mode)
8466 < GET_MODE_BITSIZE (GET_MODE (x)))
8467 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8468 else
8469 continue;
8470
8471 if (tmp
8472 && reload_cse_regno_equal_p (dreg, tmp, dest_mode))
31418d35
ILT
8473 {
8474 ret = 1;
8475 break;
8476 }
8477 }
2a9fb548
ILT
8478 }
8479 }
8480 else if (GET_CODE (dest) == MEM)
8481 {
8482 /* Check for storing a register to memory when we know that the
8483 register is equivalent to the memory location. */
8484 if (sreg >= 0
8485 && reload_cse_regno_equal_p (sreg, dest, dest_mode)
8486 && ! side_effects_p (dest))
31418d35 8487 ret = 1;
2a9fb548
ILT
8488 }
8489
31418d35
ILT
8490 /* If we can delete this SET, then we need to look for an earlier
8491 REG_DEAD note on DREG, and remove it if it exists. */
e9a25f70 8492 if (ret && dreg >= 0)
31418d35
ILT
8493 {
8494 if (! find_regno_note (insn, REG_UNUSED, dreg))
e9a25f70 8495 reload_cse_no_longer_dead (dreg, dest_mode);
31418d35
ILT
8496 }
8497
8498 return ret;
2a9fb548
ILT
8499}
8500
8501/* Try to simplify a single SET instruction. SET is the set pattern.
e9a25f70
JL
8502 INSN is the instruction it came from.
8503 This function only handles one case: if we set a register to a value
8504 which is not a register, we try to find that value in some other register
8505 and change the set into a register copy. */
2a9fb548 8506
e9a25f70 8507static int
2a9fb548
ILT
8508reload_cse_simplify_set (set, insn)
8509 rtx set;
8510 rtx insn;
8511{
8512 int dreg;
8513 rtx src;
8514 enum machine_mode dest_mode;
8515 enum reg_class dclass;
8516 register int i;
8517
2a9fb548
ILT
8518 dreg = true_regnum (SET_DEST (set));
8519 if (dreg < 0)
e9a25f70 8520 return 0;
2a9fb548
ILT
8521
8522 src = SET_SRC (set);
8523 if (side_effects_p (src) || true_regnum (src) >= 0)
e9a25f70 8524 return 0;
2a9fb548 8525
cbd5b9a2
KR
8526 dclass = REGNO_REG_CLASS (dreg);
8527
33ab8de0 8528 /* If memory loads are cheaper than register copies, don't change them. */
cbd5b9a2
KR
8529 if (GET_CODE (src) == MEM
8530 && MEMORY_MOVE_COST (GET_MODE (src), dclass, 1) < 2)
e9a25f70 8531 return 0;
2a9fb548 8532
0254c561
JC
8533 /* If the constant is cheaper than a register, don't change it. */
8534 if (CONSTANT_P (src)
8535 && rtx_cost (src, SET) < 2)
8536 return 0;
8537
2a9fb548 8538 dest_mode = GET_MODE (SET_DEST (set));
2a9fb548
ILT
8539 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8540 {
8541 if (i != dreg
8542 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
8543 && reload_cse_regno_equal_p (i, src, dest_mode))
8544 {
8545 int validated;
8546
8547 /* Pop back to the real obstacks while changing the insn. */
8548 pop_obstacks ();
8549
8550 validated = validate_change (insn, &SET_SRC (set),
38a448ca 8551 gen_rtx_REG (dest_mode, i), 1);
2a9fb548
ILT
8552
8553 /* Go back to the obstack we are using for temporary
8554 storage. */
8555 push_obstacks (&reload_obstack, &reload_obstack);
8556
e9a25f70
JL
8557 if (validated && ! find_regno_note (insn, REG_UNUSED, i))
8558 {
8559 reload_cse_no_longer_dead (i, dest_mode);
8560 return 1;
8561 }
8562 }
8563 }
8564 return 0;
8565}
8566
8567/* Try to replace operands in INSN with equivalent values that are already
8568 in registers. This can be viewed as optional reloading.
8569
8570 For each non-register operand in the insn, see if any hard regs are
8571 known to be equivalent to that operand. Record the alternatives which
8572 can accept these hard registers. Among all alternatives, select the
8573 ones which are better or equal to the one currently matching, where
8574 "better" is in terms of '?' and '!' constraints. Among the remaining
8575 alternatives, select the one which replaces most operands with
8576 hard registers. */
8577
8578static int
8579reload_cse_simplify_operands (insn)
8580 rtx insn;
8581{
8582#ifdef REGISTER_CONSTRAINTS
8583 int insn_code_number, n_operands, n_alternatives;
8584 int i,j;
8585
8586 char *constraints[MAX_RECOG_OPERANDS];
8587
8588 /* Vector recording how bad an alternative is. */
8589 int *alternative_reject;
8590 /* Vector recording how many registers can be introduced by choosing
8591 this alternative. */
8592 int *alternative_nregs;
8593 /* Array of vectors recording, for each operand and each alternative,
8594 which hard register to substitute, or -1 if the operand should be
8595 left as it is. */
8596 int *op_alt_regno[MAX_RECOG_OPERANDS];
8597 /* Array of alternatives, sorted in order of decreasing desirability. */
8598 int *alternative_order;
0254c561 8599 rtx reg = gen_rtx_REG (VOIDmode, -1);
e9a25f70
JL
8600
8601 /* Find out some information about this insn. */
8602 insn_code_number = recog_memoized (insn);
8603 /* We don't modify asm instructions. */
8604 if (insn_code_number < 0)
8605 return 0;
8606
8607 n_operands = insn_n_operands[insn_code_number];
8608 n_alternatives = insn_n_alternatives[insn_code_number];
8609
8610 if (n_alternatives == 0 || n_operands == 0)
1d300e19 8611 return 0;
e9a25f70
JL
8612 insn_extract (insn);
8613
8614 /* Figure out which alternative currently matches. */
8615 if (! constrain_operands (insn_code_number, 1))
b8705408 8616 fatal_insn_not_found (insn);
e9a25f70
JL
8617
8618 alternative_reject = (int *) alloca (n_alternatives * sizeof (int));
8619 alternative_nregs = (int *) alloca (n_alternatives * sizeof (int));
8620 alternative_order = (int *) alloca (n_alternatives * sizeof (int));
8621 bzero ((char *)alternative_reject, n_alternatives * sizeof (int));
8622 bzero ((char *)alternative_nregs, n_alternatives * sizeof (int));
8623
8624 for (i = 0; i < n_operands; i++)
8625 {
8626 enum machine_mode mode;
8627 int regno;
8628 char *p;
8629
8630 op_alt_regno[i] = (int *) alloca (n_alternatives * sizeof (int));
8631 for (j = 0; j < n_alternatives; j++)
8632 op_alt_regno[i][j] = -1;
8633
8634 p = constraints[i] = insn_operand_constraint[insn_code_number][i];
8635 mode = insn_operand_mode[insn_code_number][i];
8636
8637 /* Add the reject values for each alternative given by the constraints
8638 for this operand. */
8639 j = 0;
8640 while (*p != '\0')
8641 {
8642 char c = *p++;
8643 if (c == ',')
8644 j++;
8645 else if (c == '?')
8646 alternative_reject[j] += 3;
8647 else if (c == '!')
8648 alternative_reject[j] += 300;
8649 }
8650
8651 /* We won't change operands which are already registers. We
8652 also don't want to modify output operands. */
8653 regno = true_regnum (recog_operand[i]);
8654 if (regno >= 0
8655 || constraints[i][0] == '='
8656 || constraints[i][0] == '+')
8657 continue;
8658
8659 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8660 {
8661 int class = (int) NO_REGS;
8662
8663 if (! reload_cse_regno_equal_p (regno, recog_operand[i], mode))
8664 continue;
8665
0254c561
JC
8666 REGNO (reg) = regno;
8667 PUT_MODE (reg, mode);
8668
e9a25f70
JL
8669 /* We found a register equal to this operand. Now look for all
8670 alternatives that can accept this register and have not been
8671 assigned a register they can use yet. */
8672 j = 0;
8673 p = constraints[i];
8674 for (;;)
31418d35 8675 {
e9a25f70
JL
8676 char c = *p++;
8677
8678 switch (c)
31418d35 8679 {
e9a25f70
JL
8680 case '=': case '+': case '?':
8681 case '#': case '&': case '!':
8682 case '*': case '%':
8683 case '0': case '1': case '2': case '3': case '4':
8684 case 'm': case '<': case '>': case 'V': case 'o':
8685 case 'E': case 'F': case 'G': case 'H':
8686 case 's': case 'i': case 'n':
8687 case 'I': case 'J': case 'K': case 'L':
8688 case 'M': case 'N': case 'O': case 'P':
8689#ifdef EXTRA_CONSTRAINT
8690 case 'Q': case 'R': case 'S': case 'T': case 'U':
8691#endif
8692 case 'p': case 'X':
8693 /* These don't say anything we care about. */
8694 break;
8695
8696 case 'g': case 'r':
8697 class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
8698 break;
8699
8700 default:
8701 class
8702 = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER (c)];
8703 break;
31418d35 8704
e9a25f70
JL
8705 case ',': case '\0':
8706 /* See if REGNO fits this alternative, and set it up as the
8707 replacement register if we don't have one for this
0254c561
JC
8708 alternative yet and the operand being replaced is not
8709 a cheap CONST_INT. */
e9a25f70 8710 if (op_alt_regno[i][j] == -1
0254c561
JC
8711 && reg_fits_class_p (reg, class, 0, mode)
8712 && (GET_CODE (recog_operand[i]) != CONST_INT
8713 || rtx_cost (recog_operand[i], SET) > rtx_cost (reg, SET)))
31418d35 8714 {
e9a25f70
JL
8715 alternative_nregs[j]++;
8716 op_alt_regno[i][j] = regno;
31418d35 8717 }
e9a25f70
JL
8718 j++;
8719 break;
31418d35
ILT
8720 }
8721
e9a25f70
JL
8722 if (c == '\0')
8723 break;
8724 }
8725 }
8726 }
8727
8728 /* Record all alternatives which are better or equal to the currently
8729 matching one in the alternative_order array. */
8730 for (i = j = 0; i < n_alternatives; i++)
8731 if (alternative_reject[i] <= alternative_reject[which_alternative])
8732 alternative_order[j++] = i;
8733 n_alternatives = j;
8734
8735 /* Sort it. Given a small number of alternatives, a dumb algorithm
8736 won't hurt too much. */
8737 for (i = 0; i < n_alternatives - 1; i++)
8738 {
8739 int best = i;
8740 int best_reject = alternative_reject[alternative_order[i]];
8741 int best_nregs = alternative_nregs[alternative_order[i]];
8742 int tmp;
8743
8744 for (j = i + 1; j < n_alternatives; j++)
8745 {
8746 int this_reject = alternative_reject[alternative_order[j]];
8747 int this_nregs = alternative_nregs[alternative_order[j]];
8748
8749 if (this_reject < best_reject
8750 || (this_reject == best_reject && this_nregs < best_nregs))
8751 {
8752 best = j;
8753 best_reject = this_reject;
8754 best_nregs = this_nregs;
31418d35 8755 }
2a9fb548 8756 }
e9a25f70
JL
8757
8758 tmp = alternative_order[best];
8759 alternative_order[best] = alternative_order[i];
8760 alternative_order[i] = tmp;
8761 }
8762
8763 /* Substitute the operands as determined by op_alt_regno for the best
8764 alternative. */
8765 j = alternative_order[0];
8766 CLEAR_HARD_REG_SET (no_longer_dead_regs);
8767
8768 /* Pop back to the real obstacks while changing the insn. */
8769 pop_obstacks ();
8770
8771 for (i = 0; i < n_operands; i++)
8772 {
8773 enum machine_mode mode = insn_operand_mode[insn_code_number][i];
8774 if (op_alt_regno[i][j] == -1)
8775 continue;
8776
8777 reload_cse_no_longer_dead (op_alt_regno[i][j], mode);
8778 validate_change (insn, recog_operand_loc[i],
38a448ca 8779 gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
e9a25f70
JL
8780 }
8781
8782 for (i = insn_n_dups[insn_code_number] - 1; i >= 0; i--)
8783 {
8784 int op = recog_dup_num[i];
8785 enum machine_mode mode = insn_operand_mode[insn_code_number][op];
8786
8787 if (op_alt_regno[op][j] == -1)
8788 continue;
8789
8790 reload_cse_no_longer_dead (op_alt_regno[op][j], mode);
8791 validate_change (insn, recog_dup_loc[i],
38a448ca 8792 gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
2a9fb548 8793 }
e9a25f70
JL
8794
8795 /* Go back to the obstack we are using for temporary
8796 storage. */
8797 push_obstacks (&reload_obstack, &reload_obstack);
8798
8799 return apply_change_group ();
8800#else
8801 return 0;
8802#endif
2a9fb548
ILT
8803}
8804
8805/* These two variables are used to pass information from
8806 reload_cse_record_set to reload_cse_check_clobber. */
8807
8808static int reload_cse_check_clobbered;
8809static rtx reload_cse_check_src;
8810
8811/* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
8812 RELOAD_CSE_CHECK_CLOBBERED. This is called via note_stores. The
8813 second argument, which is passed by note_stores, is ignored. */
8814
8815static void
8816reload_cse_check_clobber (dest, ignore)
8817 rtx dest;
487a6e06 8818 rtx ignore ATTRIBUTE_UNUSED;
2a9fb548
ILT
8819{
8820 if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
8821 reload_cse_check_clobbered = 1;
8822}
8823
8824/* Record the result of a SET instruction. SET is the set pattern.
8825 BODY is the pattern of the insn that it came from. */
8826
8827static void
8828reload_cse_record_set (set, body)
8829 rtx set;
8830 rtx body;
8831{
9e148ceb 8832 rtx dest, src, x;
2a9fb548
ILT
8833 int dreg, sreg;
8834 enum machine_mode dest_mode;
8835
8836 dest = SET_DEST (set);
8837 src = SET_SRC (set);
8838 dreg = true_regnum (dest);
8839 sreg = true_regnum (src);
8840 dest_mode = GET_MODE (dest);
8841
9e148ceb
ILT
8842 /* Some machines don't define AUTO_INC_DEC, but they still use push
8843 instructions. We need to catch that case here in order to
8844 invalidate the stack pointer correctly. Note that invalidating
8845 the stack pointer is different from invalidating DEST. */
8846 x = dest;
8847 while (GET_CODE (x) == SUBREG
8848 || GET_CODE (x) == ZERO_EXTRACT
8849 || GET_CODE (x) == SIGN_EXTRACT
8850 || GET_CODE (x) == STRICT_LOW_PART)
8851 x = XEXP (x, 0);
8852 if (push_operand (x, GET_MODE (x)))
8853 {
8854 reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX);
8855 reload_cse_invalidate_rtx (dest, NULL_RTX);
8856 return;
8857 }
8858
2a9fb548
ILT
8859 /* We can only handle an assignment to a register, or a store of a
8860 register to a memory location. For other cases, we just clobber
8861 the destination. We also have to just clobber if there are side
8862 effects in SRC or DEST. */
8863 if ((dreg < 0 && GET_CODE (dest) != MEM)
8864 || side_effects_p (src)
8865 || side_effects_p (dest))
8866 {
8867 reload_cse_invalidate_rtx (dest, NULL_RTX);
8868 return;
8869 }
8870
8871#ifdef HAVE_cc0
8872 /* We don't try to handle values involving CC, because it's a pain
8873 to keep track of when they have to be invalidated. */
8874 if (reg_mentioned_p (cc0_rtx, src)
8875 || reg_mentioned_p (cc0_rtx, dest))
8876 {
8877 reload_cse_invalidate_rtx (dest, NULL_RTX);
8878 return;
8879 }
8880#endif
8881
8882 /* If BODY is a PARALLEL, then we need to see whether the source of
8883 SET is clobbered by some other instruction in the PARALLEL. */
8884 if (GET_CODE (body) == PARALLEL)
8885 {
8886 int i;
8887
8888 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8889 {
8890 rtx x;
8891
8892 x = XVECEXP (body, 0, i);
8893 if (x == set)
8894 continue;
8895
8896 reload_cse_check_clobbered = 0;
8897 reload_cse_check_src = src;
8898 note_stores (x, reload_cse_check_clobber);
8899 if (reload_cse_check_clobbered)
8900 {
8901 reload_cse_invalidate_rtx (dest, NULL_RTX);
8902 return;
8903 }
8904 }
8905 }
8906
8907 if (dreg >= 0)
8908 {
8909 int i;
8910
8911 /* This is an assignment to a register. Update the value we
8912 have stored for the register. */
8913 if (sreg >= 0)
ad578014
ILT
8914 {
8915 rtx x;
8916
8917 /* This is a copy from one register to another. Any values
8918 which were valid for SREG are now valid for DREG. If the
8919 mode changes, we use gen_lowpart_common to extract only
8920 the part of the value that is copied. */
8921 reg_values[dreg] = 0;
8922 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8923 {
8924 rtx tmp;
8925
8926 if (XEXP (x, 0) == 0)
8927 continue;
8928 if (dest_mode == GET_MODE (XEXP (x, 0)))
8929 tmp = XEXP (x, 0);
23e7786b
JL
8930 else if (GET_MODE_BITSIZE (dest_mode)
8931 > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
8932 continue;
ad578014
ILT
8933 else
8934 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8935 if (tmp)
38a448ca
RH
8936 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, tmp,
8937 reg_values[dreg]);
ad578014
ILT
8938 }
8939 }
2a9fb548 8940 else
38a448ca 8941 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, src, NULL_RTX);
2a9fb548
ILT
8942
8943 /* We've changed DREG, so invalidate any values held by other
8944 registers that depend upon it. */
8945 reload_cse_invalidate_regno (dreg, dest_mode, 0);
8946
8947 /* If this assignment changes more than one hard register,
8948 forget anything we know about the others. */
8949 for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
8950 reg_values[dreg + i] = 0;
8951 }
8952 else if (GET_CODE (dest) == MEM)
8953 {
8954 /* Invalidate conflicting memory locations. */
8955 reload_cse_invalidate_mem (dest);
8956
8957 /* If we're storing a register to memory, add DEST to the list
8958 in REG_VALUES. */
8959 if (sreg >= 0 && ! side_effects_p (dest))
38a448ca 8960 reg_values[sreg] = gen_rtx_EXPR_LIST (dest_mode, dest,
2a9fb548
ILT
8961 reg_values[sreg]);
8962 }
8963 else
8964 {
8965 /* We should have bailed out earlier. */
8966 abort ();
8967 }
8968}
This page took 1.690787 seconds and 5 git commands to generate.