1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include <stdio.h>
22 #include "config.h"
23 #include "rtl.h"
24 #include "obstack.h"
25 #include "insn-config.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "reload.h"
33 #include "recog.h"
34 #include "basic-block.h"
35 #include "output.h"
36
37 /* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
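
/* A purely illustrative example (the register numbers, mode and stack
   offset are invented): suppose pseudo 107 got no hard reg and was
   given the stack slot (MEM (PLUS (REG fp) (CONST_INT -8))), but is
   used by an insn

       (SET (REG 107) (PLUS (REG 107) (CONST_INT 4)))

   whose operands must be in registers.  If hard reg 2 is chosen as the
   reload reg for it, the reload pass rewrites this as roughly

       (SET (REG 2) (MEM (PLUS (REG fp) (CONST_INT -8))))
       (SET (REG 2) (PLUS (REG 2) (CONST_INT 4)))
       (SET (MEM (PLUS (REG fp) (CONST_INT -8))) (REG 2))

   i.e. an input reload, the original insn with the pseudo replaced by
   the reload reg, and an output reload back to the stack slot.  */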
69
70
71 #ifndef REGISTER_MOVE_COST
72 #define REGISTER_MOVE_COST(x, y) 2
73 #endif
74
75 #ifndef MEMORY_MOVE_COST
76 #define MEMORY_MOVE_COST(x) 4
77 #endif
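
/* A hypothetical target could override these defaults in its *.h file,
   for example

       #define REGISTER_MOVE_COST(CLASS1, CLASS2) \
         ((CLASS1) == (CLASS2) ? 2 : 4)
       #define MEMORY_MOVE_COST(MODE) 6

   The absolute numbers are not significant by themselves; only their
   relative values matter when the costs of alternatives are compared.  */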
78 \f
79 /* During reload_as_needed, element N contains a REG rtx for the hard reg
80 into which reg N has been reloaded (perhaps for a previous insn). */
81 static rtx *reg_last_reload_reg;
82
83 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85 static char *reg_has_output_reload;
86
87 /* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89 static HARD_REG_SET reg_is_output_reload;
90
91 /* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95 rtx *reg_equiv_constant;
96
97 /* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
101 rtx *reg_equiv_memory_loc;
102
103 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105 (because its displacement is too big for the machine). */
106 rtx *reg_equiv_address;
107
108 /* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
110 rtx *reg_equiv_mem;
111
112 /* Widest width in which each pseudo reg is referred to (via subreg). */
113 static int *reg_max_ref_width;
114
115 /* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117 static rtx *reg_equiv_init;
118
119 /* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
125
126 /* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
131
132 /* Number of spill-regs so far; number of valid elements of spill_regs. */
133 static int n_spills;
134
135 /* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet. This rtx is reused, provided it has
138 the proper mode. */
139 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
140
141 /* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
145
146 /* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
151
152 /* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155 HARD_REG_SET forbidden_regs;
156
157 /* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
161
162 (spill_reg_order prevents these registers from being used to start a
163 group.) */
164 static HARD_REG_SET bad_spill_regs;
165
166 /* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `n_spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169 static short spill_regs[FIRST_PSEUDO_REGISTER];
170
171 /* Describes order of preference for putting regs into spill_regs.
172 Contains the numbers of all the hard regs, in order most preferred first.
173 This order is different for each function.
174 It is set up by order_regs_for_reload.
175 Empty elements at the end contain -1. */
176 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
177
178 /* 1 for a hard register that appears explicitly in the rtl
179 (for example, function value registers, special registers
180 used by insns, structure value pointer registers). */
181 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
182
183 /* Indicates if a register was counted against the need for
184 groups. 0 means it can count against max_nongroups instead. */
185 static HARD_REG_SET counted_for_groups;
186
187 /* Indicates if a register was counted against the need for
188 non-groups. 0 means it can become part of a new group.
189 During choose_reload_regs, 1 here means don't use this reg
190 as part of a group, even if it seems to be otherwise ok. */
191 static HARD_REG_SET counted_for_nongroups;
192
193 /* Indexed by pseudo reg number N,
194 says may not delete stores into the real (memory) home of pseudo N.
195 This is set if we already substituted a memory equivalent in some uses,
196 which happens when we have to eliminate the fp from it. */
197 static char *cannot_omit_stores;
198
199 /* Nonzero if indirect addressing is supported on the machine; this means
200 that spilling (REG n) does not require reloading it into a register in
201 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
202 value indicates the level of indirect addressing supported, e.g., two
203 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
204 a hard register. */
205
206 static char spill_indirect_levels;
207
208 /* Nonzero if indirect addressing is supported when the innermost MEM is
209 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
210 which these are valid is the same as spill_indirect_levels, above. */
211
212 char indirect_symref_ok;
213
214 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
215
216 char double_reg_address_ok;
217
218 /* Record the stack slot for each spilled hard register. */
219
220 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
221
222 /* Width allocated so far for that stack slot. */
223
224 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
225
226 /* Indexed by register class and basic block number, nonzero if there is
227 any need for a spill register of that class in that basic block.
228 The pointer is 0 if we did stupid allocation and don't know
229 the structure of basic blocks. */
230
231 char *basic_block_needs[N_REG_CLASSES];
232
233 /* First uid used by insns created by reload in this function.
234 Used in find_equiv_reg. */
235 int reload_first_uid;
236
237 /* Flag set by local-alloc or global-alloc if anything is live in
238 a call-clobbered reg across calls. */
239
240 int caller_save_needed;
241
242 /* Set to 1 while reload_as_needed is operating.
243 Required by some machines to handle any generated moves differently. */
244
245 int reload_in_progress = 0;
246
247 /* These arrays record the insn_code of insns that may be needed to
248 perform input and output reloads of special objects. They provide a
249 place to pass a scratch register. */
250
251 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
252 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
253
254 /* This obstack is used for allocation of rtl during register elimination.
255 The allocated storage can be freed once find_reloads has processed the
256 insn. */
257
258 struct obstack reload_obstack;
259 char *reload_firstobj;
260
261 #define obstack_chunk_alloc xmalloc
262 #define obstack_chunk_free free
263
264 /* List of labels that must never be deleted. */
265 extern rtx forced_labels;
266 \f
267 /* This structure is used to record information about register eliminations.
268 Each array entry describes one possible way of eliminating a register
269 in favor of another. If there is more than one way of eliminating a
270 particular register, the most preferred should be specified first. */
271
272 static struct elim_table
273 {
274 int from; /* Register number to be eliminated. */
275 int to; /* Register number used as replacement. */
276 int initial_offset; /* Initial difference between values. */
277 int can_eliminate; /* Non-zero if this elimination can be done. */
278 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
279 insns made by reload. */
280 int offset; /* Current offset between the two regs. */
281 int max_offset; /* Maximum offset between the two regs. */
282 int previous_offset; /* Offset at end of previous insn. */
283 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
284 rtx from_rtx; /* REG rtx for the register to be eliminated.
285 We cannot simply compare the number since
286 we might then spuriously replace a hard
287 register corresponding to a pseudo
288 assigned to the reg to be eliminated. */
289 rtx to_rtx; /* REG rtx for the replacement. */
290 } reg_eliminate[] =
291
292 /* If a set of eliminable registers was specified, define the table from it.
293 Otherwise, default to the normal case of the frame pointer being
294 replaced by the stack pointer. */
295
296 #ifdef ELIMINABLE_REGS
297 ELIMINABLE_REGS;
298 #else
299 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
300 #endif
301
302 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
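
/* For illustration, a target with a separate argument pointer might
   define ELIMINABLE_REGS in its *.h file along these lines (a made-up
   example, not taken from any particular machine description):

       #define ELIMINABLE_REGS                                 \
         {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },      \
          { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM },      \
          { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}

   Each pair initializes the `from' and `to' fields of one entry of
   reg_eliminate; entries with the same `from' register are tried in
   order of preference, as noted above.  */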
303
304 /* Record the number of pending eliminations that have an offset not equal
305 to their initial offset. If non-zero, we use a new copy of each
306 replacement result in any insns encountered. */
307 static int num_not_at_initial_offset;
308
309 /* Count the number of registers that we may be able to eliminate. */
310 static int num_eliminable;
311
312 /* For each label, we record the offset of each elimination. If we reach
313 a label by more than one path and an offset differs, we cannot do the
314 elimination. This information is indexed by the number of the label.
315 The first table is an array of flags that records whether we have yet
316 encountered a label and the second table is an array of arrays, one
317 entry in the latter array for each elimination. */
318
319 static char *offsets_known_at;
320 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
321
322 /* Number of labels in the current function. */
323
324 static int num_labels;
325
326 struct hard_reg_n_uses { int regno; int uses; };
327 \f
328 static int possible_group_p PROTO((int, int *));
329 static void count_possible_groups PROTO((int *, enum machine_mode *,
330 int *));
331 static int modes_equiv_for_class_p PROTO((enum machine_mode,
332 enum machine_mode,
333 enum reg_class));
334 static void spill_failure PROTO((rtx));
335 static int new_spill_reg PROTO((int, int, int *, int *, int,
336 FILE *));
337 static void delete_dead_insn PROTO((rtx));
338 static void alter_reg PROTO((int, int));
339 static void set_label_offsets PROTO((rtx, rtx, int));
340 static int eliminate_regs_in_insn PROTO((rtx, int));
341 static void mark_not_eliminable PROTO((rtx, rtx));
342 static int spill_hard_reg PROTO((int, int, FILE *, int));
343 static void scan_paradoxical_subregs PROTO((rtx));
344 static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
345 struct hard_reg_n_uses *));
346 static void order_regs_for_reload PROTO((void));
347 static void reload_as_needed PROTO((rtx, int));
348 static void forget_old_reloads_1 PROTO((rtx, rtx));
349 static int reload_reg_class_lower PROTO((short *, short *));
350 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
351 enum machine_mode));
352 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
353 enum machine_mode));
354 static int reload_reg_free_p PROTO((int, int, enum reload_type));
355 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
356 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
357 static int allocate_reload_reg PROTO((int, rtx, int, int));
358 static void choose_reload_regs PROTO((rtx, rtx));
359 static void merge_assigned_reloads PROTO((rtx));
360 static void emit_reload_insns PROTO((rtx));
361 static void delete_output_reload PROTO((rtx, int, rtx));
362 static void inc_for_reload PROTO((rtx, rtx, int));
363 static int constraint_accepts_reg_p PROTO((char *, rtx));
364 static int count_occurrences PROTO((rtx, rtx));
365 \f
366 /* Initialize the reload pass once per compilation. */
367
368 void
369 init_reload ()
370 {
371 register int i;
372
373 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
374 Set spill_indirect_levels to the number of levels such addressing is
375 permitted, zero if it is not permitted at all. */
376
377 register rtx tem
378 = gen_rtx (MEM, Pmode,
379 gen_rtx (PLUS, Pmode,
380 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
381 GEN_INT (4)));
382 spill_indirect_levels = 0;
383
384 while (memory_address_p (QImode, tem))
385 {
386 spill_indirect_levels++;
387 tem = gen_rtx (MEM, Pmode, tem);
388 }
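
/* For example, on a machine where (MEM (PLUS (REG n) (CONST_INT 4)))
   is itself acceptable as an address but a second level of MEM is not,
   the loop above leaves spill_indirect_levels equal to 1.  */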
389
390 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
391
392 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
393 indirect_symref_ok = memory_address_p (QImode, tem);
394
395 /* See if reg+reg is a valid (and offsettable) address. */
396
397 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
398 {
399 tem = gen_rtx (PLUS, Pmode,
400 gen_rtx (REG, Pmode, FRAME_POINTER_REGNUM),
401 gen_rtx (REG, Pmode, i));
402 /* This way, we make sure that reg+reg is an offsettable address. */
403 tem = plus_constant (tem, 4);
404
405 if (memory_address_p (QImode, tem))
406 {
407 double_reg_address_ok = 1;
408 break;
409 }
410 }
411
412 /* Initialize obstack for our rtl allocation. */
413 gcc_obstack_init (&reload_obstack);
414 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
415 }
416
417 /* Main entry point for the reload pass.
418
419 FIRST is the first insn of the function being compiled.
420
421 GLOBAL nonzero means we were called from global_alloc
422 and should attempt to reallocate any pseudoregs that we
423 displace from hard regs we will use for reloads.
424 If GLOBAL is zero, we do not have enough information to do that,
425 so any pseudo reg that is spilled must go to the stack.
426
427 DUMPFILE is the global-reg debugging dump file stream, or 0.
428 If it is nonzero, messages are written to it to describe
429 which registers are seized as reload regs, which pseudo regs
430 are spilled from them, and where the pseudo regs are reallocated to.
431
432 Return value is nonzero if reload failed
433 and we must not do any more for this function. */
434
435 int
436 reload (first, global, dumpfile)
437 rtx first;
438 int global;
439 FILE *dumpfile;
440 {
441 register int class;
442 register int i, j;
443 register rtx insn;
444 register struct elim_table *ep;
445
446 int something_changed;
447 int something_needs_reloads;
448 int something_needs_elimination;
449 int new_basic_block_needs;
450 enum reg_class caller_save_spill_class = NO_REGS;
451 int caller_save_group_size = 1;
452
453 /* Nonzero means we couldn't get enough spill regs. */
454 int failure = 0;
455
456 /* The basic block number currently being processed for INSN. */
457 int this_block;
458
459 /* Make sure even insns with volatile mem refs are recognizable. */
460 init_recog ();
461
462 /* Enable find_equiv_reg to distinguish insns made by reload. */
463 reload_first_uid = get_max_uid ();
464
465 for (i = 0; i < N_REG_CLASSES; i++)
466 basic_block_needs[i] = 0;
467
468 #ifdef SECONDARY_MEMORY_NEEDED
469 /* Initialize the secondary memory table. */
470 clear_secondary_mem ();
471 #endif
472
473 /* Remember which hard regs appear explicitly
474 before we merge into `regs_ever_live' the ones in which
475 pseudo regs have been allocated. */
476 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
477
478 /* We don't have a stack slot for any spill reg yet. */
479 bzero (spill_stack_slot, sizeof spill_stack_slot);
480 bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);
481
482 /* Initialize the save area information for caller-save, in case some
483 are needed. */
484 init_save_areas ();
485
486 /* Compute which hard registers are now in use
487 as homes for pseudo registers.
488 This is done here rather than (eg) in global_alloc
489 because this point is reached even if not optimizing. */
490
491 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
492 mark_home_live (i);
493
494 /* Make sure that the last insn in the chain
495 is not something that needs reloading. */
496 emit_note (NULL_PTR, NOTE_INSN_DELETED);
497
498 /* Find all the pseudo registers that didn't get hard regs
499 but do have known equivalent constants or memory slots.
500 These include parameters (known equivalent to parameter slots)
501 and cse'd or loop-moved constant memory addresses.
502
503 Record constant equivalents in reg_equiv_constant
504 so they will be substituted by find_reloads.
505 Record memory equivalents in reg_equiv_memory_loc so they can
506 be substituted eventually by altering the REG-rtx's. */
507
508 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
509 bzero (reg_equiv_constant, max_regno * sizeof (rtx));
510 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
511 bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
512 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
513 bzero (reg_equiv_mem, max_regno * sizeof (rtx));
514 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
515 bzero (reg_equiv_init, max_regno * sizeof (rtx));
516 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
517 bzero (reg_equiv_address, max_regno * sizeof (rtx));
518 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
519 bzero (reg_max_ref_width, max_regno * sizeof (int));
520 cannot_omit_stores = (char *) alloca (max_regno);
521 bzero (cannot_omit_stores, max_regno);
522
523 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
524 Also find all paradoxical subregs
525 and find largest such for each pseudo. */
526
527 for (insn = first; insn; insn = NEXT_INSN (insn))
528 {
529 rtx set = single_set (insn);
530
531 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
532 {
533 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
534 if (note
535 #ifdef LEGITIMATE_PIC_OPERAND_P
536 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
537 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
538 #endif
539 )
540 {
541 rtx x = XEXP (note, 0);
542 i = REGNO (SET_DEST (set));
543 if (i > LAST_VIRTUAL_REGISTER)
544 {
545 if (GET_CODE (x) == MEM)
546 reg_equiv_memory_loc[i] = x;
547 else if (CONSTANT_P (x))
548 {
549 if (LEGITIMATE_CONSTANT_P (x))
550 reg_equiv_constant[i] = x;
551 else
552 reg_equiv_memory_loc[i]
553 = force_const_mem (GET_MODE (SET_DEST (set)), x);
554 }
555 else
556 continue;
557
558 /* If this register is being made equivalent to a MEM
559 and the MEM is not SET_SRC, the equivalencing insn
560 is one with the MEM as a SET_DEST and it occurs later.
561 So don't mark this insn now. */
562 if (GET_CODE (x) != MEM
563 || rtx_equal_p (SET_SRC (set), x))
564 reg_equiv_init[i] = insn;
565 }
566 }
567 }
568
569 /* If this insn is setting a MEM from a register equivalent to it,
570 this is the equivalencing insn. */
571 else if (set && GET_CODE (SET_DEST (set)) == MEM
572 && GET_CODE (SET_SRC (set)) == REG
573 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
574 && rtx_equal_p (SET_DEST (set),
575 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
576 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
577
578 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
579 scan_paradoxical_subregs (PATTERN (insn));
580 }
581
582 /* Does this function require a frame pointer? */
583
584 frame_pointer_needed = (! flag_omit_frame_pointer
585 #ifdef EXIT_IGNORE_STACK
586 /* ?? If EXIT_IGNORE_STACK is set, we will not save
587 and restore sp for alloca. So we can't eliminate
588 the frame pointer in that case. At some point,
589 we should improve this by emitting the
590 sp-adjusting insns for this case. */
591 || (current_function_calls_alloca
592 && EXIT_IGNORE_STACK)
593 #endif
594 || FRAME_POINTER_REQUIRED);
595
596 num_eliminable = 0;
597
598 /* Initialize the table of registers to eliminate. The way we do this
599 depends on how the eliminable registers were defined. */
600 #ifdef ELIMINABLE_REGS
601 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
602 {
603 ep->can_eliminate = ep->can_eliminate_previous
604 = (CAN_ELIMINATE (ep->from, ep->to)
605 && (ep->from != FRAME_POINTER_REGNUM || ! frame_pointer_needed));
606 }
607 #else
608 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
609 = ! frame_pointer_needed;
610 #endif
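
/* When ELIMINABLE_REGS is defined, CAN_ELIMINATE is supplied by the
   target as well.  A typical (hypothetical, simplified) definition is

       #define CAN_ELIMINATE(FROM, TO) \
         ((TO) == STACK_POINTER_REGNUM ? ! frame_pointer_needed : 1)

   i.e. eliminating into the stack pointer is allowed only when no
   frame pointer is needed, while other eliminations are always
   allowed.  */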
611
612 /* Count the number of eliminable registers and build the FROM and TO
613 REG rtx's. Note that code in gen_rtx will cause, e.g.,
614 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
615 We depend on this. */
616 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
617 {
618 num_eliminable += ep->can_eliminate;
619 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
620 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
621 }
622
623 num_labels = max_label_num () - get_first_label_num ();
624
625 /* Allocate the tables used to store offset information at labels. */
626 offsets_known_at = (char *) alloca (num_labels);
627 offsets_at
628 = (int (*)[NUM_ELIMINABLE_REGS])
629 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
630
631 offsets_known_at -= get_first_label_num ();
632 offsets_at -= get_first_label_num ();
633
634 /* Alter each pseudo-reg rtx to contain its hard reg number.
635 Assign stack slots to the pseudos that lack hard regs or equivalents.
636 Do not touch virtual registers. */
637
638 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
639 alter_reg (i, -1);
640
641 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
642 because the stack size may be a part of the offset computation for
643 register elimination. */
644 assign_stack_local (BLKmode, 0, 0);
645
646 /* If we have some registers we think can be eliminated, scan all insns to
647 see if there is an insn that sets one of these registers to something
648 other than itself plus a constant. If so, the register cannot be
649 eliminated. Doing this scan here eliminates an extra pass through the
650 main reload loop in the most common case where register elimination
651 cannot be done. */
652 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
653 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
654 || GET_CODE (insn) == CALL_INSN)
655 note_stores (PATTERN (insn), mark_not_eliminable);
656
657 #ifndef REGISTER_CONSTRAINTS
658 /* If all the pseudo regs have hard regs,
659 except for those that are never referenced,
660 we know that no reloads are needed. */
661 /* But that is not true if there are register constraints, since
662 in that case some pseudos might be in the wrong kind of hard reg. */
663
664 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
665 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
666 break;
667
668 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
669 return 0;
670 #endif
671
672 /* Compute the order of preference for hard registers to spill.
673 Store them by decreasing preference in potential_reload_regs. */
674
675 order_regs_for_reload ();
676
677 /* So far, no hard regs have been spilled. */
678 n_spills = 0;
679 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
680 spill_reg_order[i] = -1;
681
682 /* On most machines, we can't use any register explicitly used in the
683 rtl as a spill register. But on some, we have to. Those will have
684 taken care to keep the life of hard regs as short as possible. */
685
686 #ifdef SMALL_REGISTER_CLASSES
687 CLEAR_HARD_REG_SET (forbidden_regs);
688 #else
689 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
690 #endif
691
692 /* Spill any hard regs that we know we can't eliminate. */
693 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
694 if (! ep->can_eliminate)
695 {
696 spill_hard_reg (ep->from, global, dumpfile, 1);
697 regs_ever_live[ep->from] = 1;
698 }
699
700 if (global)
701 for (i = 0; i < N_REG_CLASSES; i++)
702 {
703 basic_block_needs[i] = (char *)alloca (n_basic_blocks);
704 bzero (basic_block_needs[i], n_basic_blocks);
705 }
706
707 /* From now on, we need to emit any moves without making new pseudos. */
708 reload_in_progress = 1;
709
710 /* This loop scans the entire function each go-round
711 and repeats until one repetition spills no additional hard regs. */
712
713 /* This flag is set when a pseudo reg is spilled,
714 to require another pass. Note that getting an additional reload
715 reg does not necessarily imply any pseudo reg was spilled;
716 sometimes we find a reload reg that no pseudo reg was allocated in. */
717 something_changed = 1;
718 /* This flag is set if there are any insns that require reloading. */
719 something_needs_reloads = 0;
720 /* This flag is set if there are any insns that require register
721 eliminations. */
722 something_needs_elimination = 0;
723 while (something_changed)
724 {
725 rtx after_call = 0;
726
727 /* For each class, number of reload regs needed in that class.
728 This is the maximum over all insns of the needs in that class
729 of the individual insn. */
730 int max_needs[N_REG_CLASSES];
731 /* For each class, size of group of consecutive regs
732 that is needed for the reloads of this class. */
733 int group_size[N_REG_CLASSES];
734 /* For each class, max number of consecutive groups needed.
735 (Each group contains group_size[CLASS] consecutive registers.) */
736 int max_groups[N_REG_CLASSES];
737 /* For each class, max number needed of regs that don't belong
738 to any of the groups. */
739 int max_nongroups[N_REG_CLASSES];
740 /* For each class, the machine mode which requires consecutive
741 groups of regs of that class.
742 If two different modes ever require groups of one class,
743 they must be the same size and equally restrictive for that class,
744 otherwise we can't handle the complexity. */
745 enum machine_mode group_mode[N_REG_CLASSES];
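/* For instance, on a hypothetical machine where a DImode value of class
   GENERAL_REGS needs two hard registers, a DImode reload of that class
   counts as a group: group_size for GENERAL_REGS becomes 2 and
   group_mode becomes DImode.  */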
746 /* Record the insn where each maximum need is first found. */
747 rtx max_needs_insn[N_REG_CLASSES];
748 rtx max_groups_insn[N_REG_CLASSES];
749 rtx max_nongroups_insn[N_REG_CLASSES];
750 rtx x;
751 int starting_frame_size = get_frame_size ();
752 static char *reg_class_names[] = REG_CLASS_NAMES;
753
754 something_changed = 0;
755 bzero (max_needs, sizeof max_needs);
756 bzero (max_groups, sizeof max_groups);
757 bzero (max_nongroups, sizeof max_nongroups);
758 bzero (max_needs_insn, sizeof max_needs_insn);
759 bzero (max_groups_insn, sizeof max_groups_insn);
760 bzero (max_nongroups_insn, sizeof max_nongroups_insn);
761 bzero (group_size, sizeof group_size);
762 for (i = 0; i < N_REG_CLASSES; i++)
763 group_mode[i] = VOIDmode;
764
765 /* Keep track of which basic blocks are needing the reloads. */
766 this_block = 0;
767
768 /* Remember whether any element of basic_block_needs
769 changes from 0 to 1 in this pass. */
770 new_basic_block_needs = 0;
771
772 /* Reset all offsets on eliminable registers to their initial values. */
773 #ifdef ELIMINABLE_REGS
774 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
775 {
776 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
777 ep->previous_offset = ep->offset
778 = ep->max_offset = ep->initial_offset;
779 }
780 #else
781 #ifdef INITIAL_FRAME_POINTER_OFFSET
782 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
783 #else
784 if (!FRAME_POINTER_REQUIRED)
785 abort ();
786 reg_eliminate[0].initial_offset = 0;
787 #endif
788 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
789 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
790 #endif
791
792 num_not_at_initial_offset = 0;
793
794 bzero (&offsets_known_at[get_first_label_num ()], num_labels);
795
796 /* Set a known offset for each forced label to be at the initial offset
797 of each elimination. We do this because we assume that all
798 computed jumps occur from a location where each elimination is
799 at its initial offset. */
800
801 for (x = forced_labels; x; x = XEXP (x, 1))
802 if (XEXP (x, 0))
803 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
804
805 /* For each pseudo register that has an equivalent location defined,
806 try to eliminate any eliminable registers (such as the frame pointer)
807 assuming initial offsets for the replacement register, which
808 is the normal case.
809
810 If the resulting location is directly addressable, substitute
811 the MEM we just got directly for the old REG.
812
813 If it is not addressable but is a constant or the sum of a hard reg
814 and constant, it is probably not addressable because the constant is
815 out of range, in that case record the address; we will generate
816 hairy code to compute the address in a register each time it is
817 needed.
818
819 If the location is not addressable, but does not have one of the
820 above forms, assign a stack slot. We have to do this to avoid the
821 potential of producing lots of reloads if, e.g., a location involves
822 a pseudo that didn't get a hard register and has an equivalent memory
823 location that also involves a pseudo that didn't get a hard register.
824
825 Perhaps at some point we will improve reload_when_needed handling
826 so this problem goes away. But that's very hairy. */
827
828 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
829 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
830 {
831 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
832
833 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
834 XEXP (x, 0)))
835 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
836 else if (CONSTANT_P (XEXP (x, 0))
837 || (GET_CODE (XEXP (x, 0)) == PLUS
838 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
839 && (REGNO (XEXP (XEXP (x, 0), 0))
840 < FIRST_PSEUDO_REGISTER)
841 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
842 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
843 else
844 {
845 /* Make a new stack slot. Then indicate that something
846 changed so we go back and recompute offsets for
847 eliminable registers because the allocation of memory
848 below might change some offset. reg_equiv_{mem,address}
849 will be set up for this pseudo on the next pass around
850 the loop. */
851 reg_equiv_memory_loc[i] = 0;
852 reg_equiv_init[i] = 0;
853 alter_reg (i, -1);
854 something_changed = 1;
855 }
856 }
857
858 /* If we allocated another pseudo to the stack, redo elimination
859 bookkeeping. */
860 if (something_changed)
861 continue;
862
863 /* If caller-saves needs a group, initialize the group to include
864 the size and mode required for caller-saves. */
865
866 if (caller_save_group_size > 1)
867 {
868 group_mode[(int) caller_save_spill_class] = Pmode;
869 group_size[(int) caller_save_spill_class] = caller_save_group_size;
870 }
871
872 /* Compute the most additional registers needed by any instruction.
873 Collect information separately for each class of regs. */
874
875 for (insn = first; insn; insn = NEXT_INSN (insn))
876 {
877 if (global && this_block + 1 < n_basic_blocks
878 && insn == basic_block_head[this_block+1])
879 ++this_block;
880
881 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
882 might include REG_LABEL), we need to see what effects this
883 has on the known offsets at labels. */
884
885 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
886 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
887 && REG_NOTES (insn) != 0))
888 set_label_offsets (insn, insn, 0);
889
890 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
891 {
892 /* Nonzero means don't use a reload reg that overlaps
893 the place where a function value can be returned. */
894 rtx avoid_return_reg = 0;
895
896 rtx old_body = PATTERN (insn);
897 int old_code = INSN_CODE (insn);
898 rtx old_notes = REG_NOTES (insn);
899 int did_elimination = 0;
900 int max_total_input_groups = 0, max_total_output_groups = 0;
901
902 /* To compute the number of reload registers of each class
903 needed for an insn, we must simulate what choose_reload_regs
904 can do. We do this by splitting an insn into an "input" and
905 an "output" part. RELOAD_OTHER reloads are used in both.
906 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
907 which must be live over the entire input section of reloads,
908 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
909 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
910 inputs.
911
912 The registers needed for output are RELOAD_OTHER and
913 RELOAD_FOR_OUTPUT, which are live for the entire output
914 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
915 reloads for each operand.
916
917 The total number of registers needed is the maximum of the
918 inputs and outputs. */
919
920 /* These just count RELOAD_OTHER. */
921 int insn_needs[N_REG_CLASSES];
922 int insn_groups[N_REG_CLASSES];
923 int insn_total_groups = 0;
924
925 /* Count RELOAD_FOR_INPUT reloads. */
926 int insn_needs_for_inputs[N_REG_CLASSES];
927 int insn_groups_for_inputs[N_REG_CLASSES];
928 int insn_total_groups_for_inputs = 0;
929
930 /* Count RELOAD_FOR_OUTPUT reloads. */
931 int insn_needs_for_outputs[N_REG_CLASSES];
932 int insn_groups_for_outputs[N_REG_CLASSES];
933 int insn_total_groups_for_outputs = 0;
934
935 /* Count RELOAD_FOR_INSN reloads. */
936 int insn_needs_for_insn[N_REG_CLASSES];
937 int insn_groups_for_insn[N_REG_CLASSES];
938 int insn_total_groups_for_insn = 0;
939
940 /* Count RELOAD_FOR_OTHER_ADDRESS reloads. */
941 int insn_needs_for_other_addr[N_REG_CLASSES];
942 int insn_groups_for_other_addr[N_REG_CLASSES];
943 int insn_total_groups_for_other_addr = 0;
944
945 /* Count RELOAD_FOR_INPUT_ADDRESS reloads. */
946 int insn_needs_for_in_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
947 int insn_groups_for_in_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
948 int insn_total_groups_for_in_addr[MAX_RECOG_OPERANDS];
949
950 /* Count RELOAD_FOR_OUTPUT_ADDRESS reloads. */
951 int insn_needs_for_out_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
952 int insn_groups_for_out_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
953 int insn_total_groups_for_out_addr[MAX_RECOG_OPERANDS];
954
955 /* Count RELOAD_FOR_OPERAND_ADDRESS reloads. */
956 int insn_needs_for_op_addr[N_REG_CLASSES];
957 int insn_groups_for_op_addr[N_REG_CLASSES];
958 int insn_total_groups_for_op_addr = 0;
959
960 #if 0 /* This wouldn't work nowadays, since optimize_bit_field
961 looks for non-strict memory addresses. */
962 /* Optimization: a bit-field instruction whose field
963 happens to be a byte or halfword in memory
964 can be changed to a move instruction. */
965
966 if (GET_CODE (PATTERN (insn)) == SET)
967 {
968 rtx dest = SET_DEST (PATTERN (insn));
969 rtx src = SET_SRC (PATTERN (insn));
970
971 if (GET_CODE (dest) == ZERO_EXTRACT
972 || GET_CODE (dest) == SIGN_EXTRACT)
973 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
974 if (GET_CODE (src) == ZERO_EXTRACT
975 || GET_CODE (src) == SIGN_EXTRACT)
976 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
977 }
978 #endif
979
980 /* If needed, eliminate any eliminable registers. */
981 if (num_eliminable)
982 did_elimination = eliminate_regs_in_insn (insn, 0);
983
984 #ifdef SMALL_REGISTER_CLASSES
985 /* Set avoid_return_reg if this is an insn
986 that might use the value of a function call. */
987 if (GET_CODE (insn) == CALL_INSN)
988 {
989 if (GET_CODE (PATTERN (insn)) == SET)
990 after_call = SET_DEST (PATTERN (insn));
991 else if (GET_CODE (PATTERN (insn)) == PARALLEL
992 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
993 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
994 else
995 after_call = 0;
996 }
997 else if (after_call != 0
998 && !(GET_CODE (PATTERN (insn)) == SET
999 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1000 {
1001 if (reg_mentioned_p (after_call, PATTERN (insn)))
1002 avoid_return_reg = after_call;
1003 after_call = 0;
1004 }
1005 #endif /* SMALL_REGISTER_CLASSES */
1006
1007 /* Analyze the instruction. */
1008 find_reloads (insn, 0, spill_indirect_levels, global,
1009 spill_reg_order);
1010
1011 /* Remember for later shortcuts which insns had any reloads or
1012 register eliminations.
1013
1014 One might think that it would be worthwhile to mark insns
1015 that need register replacements but not reloads, but this is
1016 not safe because find_reloads may do some manipulation of
1017 the insn (such as swapping commutative operands), which would
1018 be lost when we restore the old pattern after register
1019 replacement. So the actions of find_reloads must be redone in
1020 subsequent passes or in reload_as_needed.
1021
1022 However, it is safe to mark insns that need reloads
1023 but not register replacement. */
1024
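/* Record these facts by reusing the insn's mode field: QImode marks an
   insn that needed register elimination, HImode marks one that needs
   reloads, an existing DImode marking is preserved, and VOIDmode means
   neither applies.  */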
1025 PUT_MODE (insn, (did_elimination ? QImode
1026 : n_reloads ? HImode
1027 : GET_MODE (insn) == DImode ? DImode
1028 : VOIDmode));
1029
1030 /* Discard any register replacements done. */
1031 if (did_elimination)
1032 {
1033 obstack_free (&reload_obstack, reload_firstobj);
1034 PATTERN (insn) = old_body;
1035 INSN_CODE (insn) = old_code;
1036 REG_NOTES (insn) = old_notes;
1037 something_needs_elimination = 1;
1038 }
1039
1040 /* If this insn has no reloads, we need not do anything except
1041 in the case of a CALL_INSN when we have caller-saves and
1042 caller-save needs reloads. */
1043
1044 if (n_reloads == 0
1045 && ! (GET_CODE (insn) == CALL_INSN
1046 && caller_save_spill_class != NO_REGS))
1047 continue;
1048
1049 something_needs_reloads = 1;
1050
1051 for (i = 0; i < N_REG_CLASSES; i++)
1052 {
1053 insn_needs[i] = 0, insn_groups[i] = 0;
1054 insn_needs_for_inputs[i] = 0, insn_groups_for_inputs[i] = 0;
1055 insn_needs_for_outputs[i] = 0, insn_groups_for_outputs[i] = 0;
1056 insn_needs_for_insn[i] = 0, insn_groups_for_insn[i] = 0;
1057 insn_needs_for_op_addr[i] = 0, insn_groups_for_op_addr[i] = 0;
1058 insn_needs_for_other_addr[i] = 0;
1059 insn_groups_for_other_addr[i] = 0;
1060 }
1061
1062 for (i = 0; i < reload_n_operands; i++)
1063 {
1064 insn_total_groups_for_in_addr[i] = 0;
1065 insn_total_groups_for_out_addr[i] = 0;
1066
1067 for (j = 0; j < N_REG_CLASSES; j++)
1068 {
1069 insn_needs_for_in_addr[i][j] = 0;
1070 insn_needs_for_out_addr[i][j] = 0;
1071 insn_groups_for_in_addr[i][j] = 0;
1072 insn_groups_for_out_addr[i][j] = 0;
1073 }
1074 }
1075
1076 /* Count each reload once in every class
1077 containing the reload's own class. */
1078
1079 for (i = 0; i < n_reloads; i++)
1080 {
1081 register enum reg_class *p;
1082 enum reg_class class = reload_reg_class[i];
1083 int size;
1084 enum machine_mode mode;
1085 int *this_groups;
1086 int *this_needs;
1087 int *this_total_groups;
1088
1089 /* Don't count the dummy reloads, for which one of the
1090 regs mentioned in the insn can be used for reloading.
1091 Don't count optional reloads.
1092 Don't count reloads that got combined with others. */
1093 if (reload_reg_rtx[i] != 0
1094 || reload_optional[i] != 0
1095 || (reload_out[i] == 0 && reload_in[i] == 0
1096 && ! reload_secondary_p[i]))
1097 continue;
1098
1099 /* Show that a reload register of this class is needed
1100 in this basic block. We do not use insn_needs and
1101 insn_groups because they are overly conservative for
1102 this purpose. */
1103 if (global && ! basic_block_needs[(int) class][this_block])
1104 {
1105 basic_block_needs[(int) class][this_block] = 1;
1106 new_basic_block_needs = 1;
1107 }
1108
1109 /* Decide which time-of-use to count this reload for. */
1110 switch (reload_when_needed[i])
1111 {
1112 case RELOAD_OTHER:
1113 this_needs = insn_needs;
1114 this_groups = insn_groups;
1115 this_total_groups = &insn_total_groups;
1116 break;
1117
1118 case RELOAD_FOR_INPUT:
1119 this_needs = insn_needs_for_inputs;
1120 this_groups = insn_groups_for_inputs;
1121 this_total_groups = &insn_total_groups_for_inputs;
1122 break;
1123
1124 case RELOAD_FOR_OUTPUT:
1125 this_needs = insn_needs_for_outputs;
1126 this_groups = insn_groups_for_outputs;
1127 this_total_groups = &insn_total_groups_for_outputs;
1128 break;
1129
1130 case RELOAD_FOR_INSN:
1131 this_needs = insn_needs_for_insn;
1132 this_groups = insn_groups_for_insn;
1133 this_total_groups = &insn_total_groups_for_insn;
1134 break;
1135
1136 case RELOAD_FOR_OTHER_ADDRESS:
1137 this_needs = insn_needs_for_other_addr;
1138 this_groups = insn_groups_for_other_addr;
1139 this_total_groups = &insn_total_groups_for_other_addr;
1140 break;
1141
1142 case RELOAD_FOR_INPUT_ADDRESS:
1143 this_needs = insn_needs_for_in_addr[reload_opnum[i]];
1144 this_groups = insn_groups_for_in_addr[reload_opnum[i]];
1145 this_total_groups
1146 = &insn_total_groups_for_in_addr[reload_opnum[i]];
1147 break;
1148
1149 case RELOAD_FOR_OUTPUT_ADDRESS:
1150 this_needs = insn_needs_for_out_addr[reload_opnum[i]];
1151 this_groups = insn_groups_for_out_addr[reload_opnum[i]];
1152 this_total_groups
1153 = &insn_total_groups_for_out_addr[reload_opnum[i]];
1154 break;
1155
1156 case RELOAD_FOR_OPERAND_ADDRESS:
1157 this_needs = insn_needs_for_op_addr;
1158 this_groups = insn_groups_for_op_addr;
1159 this_total_groups = &insn_total_groups_for_op_addr;
1160 break;
1161 }
1162
1163 mode = reload_inmode[i];
1164 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1165 mode = reload_outmode[i];
1166 size = CLASS_MAX_NREGS (class, mode);
1167 if (size > 1)
1168 {
1169 enum machine_mode other_mode, allocate_mode;
1170
1171 /* Count number of groups needed separately from
1172 number of individual regs needed. */
1173 this_groups[(int) class]++;
1174 p = reg_class_superclasses[(int) class];
1175 while (*p != LIM_REG_CLASSES)
1176 this_groups[(int) *p++]++;
1177 (*this_total_groups)++;
1178
1179 /* Record size and mode of a group of this class. */
1180 /* If more than one size group is needed,
1181 make all groups the largest needed size. */
1182 if (group_size[(int) class] < size)
1183 {
1184 other_mode = group_mode[(int) class];
1185 allocate_mode = mode;
1186
1187 group_size[(int) class] = size;
1188 group_mode[(int) class] = mode;
1189 }
1190 else
1191 {
1192 other_mode = mode;
1193 allocate_mode = group_mode[(int) class];
1194 }
1195
1196 /* Crash if two dissimilar machine modes both need
1197 groups of consecutive regs of the same class. */
1198
1199 if (other_mode != VOIDmode
1200 && other_mode != allocate_mode
1201 && ! modes_equiv_for_class_p (allocate_mode,
1202 other_mode,
1203 class))
1204 abort ();
1205 }
1206 else if (size == 1)
1207 {
1208 this_needs[(int) class] += 1;
1209 p = reg_class_superclasses[(int) class];
1210 while (*p != LIM_REG_CLASSES)
1211 this_needs[(int) *p++] += 1;
1212 }
1213 else
1214 abort ();
1215 }
1216
1217 /* All reloads have been counted for this insn;
1218 now merge the various times of use.
1219 This sets insn_needs, etc., to the maximum total number
1220 of registers needed at any point in this insn. */
1221
1222 for (i = 0; i < N_REG_CLASSES; i++)
1223 {
1224 int in_max, out_max;
1225
1226 for (in_max = 0, out_max = 0, j = 0;
1227 j < reload_n_operands; j++)
1228 {
1229 in_max = MAX (in_max, insn_needs_for_in_addr[j][i]);
1230 out_max = MAX (out_max, insn_needs_for_out_addr[j][i]);
1231 }
1232
1233 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1234 and operand addresses but not things used to reload them.
1235 Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads don't
1236 conflict with things needed to reload inputs or
1237 outputs. */
1238
1239 in_max = MAX (in_max, insn_needs_for_op_addr[i]);
1240 out_max = MAX (out_max, insn_needs_for_insn[i]);
1241
1242 insn_needs_for_inputs[i]
1243 = MAX (insn_needs_for_inputs[i]
1244 + insn_needs_for_op_addr[i]
1245 + insn_needs_for_insn[i],
1246 in_max + insn_needs_for_inputs[i]);
1247
1248 insn_needs_for_outputs[i] += out_max;
1249 insn_needs[i] += MAX (MAX (insn_needs_for_inputs[i],
1250 insn_needs_for_outputs[i]),
1251 insn_needs_for_other_addr[i]);
1252
1253 for (in_max = 0, out_max = 0, j = 0;
1254 j < reload_n_operands; j++)
1255 {
1256 in_max = MAX (in_max, insn_groups_for_in_addr[j][i]);
1257 out_max = MAX (out_max, insn_groups_for_out_addr[j][i]);
1258 }
1259
1260 in_max = MAX (in_max, insn_groups_for_op_addr[i]);
1261 out_max = MAX (out_max, insn_groups_for_insn[i]);
1262
1263 insn_groups_for_inputs[i]
1264 = MAX (insn_groups_for_inputs[i]
1265 + insn_groups_for_op_addr[i]
1266 + insn_groups_for_insn[i],
1267 in_max + insn_groups_for_inputs[i]);
1268
1269 insn_groups_for_outputs[i] += out_max;
1270 insn_groups[i] += MAX (MAX (insn_groups_for_inputs[i],
1271 insn_groups_for_outputs[i]),
1272 insn_groups_for_other_addr[i]);
1273 }
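
/* A small invented example of the merging above: if a class needs 2
   RELOAD_FOR_INPUT regs, 1 RELOAD_FOR_OPERAND_ADDRESS reg, no
   RELOAD_FOR_INSN regs, and at most 2 RELOAD_FOR_INPUT_ADDRESS regs
   for any single operand, then in_max is MAX (2, 1) = 2 and the input
   part of the insn needs MAX (2 + 1 + 0, 2 + 2) = 4 regs of that
   class.  */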
1274
1275 for (i = 0; i < reload_n_operands; i++)
1276 {
1277 max_total_input_groups
1278 = MAX (max_total_input_groups,
1279 insn_total_groups_for_in_addr[i]);
1280 max_total_output_groups
1281 = MAX (max_total_output_groups,
1282 insn_total_groups_for_out_addr[i]);
1283 }
1284
1285 max_total_input_groups = MAX (max_total_input_groups,
1286 insn_total_groups_for_op_addr);
1287 max_total_output_groups = MAX (max_total_output_groups,
1288 insn_total_groups_for_insn);
1289
1290 insn_total_groups_for_inputs
1291 = MAX (max_total_input_groups + insn_total_groups_for_op_addr
1292 + insn_total_groups_for_insn,
1293 max_total_input_groups + insn_total_groups_for_inputs);
1294
1295 insn_total_groups_for_outputs += max_total_output_groups;
1296
1297 insn_total_groups += MAX (MAX (insn_total_groups_for_outputs,
1298 insn_total_groups_for_inputs),
1299 insn_total_groups_for_other_addr);
1300
1301 /* If this is a CALL_INSN and caller-saves will need
1302 a spill register, act as if the spill register is
1303 needed for this insn. However, the spill register
1304 can be used by any reload of this insn, so we only
1305 need do something if no need for that class has
1306 been recorded.
1307
1308 The assumption that every CALL_INSN will trigger a
1309 caller-save is highly conservative; however, the number
1310 of cases where caller-saves will need a spill register but
1311 a block containing a CALL_INSN won't need a spill register
1312 of that class should be quite rare.
1313
1314 If a group is needed, the size and mode of the group will
1315 have been set up at the beginning of this loop. */
1316
1317 if (GET_CODE (insn) == CALL_INSN
1318 && caller_save_spill_class != NO_REGS)
1319 {
1320 int *caller_save_needs
1321 = (caller_save_group_size > 1 ? insn_groups : insn_needs);
1322
1323 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1324 {
1325 register enum reg_class *p
1326 = reg_class_superclasses[(int) caller_save_spill_class];
1327
1328 caller_save_needs[(int) caller_save_spill_class]++;
1329
1330 while (*p != LIM_REG_CLASSES)
1331 caller_save_needs[(int) *p++] += 1;
1332 }
1333
1334 if (caller_save_group_size > 1)
1335 insn_total_groups = MAX (insn_total_groups, 1);
1336
1337
1338 /* Show that this basic block will need a register of
1339 this class. */
1340
1341 if (global
1342 && ! (basic_block_needs[(int) caller_save_spill_class]
1343 [this_block]))
1344 {
1345 basic_block_needs[(int) caller_save_spill_class]
1346 [this_block] = 1;
1347 new_basic_block_needs = 1;
1348 }
1349 }
1350
1351 #ifdef SMALL_REGISTER_CLASSES
1352 /* If this insn stores the value of a function call,
1353 and that value is in a register that has been spilled,
1354 and if the insn needs a reload in a class
1355 that might use that register as the reload register,
1356 then add an extra need in that class.
1357 This makes sure we have a register available that does
1358 not overlap the return value. */
1359 if (avoid_return_reg)
1360 {
1361 int regno = REGNO (avoid_return_reg);
1362 int nregs
1363 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1364 int r;
1365 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1366
1367 /* First compute the "basic needs", which counts a
1368 need only in the smallest class in which it
1369 is required. */
1370
1371 bcopy (insn_needs, basic_needs, sizeof basic_needs);
1372 bcopy (insn_groups, basic_groups, sizeof basic_groups);
1373
1374 for (i = 0; i < N_REG_CLASSES; i++)
1375 {
1376 enum reg_class *p;
1377
1378 if (basic_needs[i] >= 0)
1379 for (p = reg_class_superclasses[i];
1380 *p != LIM_REG_CLASSES; p++)
1381 basic_needs[(int) *p] -= basic_needs[i];
1382
1383 if (basic_groups[i] >= 0)
1384 for (p = reg_class_superclasses[i];
1385 *p != LIM_REG_CLASSES; p++)
1386 basic_groups[(int) *p] -= basic_groups[i];
1387 }
1388
1389 /* Now count extra regs if there might be a conflict with
1390 the return value register.
1391
1392 ??? This is not quite correct because we don't properly
1393 handle the case of groups, but if we end up doing
1394 something wrong, it either will end up not mattering or
1395 we will abort elsewhere. */
1396
1397 for (r = regno; r < regno + nregs; r++)
1398 if (spill_reg_order[r] >= 0)
1399 for (i = 0; i < N_REG_CLASSES; i++)
1400 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1401 {
1402 if (basic_needs[i] > 0 || basic_groups[i] > 0)
1403 {
1404 enum reg_class *p;
1405
1406 insn_needs[i]++;
1407 p = reg_class_superclasses[i];
1408 while (*p != LIM_REG_CLASSES)
1409 insn_needs[(int) *p++]++;
1410 }
1411 }
1412 }
1413 #endif /* SMALL_REGISTER_CLASSES */
1414
1415 /* For each class, collect maximum need of any insn. */
1416
1417 for (i = 0; i < N_REG_CLASSES; i++)
1418 {
1419 if (max_needs[i] < insn_needs[i])
1420 {
1421 max_needs[i] = insn_needs[i];
1422 max_needs_insn[i] = insn;
1423 }
1424 if (max_groups[i] < insn_groups[i])
1425 {
1426 max_groups[i] = insn_groups[i];
1427 max_groups_insn[i] = insn;
1428 }
1429 if (insn_total_groups > 0)
1430 if (max_nongroups[i] < insn_needs[i])
1431 {
1432 max_nongroups[i] = insn_needs[i];
1433 max_nongroups_insn[i] = insn;
1434 }
1435 }
1436 }
1437 /* Note that there is a continue statement above. */
1438 }
1439
1440 /* If we allocated any new memory locations, make another pass
1441 since it might have changed elimination offsets. */
1442 if (starting_frame_size != get_frame_size ())
1443 something_changed = 1;
1444
1445 if (dumpfile)
1446 for (i = 0; i < N_REG_CLASSES; i++)
1447 {
1448 if (max_needs[i] > 0)
1449 fprintf (dumpfile,
1450 ";; Need %d reg%s of class %s (for insn %d).\n",
1451 max_needs[i], max_needs[i] == 1 ? "" : "s",
1452 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1453 if (max_nongroups[i] > 0)
1454 fprintf (dumpfile,
1455 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1456 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1457 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1458 if (max_groups[i] > 0)
1459 fprintf (dumpfile,
1460 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1461 max_groups[i], max_groups[i] == 1 ? "" : "s",
1462 mode_name[(int) group_mode[i]],
1463 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1464 }
1465
1466 /* If we have caller-saves, set up the save areas and see if caller-save
1467 will need a spill register. */
1468
1469 if (caller_save_needed
1470 && ! setup_save_areas (&something_changed)
1471 && caller_save_spill_class == NO_REGS)
1472 {
1473 /* The class we will need depends on whether the machine
1474 supports the sum of two registers for an address; see
1475 find_address_reloads for details. */
1476
1477 caller_save_spill_class
1478 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1479 caller_save_group_size
1480 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1481 something_changed = 1;
1482 }
1483
1484 /* See if anything that happened changes which eliminations are valid.
1485 For example, on the Sparc, whether or not the frame pointer can
1486 be eliminated can depend on what registers have been used. We need
1487 not check some conditions again (such as flag_omit_frame_pointer)
1488 since they can't have changed. */
1489
1490 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1491 if ((ep->from == FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1492 #ifdef ELIMINABLE_REGS
1493 || ! CAN_ELIMINATE (ep->from, ep->to)
1494 #endif
1495 )
1496 ep->can_eliminate = 0;
1497
1498 /* Look for the case where we have discovered that we can't replace
1499 register A with register B and that means that we will now be
1500 trying to replace register A with register C. This means we can
1501 no longer replace register C with register B and we need to disable
1502 such an elimination, if it exists. This occurs often with A == ap,
1503 B == sp, and C == fp. */
1504
1505 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1506 {
1507 struct elim_table *op;
1508 register int new_to = -1;
1509
1510 if (! ep->can_eliminate && ep->can_eliminate_previous)
1511 {
1512 /* Find the current elimination for ep->from, if there is a
1513 new one. */
1514 for (op = reg_eliminate;
1515 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1516 if (op->from == ep->from && op->can_eliminate)
1517 {
1518 new_to = op->to;
1519 break;
1520 }
1521
1522 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1523 disable it. */
1524 for (op = reg_eliminate;
1525 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1526 if (op->from == new_to && op->to == ep->to)
1527 op->can_eliminate = 0;
1528 }
1529 }
1530
1531 /* See if any registers that we thought we could eliminate the previous
1532 time are no longer eliminable. If so, something has changed and we
1533 must spill the register. Also, recompute the number of eliminable
1534 registers and see if the frame pointer is needed; it is if there is
1535 no elimination of the frame pointer that we can perform. */
1536
1537 frame_pointer_needed = 1;
1538 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1539 {
1540 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM)
1541 frame_pointer_needed = 0;
1542
1543 if (! ep->can_eliminate && ep->can_eliminate_previous)
1544 {
1545 ep->can_eliminate_previous = 0;
1546 spill_hard_reg (ep->from, global, dumpfile, 1);
1547 regs_ever_live[ep->from] = 1;
1548 something_changed = 1;
1549 num_eliminable--;
1550 }
1551 }
1552
1553 /* If all needs are met, we win. */
1554
1555 for (i = 0; i < N_REG_CLASSES; i++)
1556 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1557 break;
1558 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1559 break;
1560
1561 /* Not all needs are met; must spill some hard regs. */
1562
1563 /* Put all registers spilled so far back in potential_reload_regs, but
1564 put them at the front, since we've already spilled most of the
1565 pseudos in them (we might have left some pseudos unspilled if they
1566 were in a block that didn't need any spill registers of a conflicting
1567 class). We used to try to mark off the need for those registers,
1568 but doing so properly is very complex and reallocating them is the
1569 simpler approach. First, "pack" potential_reload_regs by pushing
1570 any nonnegative entries towards the end. That will leave room
1571 for the registers we already spilled.
1572
1573 Also, undo the marking of the spill registers from the last time
1574 around in FORBIDDEN_REGS since we will probably be allocating
1575 them again below.
1576
1577 ??? It is theoretically possible that we might end up not using one
1578 of our previously-spilled registers in this allocation, even though
1579 they are at the head of the list. It's not clear what to do about
1580 this, but it was no better before, when we marked off the needs met
1581 by the previously-spilled registers. With the current code, globals
1582 can be allocated into these registers, but locals cannot. */
1583
1584 if (n_spills)
1585 {
1586 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1587 if (potential_reload_regs[i] != -1)
1588 potential_reload_regs[j--] = potential_reload_regs[i];
1589
1590 for (i = 0; i < n_spills; i++)
1591 {
1592 potential_reload_regs[i] = spill_regs[i];
1593 spill_reg_order[spill_regs[i]] = -1;
1594 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1595 }
1596
1597 n_spills = 0;
1598 }
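/* A purely hypothetical illustration of the repacking above: with
   FIRST_PSEUDO_REGISTER == 8, potential_reload_regs == {2, -1, 5, -1, -1,
   -1, -1, -1} and spill_regs == {3, 7}, the first loop moves 2 and 5 into
   the last two slots (indexes 6 and 7) and the second loop stores 3 and 7
   at indexes 0 and 1, so the registers spilled last time around are
   considered first on this pass.  */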
1599
1600 /* Now find more reload regs to satisfy the remaining need.
1601 Do it by ascending class number, since otherwise a reg
1602 might be spilled for a big class and might fail to count
1603 for a smaller class even though it belongs to that class.
1604
1605 Count spilled regs in `spills', and add entries to
1606 `spill_regs' and `spill_reg_order'.
1607
1608 ??? Note there is a problem here.
1609 When there is a need for a group in a high-numbered class,
1610 and also need for non-group regs that come from a lower class,
1611 the non-group regs are chosen first. If there aren't many regs,
1612 they might leave no room for a group.
1613
1614 This was happening on the 386. To fix it, we added the code
1615 that calls possible_group_p, so that the lower class won't
1616 break up the last possible group.
1617
1618 Really fixing the problem would require changes above
1619 in counting the regs already spilled, and in choose_reload_regs.
1620 It might be hard to avoid introducing bugs there. */
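/* To illustrate the scenario described above (registers invented):
   suppose a class needs one group of two adjacent regs, a lower class
   needs two non-group regs, and only regs 1, 2 and 3 are still spillable.
   If the non-group needs were satisfied first with regs 1 and 3, no
   adjacent pair would remain for the group; possible_group_p is what keeps
   the single-register choices from breaking up that last pair.  */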
1621
1622 CLEAR_HARD_REG_SET (counted_for_groups);
1623 CLEAR_HARD_REG_SET (counted_for_nongroups);
1624
1625 for (class = 0; class < N_REG_CLASSES; class++)
1626 {
1627 /* First get the groups of registers.
1628 If we got single registers first, we might fragment
1629 possible groups. */
1630 while (max_groups[class] > 0)
1631 {
1632 /* If any single spilled regs happen to form groups,
1633 count them now. Maybe we don't really need
1634 to spill another group. */
1635 count_possible_groups (group_size, group_mode, max_groups);
1636
1637 if (max_groups[class] <= 0)
1638 break;
1639
1640 /* Groups of size 2 (the only groups used on most machines)
1641 are treated specially. */
1642 if (group_size[class] == 2)
1643 {
1644 /* First, look for a register that will complete a group. */
1645 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1646 {
1647 int other;
1648
1649 j = potential_reload_regs[i];
1650 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1651 &&
1652 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1653 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1654 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1655 && HARD_REGNO_MODE_OK (other, group_mode[class])
1656 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1657 other)
1658 /* We don't want one part of another group.
1659 We could get "two groups" that overlap! */
1660 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1661 ||
1662 (j < FIRST_PSEUDO_REGISTER - 1
1663 && (other = j + 1, spill_reg_order[other] >= 0)
1664 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1665 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1666 && HARD_REGNO_MODE_OK (j, group_mode[class])
1667 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1668 other)
1669 && ! TEST_HARD_REG_BIT (counted_for_groups,
1670 other))))
1671 {
1672 register enum reg_class *p;
1673
1674 /* We have found one that will complete a group,
1675 so count off one group as provided. */
1676 max_groups[class]--;
1677 p = reg_class_superclasses[class];
1678 while (*p != LIM_REG_CLASSES)
1679 max_groups[(int) *p++]--;
1680
1681 /* Indicate both these regs are part of a group. */
1682 SET_HARD_REG_BIT (counted_for_groups, j);
1683 SET_HARD_REG_BIT (counted_for_groups, other);
1684 break;
1685 }
1686 }
1687 /* We can't complete a group, so start one. */
1688 if (i == FIRST_PSEUDO_REGISTER)
1689 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1690 {
1691 int k;
1692 j = potential_reload_regs[i];
1693 /* Verify that J+1 is a potential reload reg. */
1694 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1695 if (potential_reload_regs[k] == j + 1)
1696 break;
1697 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1698 && k < FIRST_PSEUDO_REGISTER
1699 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1700 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1701 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1702 && HARD_REGNO_MODE_OK (j, group_mode[class])
1703 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1704 j + 1)
1705 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1706 break;
1707 }
1708
1709 /* I should be the index in potential_reload_regs
1710 of the new reload reg we have found. */
1711
1712 if (i >= FIRST_PSEUDO_REGISTER)
1713 {
1714 /* There are no groups left to spill. */
1715 spill_failure (max_groups_insn[class]);
1716 failure = 1;
1717 goto failed;
1718 }
1719 else
1720 something_changed
1721 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1722 global, dumpfile);
1723 }
1724 else
1725 {
1726 /* For groups of more than 2 registers,
1727 look for a sufficient sequence of unspilled registers,
1728 and spill them all at once. */
1729 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1730 {
1731 int k;
1732
1733 j = potential_reload_regs[i];
1734 if (j >= 0
1735 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1736 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1737 {
1738 /* Check each reg in the sequence. */
1739 for (k = 0; k < group_size[class]; k++)
1740 if (! (spill_reg_order[j + k] < 0
1741 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1742 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1743 break;
1744 /* We got a full sequence, so spill them all. */
1745 if (k == group_size[class])
1746 {
1747 register enum reg_class *p;
1748 for (k = 0; k < group_size[class]; k++)
1749 {
1750 int idx;
1751 SET_HARD_REG_BIT (counted_for_groups, j + k);
1752 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1753 if (potential_reload_regs[idx] == j + k)
1754 break;
1755 something_changed
1756 |= new_spill_reg (idx, class,
1757 max_needs, NULL_PTR,
1758 global, dumpfile);
1759 }
1760
1761 /* We have found one that will complete a group,
1762 so count off one group as provided. */
1763 max_groups[class]--;
1764 p = reg_class_superclasses[class];
1765 while (*p != LIM_REG_CLASSES)
1766 max_groups[(int) *p++]--;
1767
1768 break;
1769 }
1770 }
1771 }
1772 /* We couldn't find any registers for this reload.
1773 Avoid going into an infinite loop. */
1774 if (i >= FIRST_PSEUDO_REGISTER)
1775 {
1776 /* There are no groups left. */
1777 spill_failure (max_groups_insn[class]);
1778 failure = 1;
1779 goto failed;
1780 }
1781 }
1782 }
1783
1784 /* Now similarly satisfy all need for single registers. */
1785
1786 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1787 {
1788 #ifdef SMALL_REGISTER_CLASSES
1789 /* This should be right for all machines, but only the 386
1790 is known to need it, so this conditional plays safe.
1791 ??? For 2.5, try making this unconditional. */
1792 /* If we spilled enough regs, but they weren't counted
1793 against the non-group need, see if we can count them now.
1794 If so, we can avoid some actual spilling. */
1795 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1796 for (i = 0; i < n_spills; i++)
1797 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1798 spill_regs[i])
1799 && !TEST_HARD_REG_BIT (counted_for_groups,
1800 spill_regs[i])
1801 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1802 spill_regs[i])
1803 && max_nongroups[class] > 0)
1804 {
1805 register enum reg_class *p;
1806
1807 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1808 max_nongroups[class]--;
1809 p = reg_class_superclasses[class];
1810 while (*p != LIM_REG_CLASSES)
1811 max_nongroups[(int) *p++]--;
1812 }
1813 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1814 break;
1815 #endif
1816
1817 /* Consider the potential reload regs that aren't
1818 yet in use as reload regs, in order of preference.
1819 Find the most preferred one that's in this class. */
1820
1821 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1822 if (potential_reload_regs[i] >= 0
1823 && TEST_HARD_REG_BIT (reg_class_contents[class],
1824 potential_reload_regs[i])
1825 /* If this reg will not be available for groups,
1826 pick one that does not foreclose possible groups.
1827 This is a kludge, and not very general,
1828 but it should be sufficient to make the 386 work,
1829 and the problem should not occur on machines with
1830 more registers. */
1831 && (max_nongroups[class] == 0
1832 || possible_group_p (potential_reload_regs[i], max_groups)))
1833 break;
1834
1835 /* If we couldn't get a register, try to get one even if we
1836 might foreclose possible groups. This may cause problems
1837 later, but that's better than aborting now, since it is
1838 possible that we will, in fact, be able to form the needed
1839 group even with this allocation. */
1840
1841 if (i >= FIRST_PSEUDO_REGISTER
1842 && (asm_noperands (max_needs[class] > 0
1843 ? max_needs_insn[class]
1844 : max_nongroups_insn[class])
1845 < 0))
1846 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1847 if (potential_reload_regs[i] >= 0
1848 && TEST_HARD_REG_BIT (reg_class_contents[class],
1849 potential_reload_regs[i]))
1850 break;
1851
1852 /* I should be the index in potential_reload_regs
1853 of the new reload reg we have found. */
1854
1855 if (i >= FIRST_PSEUDO_REGISTER)
1856 {
1857 /* There are no possible registers left to spill. */
1858 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1859 : max_nongroups_insn[class]);
1860 failure = 1;
1861 goto failed;
1862 }
1863 else
1864 something_changed
1865 |= new_spill_reg (i, class, max_needs, max_nongroups,
1866 global, dumpfile);
1867 }
1868 }
1869 }
1870
1871 /* If global-alloc was run, notify it of any register eliminations we have
1872 done. */
1873 if (global)
1874 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1875 if (ep->can_eliminate)
1876 mark_elimination (ep->from, ep->to);
1877
1878 /* Insert code to save and restore call-clobbered hard regs
1879 around calls. Tell what mode to use so that we will process
1880 those insns in reload_as_needed if we have to. */
1881
1882 if (caller_save_needed)
1883 save_call_clobbered_regs (num_eliminable ? QImode
1884 : caller_save_spill_class != NO_REGS ? HImode
1885 : VOIDmode);
1886
1887 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1888 If that insn didn't set the register (i.e., it copied the register to
1889 memory), just delete that insn instead of the equivalencing insn plus
1890 anything now dead. If we call delete_dead_insn on that insn, we may
1891 delete the insn that actually sets the register if the register dies
1892 there and that is incorrect. */
1893
1894 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1895 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1896 && GET_CODE (reg_equiv_init[i]) != NOTE)
1897 {
1898 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1899 delete_dead_insn (reg_equiv_init[i]);
1900 else
1901 {
1902 PUT_CODE (reg_equiv_init[i], NOTE);
1903 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1904 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1905 }
1906 }
1907
1908 /* Use the reload registers where necessary
1909 by generating move instructions to move the must-be-register
1910 values into or out of the reload registers. */
1911
1912 if (something_needs_reloads || something_needs_elimination
1913 || (caller_save_needed && num_eliminable)
1914 || caller_save_spill_class != NO_REGS)
1915 reload_as_needed (first, global);
1916
1917 /* If we were able to eliminate the frame pointer, show that it is no
1918 longer live at the start of any basic block. If it is live by
1919 virtue of being in a pseudo, that pseudo will be marked live
1920 and hence the frame pointer will be known to be live via that
1921 pseudo. */
1922
1923 if (! frame_pointer_needed)
1924 for (i = 0; i < n_basic_blocks; i++)
1925 basic_block_live_at_start[i][FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1926 &= ~ ((REGSET_ELT_TYPE) 1 << (FRAME_POINTER_REGNUM % REGSET_ELT_BITS));
1927
1928 /* Come here (with failure set nonzero) if we can't get enough spill regs
1929 and we decide not to abort because of it. */
1930 failed:
1931
1932 reload_in_progress = 0;
1933
1934 /* Now eliminate all pseudo regs by modifying them into
1935 their equivalent memory references.
1936 The REG-rtx's for the pseudos are modified in place,
1937 so all insns that used to refer to them now refer to memory.
1938
1939 For a reg that has a reg_equiv_address, all those insns
1940 were changed by reloading so that no insns refer to it any longer;
1941 but the DECL_RTL of a variable decl may refer to it,
1942 and if so this causes the debugging info to mention the variable. */
1943
1944 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1945 {
1946 rtx addr = 0;
1947 int in_struct = 0;
1948 if (reg_equiv_mem[i])
1949 {
1950 addr = XEXP (reg_equiv_mem[i], 0);
1951 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1952 }
1953 if (reg_equiv_address[i])
1954 addr = reg_equiv_address[i];
1955 if (addr)
1956 {
1957 if (reg_renumber[i] < 0)
1958 {
1959 rtx reg = regno_reg_rtx[i];
1960 XEXP (reg, 0) = addr;
1961 REG_USERVAR_P (reg) = 0;
1962 MEM_IN_STRUCT_P (reg) = in_struct;
1963 PUT_CODE (reg, MEM);
1964 }
1965 else if (reg_equiv_mem[i])
1966 XEXP (reg_equiv_mem[i], 0) = addr;
1967 }
1968 }
1969
1970 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1971 /* Make a pass over all the insns and remove death notes for things that
1972 are no longer registers or no longer die in the insn (e.g., an input
1973 and output pseudo being tied). */
1974
1975 for (insn = first; insn; insn = NEXT_INSN (insn))
1976 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1977 {
1978 rtx note, next;
1979
1980 for (note = REG_NOTES (insn); note; note = next)
1981 {
1982 next = XEXP (note, 1);
1983 if (REG_NOTE_KIND (note) == REG_DEAD
1984 && (GET_CODE (XEXP (note, 0)) != REG
1985 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1986 remove_note (insn, note);
1987 }
1988 }
1989 #endif
1990
1991 /* Indicate that we no longer have known memory locations or constants. */
1992 reg_equiv_constant = 0;
1993 reg_equiv_memory_loc = 0;
1994
1995 return failure;
1996 }
1997 \f
1998 /* Nonzero if, after spilling reg REGNO for non-groups,
1999 it will still be possible to find a group if we still need one. */
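/* As a hypothetical example (no real target intended): if some class still
   needs a group, regs 4 and 5 both belong to it, neither has been spilled
   and neither is in bad_spill_regs, then spilling REGNO == 2 for a
   non-group need still leaves that pair available and we return 1. If
   regs 4 and 5 were the only pair that could still form the group and
   REGNO were 4, we would return 0.  */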
2000
2001 static int
2002 possible_group_p (regno, max_groups)
2003 int regno;
2004 int *max_groups;
2005 {
2006 int i;
2007 int class = (int) NO_REGS;
2008
2009 for (i = 0; i < (int) N_REG_CLASSES; i++)
2010 if (max_groups[i] > 0)
2011 {
2012 class = i;
2013 break;
2014 }
2015
2016 if (class == (int) NO_REGS)
2017 return 1;
2018
2019 /* Consider each pair of consecutive registers. */
2020 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2021 {
2022 /* Ignore pairs that include reg REGNO. */
2023 if (i == regno || i + 1 == regno)
2024 continue;
2025
2026 /* Ignore pairs that are outside the class that needs the group.
2027 ??? Here we fail to handle the case where two different classes
2028 independently need groups. But this never happens with our
2029 current machine descriptions. */
2030 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2031 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2032 continue;
2033
2034 /* A pair of consecutive regs we can still spill does the trick. */
2035 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2036 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2037 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2038 return 1;
2039
2040 /* A pair of one already spilled and one we can spill does it
2041 provided the one already spilled is not otherwise reserved. */
2042 if (spill_reg_order[i] < 0
2043 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2044 && spill_reg_order[i + 1] >= 0
2045 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2046 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2047 return 1;
2048 if (spill_reg_order[i + 1] < 0
2049 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2050 && spill_reg_order[i] >= 0
2051 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2052 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2053 return 1;
2054 }
2055
2056 return 0;
2057 }
2058 \f
2059 /* Count any groups that can be formed from the registers recently spilled.
2060 This is done class by class, in order of ascending class number. */
2061
2062 static void
2063 count_possible_groups (group_size, group_mode, max_groups)
2064 int *group_size;
2065 enum machine_mode *group_mode;
2066 int *max_groups;
2067 {
2068 int i;
2069 /* Now find all consecutive groups of spilled registers
2070 and mark each group off against the need for such groups.
2071 But don't count them against ordinary need, yet. */
2072
2073 for (i = 0; i < N_REG_CLASSES; i++)
2074 if (group_size[i] > 1)
2075 {
2076 HARD_REG_SET new;
2077 int j;
2078
2079 CLEAR_HARD_REG_SET (new);
2080
2081 /* Make a mask of all the regs that are spill regs in class I. */
2082 for (j = 0; j < n_spills; j++)
2083 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
2084 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
2085 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
2086 spill_regs[j]))
2087 SET_HARD_REG_BIT (new, spill_regs[j]);
2088
2089 /* Find each consecutive group of them. */
2090 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
2091 if (TEST_HARD_REG_BIT (new, j)
2092 && j + group_size[i] <= FIRST_PSEUDO_REGISTER
2093 /* Next line in case group-mode for this class
2094 demands an even-odd pair. */
2095 && HARD_REGNO_MODE_OK (j, group_mode[i]))
2096 {
2097 int k;
2098 for (k = 1; k < group_size[i]; k++)
2099 if (! TEST_HARD_REG_BIT (new, j + k))
2100 break;
2101 if (k == group_size[i])
2102 {
2103 /* We found a group. Mark it off against this class's
2104 need for groups, and against each superclass too. */
2105 register enum reg_class *p;
2106 max_groups[i]--;
2107 p = reg_class_superclasses[i];
2108 while (*p != LIM_REG_CLASSES)
2109 max_groups[(int) *p++]--;
2110 /* Don't count these registers again. */
2111 for (k = 0; k < group_size[i]; k++)
2112 SET_HARD_REG_BIT (counted_for_groups, j + k);
2113 }
2114 /* Skip to the last reg in this group. When j is incremented
2115 above, it will then point to the first reg of the next
2116 possible group. */
2117 j += k - 1;
2118 }
2119 }
2120
2121 }
2122 \f
2123 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2124 another mode that needs to be reloaded for the same register class CLASS.
2125 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2126 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2127
2128 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2129 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2130 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2131 causes unnecessary failures on machines requiring alignment of register
2132 groups when the two modes are different sizes, because the larger mode has
2133 more strict alignment rules than the smaller mode. */
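/* For instance (a hypothetical machine): if only even-numbered registers
   of CLASS accept DImode but all of them accept SImode, then
   modes_equiv_for_class_p (DImode, SImode, CLASS) returns 1, because every
   register that allows DImode also allows SImode; the reverse question is
   deliberately not asked, for the reason given above.  */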
2134
2135 static int
2136 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2137 enum machine_mode allocate_mode, other_mode;
2138 enum reg_class class;
2139 {
2140 register int regno;
2141 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2142 {
2143 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2144 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2145 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2146 return 0;
2147 }
2148 return 1;
2149 }
2150
2151 /* Handle the failure to find a register to spill.
2152 INSN should be one of the insns which needed this particular spill reg. */
2153
2154 static void
2155 spill_failure (insn)
2156 rtx insn;
2157 {
2158 if (asm_noperands (PATTERN (insn)) >= 0)
2159 error_for_asm (insn, "`asm' needs too many reloads");
2160 else
2161 abort ();
2162 }
2163
2164 /* Add a new register to the tables of available spill-registers
2165 (as well as spilling all pseudos allocated to the register).
2166 I is the index of this register in potential_reload_regs.
2167 CLASS is the regclass whose need is being satisfied.
2168 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2169 so that this register can count off against them.
2170 MAX_NONGROUPS is 0 if this register is part of a group.
2171 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2172
2173 static int
2174 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2175 int i;
2176 int class;
2177 int *max_needs;
2178 int *max_nongroups;
2179 int global;
2180 FILE *dumpfile;
2181 {
2182 register enum reg_class *p;
2183 int val;
2184 int regno = potential_reload_regs[i];
2185
2186 if (i >= FIRST_PSEUDO_REGISTER)
2187 abort (); /* Caller failed to find any register. */
2188
2189 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2190 fatal ("fixed or forbidden register was spilled.\n\
2191 This may be due to a compiler bug or to impossible asm statements.");
2192
2193 /* Make reg REGNO an additional reload reg. */
2194
2195 potential_reload_regs[i] = -1;
2196 spill_regs[n_spills] = regno;
2197 spill_reg_order[regno] = n_spills;
2198 if (dumpfile)
2199 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2200
2201 /* Clear off the needs we just satisfied. */
2202
2203 max_needs[class]--;
2204 p = reg_class_superclasses[class];
2205 while (*p != LIM_REG_CLASSES)
2206 max_needs[(int) *p++]--;
2207
2208 if (max_nongroups && max_nongroups[class] > 0)
2209 {
2210 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2211 max_nongroups[class]--;
2212 p = reg_class_superclasses[class];
2213 while (*p != LIM_REG_CLASSES)
2214 max_nongroups[(int) *p++]--;
2215 }
2216
2217 /* Spill every pseudo reg that was allocated to this reg
2218 or to something that overlaps this reg. */
2219
2220 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2221
2222 /* If there are some registers still to eliminate and this register
2223 wasn't ever used before, additional stack space may have to be
2224 allocated to store this register. Thus, we may have changed the offset
2225 between the stack and frame pointers, so mark that something has changed.
2226 (If new pseudos were spilled, thus requiring more space, VAL would have
2227 been set non-zero by the call to spill_hard_reg above since additional
2228 reloads may be needed in that case.)
2229
2230 One might think that we need only set VAL to 1 if this is a call-used
2231 register. However, the set of registers that must be saved by the
2232 prologue is not identical to the call-used set. For example, the
2233 register used by the call insn for the return PC is a call-used register,
2234 but must be saved by the prologue. */
2235 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2236 val = 1;
2237
2238 regs_ever_live[spill_regs[n_spills]] = 1;
2239 n_spills++;
2240
2241 return val;
2242 }
2243 \f
2244 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2245 data that is dead in INSN. */
2246
2247 static void
2248 delete_dead_insn (insn)
2249 rtx insn;
2250 {
2251 rtx prev = prev_real_insn (insn);
2252 rtx prev_dest;
2253
2254 /* If the previous insn sets a register that dies in our insn, delete it
2255 too. */
2256 if (prev && GET_CODE (PATTERN (prev)) == SET
2257 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2258 && reg_mentioned_p (prev_dest, PATTERN (insn))
2259 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2260 delete_dead_insn (prev);
2261
2262 PUT_CODE (insn, NOTE);
2263 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2264 NOTE_SOURCE_FILE (insn) = 0;
2265 }
2266
2267 /* Modify the home of pseudo-reg I.
2268 The new home is present in reg_renumber[I].
2269
2270 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2271 or it may be -1, meaning there is none or it is not relevant.
2272 This is used so that all pseudos spilled from a given hard reg
2273 can share one stack slot. */
2274
2275 static void
2276 alter_reg (i, from_reg)
2277 register int i;
2278 int from_reg;
2279 {
2280 /* When outputting an inline function, this can happen
2281 for a reg that isn't actually used. */
2282 if (regno_reg_rtx[i] == 0)
2283 return;
2284
2285 /* If the reg got changed to a MEM at rtl-generation time,
2286 ignore it. */
2287 if (GET_CODE (regno_reg_rtx[i]) != REG)
2288 return;
2289
2290 /* Modify the reg-rtx to contain the new hard reg
2291 number or else to contain its pseudo reg number. */
2292 REGNO (regno_reg_rtx[i])
2293 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2294
2295 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2296 allocate a stack slot for it. */
2297
2298 if (reg_renumber[i] < 0
2299 && reg_n_refs[i] > 0
2300 && reg_equiv_constant[i] == 0
2301 && reg_equiv_memory_loc[i] == 0)
2302 {
2303 register rtx x;
2304 int inherent_size = PSEUDO_REGNO_BYTES (i);
2305 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2306 int adjust = 0;
2307
2308 /* Each pseudo reg has an inherent size which comes from its own mode,
2309 and a total size which provides room for paradoxical subregs
2310 which refer to the pseudo reg in wider modes.
2311
2312 We can use a slot already allocated if it provides both
2313 enough inherent space and enough total space.
2314 Otherwise, we allocate a new slot, making sure that it has no less
2315 inherent space, and no less total space, than the previous slot. */
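/* For example (sizes purely illustrative): an SImode pseudo has
   inherent_size == 4; if it was also referenced through a paradoxical
   DImode subreg, reg_max_ref_width makes total_size == 8, so a slot can
   be reused only if it is at least 8 bytes wide and its mode is at least
   4 bytes wide.  */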
2316 if (from_reg == -1)
2317 {
2318 /* No known place to spill from => no slot to reuse. */
2319 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2320 #if BYTES_BIG_ENDIAN
2321 /* Cancel the big-endian correction done in assign_stack_local.
2322 Get the address of the beginning of the slot.
2323 This is so we can do a big-endian correction unconditionally
2324 below. */
2325 adjust = inherent_size - total_size;
2326 #endif
2327 }
2328 /* Reuse a stack slot if possible. */
2329 else if (spill_stack_slot[from_reg] != 0
2330 && spill_stack_slot_width[from_reg] >= total_size
2331 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2332 >= inherent_size))
2333 x = spill_stack_slot[from_reg];
2334 /* Allocate a bigger slot. */
2335 else
2336 {
2337 /* Compute maximum size needed, both for inherent size
2338 and for total size. */
2339 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2340 if (spill_stack_slot[from_reg])
2341 {
2342 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2343 > inherent_size)
2344 mode = GET_MODE (spill_stack_slot[from_reg]);
2345 if (spill_stack_slot_width[from_reg] > total_size)
2346 total_size = spill_stack_slot_width[from_reg];
2347 }
2348 /* Make a slot with that size. */
2349 x = assign_stack_local (mode, total_size, -1);
2350 #if BYTES_BIG_ENDIAN
2351 /* Cancel the big-endian correction done in assign_stack_local.
2352 Get the address of the beginning of the slot.
2353 This is so we can do a big-endian correction unconditionally
2354 below. */
2355 adjust = GET_MODE_SIZE (mode) - total_size;
2356 #endif
2357 spill_stack_slot[from_reg] = x;
2358 spill_stack_slot_width[from_reg] = total_size;
2359 }
2360
2361 #if BYTES_BIG_ENDIAN
2362 /* On a big endian machine, the "address" of the slot
2363 is the address of the low part that fits its inherent mode. */
2364 if (inherent_size < total_size)
2365 adjust += (total_size - inherent_size);
2366 #endif /* BYTES_BIG_ENDIAN */
2367
2368 /* If we have any adjustment to make, or if the stack slot is the
2369 wrong mode, make a new stack slot. */
2370 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2371 {
2372 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2373 plus_constant (XEXP (x, 0), adjust));
2374 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2375 }
2376
2377 /* Save the stack slot for later. */
2378 reg_equiv_memory_loc[i] = x;
2379 }
2380 }
2381
2382 /* Mark the slots in regs_ever_live for the hard regs
2383 used by pseudo-reg number REGNO. */
2384
2385 void
2386 mark_home_live (regno)
2387 int regno;
2388 {
2389 register int i, lim;
2390 i = reg_renumber[regno];
2391 if (i < 0)
2392 return;
2393 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2394 while (i < lim)
2395 regs_ever_live[i++] = 1;
2396 }
2397 \f
2398 /* This function handles the tracking of elimination offsets around branches.
2399
2400 X is a piece of RTL being scanned.
2401
2402 INSN is the insn that it came from, if any.
2403
2404 INITIAL_P is non-zero if we are to set the offset to be the initial
2405 offset and zero if we are setting the offset of the label to be the
2406 current offset. */
2407
2408 static void
2409 set_label_offsets (x, insn, initial_p)
2410 rtx x;
2411 rtx insn;
2412 int initial_p;
2413 {
2414 enum rtx_code code = GET_CODE (x);
2415 rtx tem;
2416 int i;
2417 struct elim_table *p;
2418
2419 switch (code)
2420 {
2421 case LABEL_REF:
2422 if (LABEL_REF_NONLOCAL_P (x))
2423 return;
2424
2425 x = XEXP (x, 0);
2426
2427 /* ... fall through ... */
2428
2429 case CODE_LABEL:
2430 /* If we know nothing about this label, set the desired offsets. Note
2431 that this sets the offset at a label to be the offset before a label
2432 if we don't know anything about the label. This is not correct for
2433 the label after a BARRIER, but is the best guess we can make. If
2434 we guessed wrong, we will suppress an elimination that might have
2435 been possible had we been able to guess correctly. */
2436
2437 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2438 {
2439 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2440 offsets_at[CODE_LABEL_NUMBER (x)][i]
2441 = (initial_p ? reg_eliminate[i].initial_offset
2442 : reg_eliminate[i].offset);
2443 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2444 }
2445
2446 /* Otherwise, if this is the definition of a label and it is
2447 preceded by a BARRIER, set our offsets to the known offset of
2448 that label. */
2449
2450 else if (x == insn
2451 && (tem = prev_nonnote_insn (insn)) != 0
2452 && GET_CODE (tem) == BARRIER)
2453 {
2454 num_not_at_initial_offset = 0;
2455 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2456 {
2457 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2458 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2459 if (reg_eliminate[i].can_eliminate
2460 && (reg_eliminate[i].offset
2461 != reg_eliminate[i].initial_offset))
2462 num_not_at_initial_offset++;
2463 }
2464 }
2465
2466 else
2467 /* If neither of the above cases is true, compare each offset
2468 with those previously recorded and suppress any eliminations
2469 where the offsets disagree. */
2470
2471 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2472 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2473 != (initial_p ? reg_eliminate[i].initial_offset
2474 : reg_eliminate[i].offset))
2475 reg_eliminate[i].can_eliminate = 0;
2476
2477 return;
2478
2479 case JUMP_INSN:
2480 set_label_offsets (PATTERN (insn), insn, initial_p);
2481
2482 /* ... fall through ... */
2483
2484 case INSN:
2485 case CALL_INSN:
2486 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2487 and hence must have all eliminations at their initial offsets. */
2488 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2489 if (REG_NOTE_KIND (tem) == REG_LABEL)
2490 set_label_offsets (XEXP (tem, 0), insn, 1);
2491 return;
2492
2493 case ADDR_VEC:
2494 case ADDR_DIFF_VEC:
2495 /* Each of the labels in the address vector must be at their initial
2496 offsets. We want the first field for ADDR_VEC and the second
2497 field for ADDR_DIFF_VEC. */
2498
2499 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2500 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2501 insn, initial_p);
2502 return;
2503
2504 case SET:
2505 /* We only care about setting PC. If the source is not RETURN,
2506 IF_THEN_ELSE, or a label, disable any eliminations not at
2507 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2508 isn't one of those possibilities. For branches to a label,
2509 call ourselves recursively.
2510
2511 Note that this can disable elimination unnecessarily when we have
2512 a non-local goto since it will look like a non-constant jump to
2513 someplace in the current function. This isn't a significant
2514 problem since such jumps will normally be when all elimination
2515 pairs are back to their initial offsets. */
2516
2517 if (SET_DEST (x) != pc_rtx)
2518 return;
2519
2520 switch (GET_CODE (SET_SRC (x)))
2521 {
2522 case PC:
2523 case RETURN:
2524 return;
2525
2526 case LABEL_REF:
2527 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2528 return;
2529
2530 case IF_THEN_ELSE:
2531 tem = XEXP (SET_SRC (x), 1);
2532 if (GET_CODE (tem) == LABEL_REF)
2533 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2534 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2535 break;
2536
2537 tem = XEXP (SET_SRC (x), 2);
2538 if (GET_CODE (tem) == LABEL_REF)
2539 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2540 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2541 break;
2542 return;
2543 }
2544
2545 /* If we reach here, all eliminations must be at their initial
2546 offset because we are doing a jump to a variable address. */
2547 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2548 if (p->offset != p->initial_offset)
2549 p->can_eliminate = 0;
2550 }
2551 }
2552 \f
2553 /* Used for communication between the next two functions to properly share
2554 the vector for an ASM_OPERANDS. */
2555
2556 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2557
2558 /* Scan X and replace any eliminable registers (such as fp) with a
2559 replacement (such as sp), plus an offset.
2560
2561 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2562 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2563 MEM, we are allowed to replace a sum of a register and the constant zero
2564 with the register, which we cannot do outside a MEM. In addition, we need
2565 to record the fact that a register is referenced outside a MEM.
2566
2567 If INSN is nonzero, it is the insn containing X. If we replace a REG
2568 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2569 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2570 the REG is being modified.
2571
2572 If we see a modification to a register we know about, take the
2573 appropriate action (see case SET, below).
2574
2575 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2576 replacements done assuming all offsets are at their initial values. If
2577 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2578 encounter, return the actual location so that find_reloads will do
2579 the proper thing. */
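/* A hedged example of the basic transformation (the offset is invented):
   if the frame pointer is being eliminated in favor of the stack pointer
   and ep->previous_offset is 16, then (mem:SI (reg fp)) is rewritten as
   (mem:SI (plus (reg sp) (const_int 16))), while a bare (reg fp) outside
   any MEM becomes (plus (reg sp) (const_int 16)) and also sets
   ep->ref_outside_mem.  */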
2580
2581 rtx
2582 eliminate_regs (x, mem_mode, insn)
2583 rtx x;
2584 enum machine_mode mem_mode;
2585 rtx insn;
2586 {
2587 enum rtx_code code = GET_CODE (x);
2588 struct elim_table *ep;
2589 int regno;
2590 rtx new;
2591 int i, j;
2592 char *fmt;
2593 int copied = 0;
2594
2595 switch (code)
2596 {
2597 case CONST_INT:
2598 case CONST_DOUBLE:
2599 case CONST:
2600 case SYMBOL_REF:
2601 case CODE_LABEL:
2602 case PC:
2603 case CC0:
2604 case ASM_INPUT:
2605 case ADDR_VEC:
2606 case ADDR_DIFF_VEC:
2607 case RETURN:
2608 return x;
2609
2610 case REG:
2611 regno = REGNO (x);
2612
2613 /* First handle the case where we encounter a bare register that
2614 is eliminable. Replace it with a PLUS. */
2615 if (regno < FIRST_PSEUDO_REGISTER)
2616 {
2617 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2618 ep++)
2619 if (ep->from_rtx == x && ep->can_eliminate)
2620 {
2621 if (! mem_mode)
2622 ep->ref_outside_mem = 1;
2623 return plus_constant (ep->to_rtx, ep->previous_offset);
2624 }
2625
2626 }
2627 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2628 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2629 {
2630 /* In this case, find_reloads would attempt to either use an
2631 incorrect address (if something is not at its initial offset)
2632 or substitute a replaced address into an insn (which loses
2633 if the offset is changed by some later action). So we simply
2634 return the replaced stack slot (assuming it is changed by
2635 elimination) and ignore the fact that this is actually a
2636 reference to the pseudo. Ensure we make a copy of the
2637 address in case it is shared. */
2638 new = eliminate_regs (reg_equiv_memory_loc[regno],
2639 mem_mode, NULL_RTX);
2640 if (new != reg_equiv_memory_loc[regno])
2641 {
2642 cannot_omit_stores[regno] = 1;
2643 return copy_rtx (new);
2644 }
2645 }
2646 return x;
2647
2648 case PLUS:
2649 /* If this is the sum of an eliminable register and a constant, rework
2650 the sum. */
2651 if (GET_CODE (XEXP (x, 0)) == REG
2652 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2653 && CONSTANT_P (XEXP (x, 1)))
2654 {
2655 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2656 ep++)
2657 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2658 {
2659 if (! mem_mode)
2660 ep->ref_outside_mem = 1;
2661
2662 /* The only time we want to replace a PLUS with a REG (this
2663 occurs when the constant operand of the PLUS is the negative
2664 of the offset) is when we are inside a MEM. We won't want
2665 to do so at other times because that would change the
2666 structure of the insn in a way that reload can't handle.
2667 We special-case the commonest situation in
2668 eliminate_regs_in_insn, so just replace a PLUS with a
2669 PLUS here, unless inside a MEM. */
2670 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2671 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2672 return ep->to_rtx;
2673 else
2674 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2675 plus_constant (XEXP (x, 1),
2676 ep->previous_offset));
2677 }
2678
2679 /* If the register is not eliminable, we are done since the other
2680 operand is a constant. */
2681 return x;
2682 }
2683
2684 /* If this is part of an address, we want to bring any constant to the
2685 outermost PLUS. We will do this by doing register replacement in
2686 our operands and seeing if a constant shows up in one of them.
2687
2688 We assume here this is part of an address (or a "load address" insn)
2689 since an eliminable register is not likely to appear in any other
2690 context.
2691
2692 If we have (plus (eliminable) (reg)), we want to produce
2693 (plus (plus (replacement) (reg)) (const)). If this was part of a
2694 normal add insn, (plus (replacement) (reg)) will be pushed as a
2695 reload. This is the desired action. */
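/* Concretely (offset invented): if fp is being eliminated to sp with a
   previous offset of 8, then for (plus (reg fp) (reg 2)) we get
   new0 == (plus (reg sp) (const_int 8)) and new1 == (reg 2), and form_sum
   combines them so that the constant ends up in the outermost PLUS, as
   described above.  */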
2696
2697 {
2698 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2699 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX);
2700
2701 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2702 {
2703 /* If one side is a PLUS and the other side is a pseudo that
2704 didn't get a hard register but has a reg_equiv_constant,
2705 we must replace the constant here since it may no longer
2706 be in the position of any operand. */
2707 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2708 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2709 && reg_renumber[REGNO (new1)] < 0
2710 && reg_equiv_constant != 0
2711 && reg_equiv_constant[REGNO (new1)] != 0)
2712 new1 = reg_equiv_constant[REGNO (new1)];
2713 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2714 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2715 && reg_renumber[REGNO (new0)] < 0
2716 && reg_equiv_constant[REGNO (new0)] != 0)
2717 new0 = reg_equiv_constant[REGNO (new0)];
2718
2719 new = form_sum (new0, new1);
2720
2721 /* As above, if we are not inside a MEM we do not want to
2722 turn a PLUS into something else. We might try to do so here
2723 for an addition of 0 if we aren't optimizing. */
2724 if (! mem_mode && GET_CODE (new) != PLUS)
2725 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2726 else
2727 return new;
2728 }
2729 }
2730 return x;
2731
2732 case EXPR_LIST:
2733 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2734 if (XEXP (x, 0))
2735 {
2736 new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2737 if (new != XEXP (x, 0))
2738 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2739 }
2740
2741 /* ... fall through ... */
2742
2743 case INSN_LIST:
2744 /* Now do eliminations in the rest of the chain. If this was
2745 an EXPR_LIST, this might result in allocating more memory than is
2746 strictly needed, but it simplifies the code. */
2747 if (XEXP (x, 1))
2748 {
2749 new = eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX);
2750 if (new != XEXP (x, 1))
2751 return gen_rtx (INSN_LIST, GET_MODE (x), XEXP (x, 0), new);
2752 }
2753 return x;
2754
2755 case CALL:
2756 case COMPARE:
2757 case MINUS:
2758 case MULT:
2759 case DIV: case UDIV:
2760 case MOD: case UMOD:
2761 case AND: case IOR: case XOR:
2762 case LSHIFT: case ASHIFT: case ROTATE:
2763 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2764 case NE: case EQ:
2765 case GE: case GT: case GEU: case GTU:
2766 case LE: case LT: case LEU: case LTU:
2767 {
2768 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2769 rtx new1
2770 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX) : 0;
2771
2772 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2773 return gen_rtx (code, GET_MODE (x), new0, new1);
2774 }
2775 return x;
2776
2777 case PRE_INC:
2778 case POST_INC:
2779 case PRE_DEC:
2780 case POST_DEC:
2781 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2782 if (ep->to_rtx == XEXP (x, 0))
2783 {
2784 int size = GET_MODE_SIZE (mem_mode);
2785
2786 /* If more bytes than MEM_MODE are pushed, account for them. */
2787 #ifdef PUSH_ROUNDING
2788 if (ep->to_rtx == stack_pointer_rtx)
2789 size = PUSH_ROUNDING (size);
2790 #endif
2791 if (code == PRE_DEC || code == POST_DEC)
2792 ep->offset += size;
2793 else
2794 ep->offset -= size;
2795 }
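/* For instance (assuming the stack pointer is the replacement register of
   some elimination and PUSH_ROUNDING is not defined): a (pre_dec (reg sp))
   seen inside an SImode MEM adds 4 to that elimination's offset, since
   the push moves sp down by four bytes.  */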
2796
2797 /* Fall through to generic unary operation case. */
2798 case USE:
2799 case STRICT_LOW_PART:
2800 case NEG: case NOT:
2801 case SIGN_EXTEND: case ZERO_EXTEND:
2802 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2803 case FLOAT: case FIX:
2804 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2805 case ABS:
2806 case SQRT:
2807 case FFS:
2808 new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2809 if (new != XEXP (x, 0))
2810 return gen_rtx (code, GET_MODE (x), new);
2811 return x;
2812
2813 case SUBREG:
2814 /* Similar to above processing, but preserve SUBREG_WORD.
2815 Convert (subreg (mem)) to (mem) if not paradoxical.
2816 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2817 pseudo didn't get a hard reg, we must replace this with the
2818 eliminated version of the memory location because push_reloads
2819 may do the replacement in certain circumstances. */
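/* A hypothetical instance (4-byte words, ignoring the big-endian
   correction): if pseudo 110 got no hard reg, its equivalent memory
   location is (mem:DI addr), and that address changes under elimination,
   then (subreg:SI (reg:DI 110) 1) is rewritten below into an SImode MEM
   whose address is the eliminated address plus 4, i.e. the second word of
   the spill slot.  */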
2820 if (GET_CODE (SUBREG_REG (x)) == REG
2821 && (GET_MODE_SIZE (GET_MODE (x))
2822 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2823 && reg_equiv_memory_loc != 0
2824 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2825 {
2826 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2827 mem_mode, NULL_RTX);
2828
2829 /* If we didn't change anything, we must retain the pseudo. */
2830 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2831 new = XEXP (x, 0);
2832 else
2833 /* Otherwise, ensure NEW isn't shared in case we have to reload
2834 it. */
2835 new = copy_rtx (new);
2836 }
2837 else
2838 new = eliminate_regs (SUBREG_REG (x), mem_mode, NULL_RTX);
2839
2840 if (new != XEXP (x, 0))
2841 {
2842 if (GET_CODE (new) == MEM
2843 && (GET_MODE_SIZE (GET_MODE (x))
2844 <= GET_MODE_SIZE (GET_MODE (new)))
2845 #if defined(BYTE_LOADS_ZERO_EXTEND) || defined(BYTE_LOADS_SIGN_EXTEND)
2846 /* On these machines we will be reloading what is
2847 inside the SUBREG if it originally was a pseudo and
2848 the inner and outer modes are both a word or
2849 smaller. So leave the SUBREG then. */
2850 && ! (GET_CODE (SUBREG_REG (x)) == REG
2851 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2852 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
2853 #endif
2854 )
2855 {
2856 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2857 enum machine_mode mode = GET_MODE (x);
2858
2859 #if BYTES_BIG_ENDIAN
2860 offset += (MIN (UNITS_PER_WORD,
2861 GET_MODE_SIZE (GET_MODE (new)))
2862 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2863 #endif
2864
2865 PUT_MODE (new, mode);
2866 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2867 return new;
2868 }
2869 else
2870 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2871 }
2872
2873 return x;
2874
2875 case CLOBBER:
2876 /* If clobbering a register that is the replacement register for an
2877 elimination we still think can be performed, note that it cannot
2878 be performed. Otherwise, we need not be concerned about it. */
2879 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2880 if (ep->to_rtx == XEXP (x, 0))
2881 ep->can_eliminate = 0;
2882
2883 new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2884 if (new != XEXP (x, 0))
2885 return gen_rtx (code, GET_MODE (x), new);
2886 return x;
2887
2888 case ASM_OPERANDS:
2889 {
2890 rtx *temp_vec;
2891 /* Properly handle sharing input and constraint vectors. */
2892 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2893 {
2894 /* When we come to a new vector not seen before,
2895 scan all its elements; keep the old vector if none
2896 of them changes; otherwise, make a copy. */
2897 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2898 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2899 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2900 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
2901 mem_mode, NULL_RTX);
2902
2903 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2904 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2905 break;
2906
2907 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2908 new_asm_operands_vec = old_asm_operands_vec;
2909 else
2910 new_asm_operands_vec
2911 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2912 }
2913
2914 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2915 if (new_asm_operands_vec == old_asm_operands_vec)
2916 return x;
2917
2918 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2919 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2920 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2921 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2922 ASM_OPERANDS_SOURCE_FILE (x),
2923 ASM_OPERANDS_SOURCE_LINE (x));
2924 new->volatil = x->volatil;
2925 return new;
2926 }
2927
2928 case SET:
2929 /* Check for setting a register that we know about. */
2930 if (GET_CODE (SET_DEST (x)) == REG)
2931 {
2932 /* See if this is setting the replacement register for an
2933 elimination.
2934
2935 If DEST is the frame pointer, we do nothing because we assume that
2936 all assignments to the frame pointer are for non-local gotos and
2937 are being done at a time when they are valid and do not disturb
2938 anything else. Some machines want to eliminate a fake argument
2939 pointer with either the frame or stack pointer. Assignments to
2940 the frame pointer must not prevent this elimination. */
2941
2942 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2943 ep++)
2944 if (ep->to_rtx == SET_DEST (x)
2945 && SET_DEST (x) != frame_pointer_rtx)
2946 {
2947 /* If it is being incremented, adjust the offset. Otherwise,
2948 this elimination can't be done. */
2949 rtx src = SET_SRC (x);
2950
2951 if (GET_CODE (src) == PLUS
2952 && XEXP (src, 0) == SET_DEST (x)
2953 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2954 ep->offset -= INTVAL (XEXP (src, 1));
2955 else
2956 ep->can_eliminate = 0;
2957 }
2958
2959 /* Now check to see if we are assigning to a register that can be
2960 eliminated. If so, it must be as part of a PARALLEL, since we
2961 will not have been called if this is a single SET. So indicate
2962 that we can no longer eliminate this reg. */
2963 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2964 ep++)
2965 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
2966 ep->can_eliminate = 0;
2967 }
2968
2969 /* Now avoid the loop below in this common case. */
2970 {
2971 rtx new0 = eliminate_regs (SET_DEST (x), 0, NULL_RTX);
2972 rtx new1 = eliminate_regs (SET_SRC (x), 0, NULL_RTX);
2973
2974 /* If SET_DEST changed from a REG to a MEM and INSN is non-zero,
2975 write a CLOBBER insn. */
2976 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
2977 && insn != 0)
2978 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
2979
2980 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
2981 return gen_rtx (SET, VOIDmode, new0, new1);
2982 }
2983
2984 return x;
2985
2986 case MEM:
2987 /* Our only special processing is to pass the mode of the MEM to our
2988 recursive call and copy the flags. While we are here, handle this
2989 case more efficiently. */
2990 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), NULL_RTX);
2991 if (new != XEXP (x, 0))
2992 {
2993 new = gen_rtx (MEM, GET_MODE (x), new);
2994 new->volatil = x->volatil;
2995 new->unchanging = x->unchanging;
2996 new->in_struct = x->in_struct;
2997 return new;
2998 }
2999 else
3000 return x;
3001 }
3002
3003 /* Process each of our operands recursively. If any have changed, make a
3004 copy of the rtx. */
3005 fmt = GET_RTX_FORMAT (code);
3006 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3007 {
3008 if (*fmt == 'e')
3009 {
3010 new = eliminate_regs (XEXP (x, i), mem_mode, NULL_RTX);
3011 if (new != XEXP (x, i) && ! copied)
3012 {
3013 rtx new_x = rtx_alloc (code);
3014 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3015 + (sizeof (new_x->fld[0])
3016 * GET_RTX_LENGTH (code))));
3017 x = new_x;
3018 copied = 1;
3019 }
3020 XEXP (x, i) = new;
3021 }
3022 else if (*fmt == 'E')
3023 {
3024 int copied_vec = 0;
3025 for (j = 0; j < XVECLEN (x, i); j++)
3026 {
3027 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3028 if (new != XVECEXP (x, i, j) && ! copied_vec)
3029 {
3030 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3031 &XVECEXP (x, i, 0));
3032 if (! copied)
3033 {
3034 rtx new_x = rtx_alloc (code);
3035 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3036 + (sizeof (new_x->fld[0])
3037 * GET_RTX_LENGTH (code))));
3038 x = new_x;
3039 copied = 1;
3040 }
3041 XVEC (x, i) = new_v;
3042 copied_vec = 1;
3043 }
3044 XVECEXP (x, i, j) = new;
3045 }
3046 }
3047 }
3048
3049 return x;
3050 }
3051 \f
3052 /* Scan INSN and eliminate all eliminable registers in it.
3053
3054 If REPLACE is nonzero, do the replacement destructively. Also
3055 delete the insn as dead if it is setting an eliminable register.
3056
3057 If REPLACE is zero, do all our allocations in reload_obstack.
3058
3059 If no eliminations were done and this insn doesn't require any elimination
3060 processing (these are not identical conditions: it might be updating sp,
3061 but not referencing fp; this needs to be seen during reload_as_needed so
3062 that the offset between fp and sp can be taken into consideration), zero
3063 is returned. Otherwise, 1 is returned. */
3064
3065 static int
3066 eliminate_regs_in_insn (insn, replace)
3067 rtx insn;
3068 int replace;
3069 {
3070 rtx old_body = PATTERN (insn);
3071 rtx new_body;
3072 int val = 0;
3073 struct elim_table *ep;
3074
3075 if (! replace)
3076 push_obstacks (&reload_obstack, &reload_obstack);
3077
3078 if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
3079 && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
3080 {
3081 /* Check for setting an eliminable register. */
3082 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3083 if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
3084 {
3085 /* In this case this insn isn't serving a useful purpose. We
3086 will delete it in reload_as_needed once we know that this
3087 elimination is, in fact, being done.
3088
3089 If REPLACE isn't set, we can't delete this insn, but needn't
3090 process it since it won't be used unless something changes. */
3091 if (replace)
3092 delete_dead_insn (insn);
3093 val = 1;
3094 goto done;
3095 }
3096
3097 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3098 in the insn is the negative of the offset in FROM. Substitute
3099 (set (reg) (reg to)) for the insn and change its code.
3100
3101 We have to do this here, rather than in eliminate_regs, so that we can
3102 change the insn code. */
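/* For example (offset invented): if the frame pointer eliminates to the
   stack pointer and ep->offset is currently 16, then
   (set (reg 3) (plus (reg fp) (const_int -16))) is replaced below by
   (set (reg 3) (reg sp)), and INSN_CODE is cleared so the insn will be
   recognized afresh.  */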
3103
3104 if (GET_CODE (SET_SRC (old_body)) == PLUS
3105 && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
3106 && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
3107 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3108 ep++)
3109 if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
3110 && ep->can_eliminate
3111 && ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
3112 {
3113 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3114 SET_DEST (old_body), ep->to_rtx);
3115 INSN_CODE (insn) = -1;
3116 val = 1;
3117 goto done;
3118 }
3119 }
3120
3121 old_asm_operands_vec = 0;
3122
3123 /* Replace the body of this insn with a substituted form. If we changed
3124 something, return non-zero. If this is the final call for this
3125 insn (REPLACE is non-zero), do the elimination in REG_NOTES as well.
3126
3127 If we are replacing a body that was a (set X (plus Y Z)), try to
3128 re-recognize the insn. We do this in case we had a simple addition
3129 but now can do this as a load-address. This saves an insn in this
3130 common case. */
3131
3132 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3133 if (new_body != old_body)
3134 {
3135 /* If we aren't replacing things permanently and we changed something,
3136 make another copy to ensure that all the RTL is new. Otherwise
3137 things can go wrong if find_reloads swaps commutative operands
3138 and one is inside RTL that has been copied while the other is not. */
3139
3140 /* Don't copy an asm_operands because (1) there's no need and (2)
3141 copy_rtx can't do it properly when there are multiple outputs. */
3142 if (! replace && asm_noperands (old_body) < 0)
3143 new_body = copy_rtx (new_body);
3144
3145 /* If we had a move insn but now we don't, rerecognize it. */
3146 if ((GET_CODE (old_body) == SET && GET_CODE (SET_SRC (old_body)) == REG
3147 && (GET_CODE (new_body) != SET
3148 || GET_CODE (SET_SRC (new_body)) != REG))
3149 /* If this was an add insn before, rerecognize. */
3150 ||
3151 (GET_CODE (old_body) == SET
3152 && GET_CODE (SET_SRC (old_body)) == PLUS))
3153 {
3154 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3155 /* If recognition fails, store the new body anyway.
3156 It's normal to have recognition failures here
3157 due to bizarre memory addresses; reloading will fix them. */
3158 PATTERN (insn) = new_body;
3159 }
3160 else
3161 PATTERN (insn) = new_body;
3162
3163 if (replace && REG_NOTES (insn))
3164 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, NULL_RTX);
3165 val = 1;
3166 }
3167
3168 /* Loop through all elimination pairs. See if any have changed and
3169 recalculate the number not at initial offset.
3170
3171 Compute the maximum offset (minimum offset if the stack does not
3172 grow downward) for each elimination pair.
3173
3174 We also detect cases where register elimination cannot be done,
3175 namely, if a register would be both changed and referenced outside a MEM
3176 in the resulting insn since such an insn is often undefined and, even if
3177 not, we cannot know what meaning will be given to it. Note that it is
3178 valid to have a register used in an address in an insn that changes it
3179 (presumably with a pre- or post-increment or decrement).
3180
3181 If anything changes, return nonzero. */
3182
3183 num_not_at_initial_offset = 0;
3184 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3185 {
3186 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3187 ep->can_eliminate = 0;
3188
3189 ep->ref_outside_mem = 0;
3190
3191 if (ep->previous_offset != ep->offset)
3192 val = 1;
3193
3194 ep->previous_offset = ep->offset;
3195 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3196 num_not_at_initial_offset++;
3197
3198 #ifdef STACK_GROWS_DOWNWARD
3199 ep->max_offset = MAX (ep->max_offset, ep->offset);
3200 #else
3201 ep->max_offset = MIN (ep->max_offset, ep->offset);
3202 #endif
3203 }
3204
3205 done:
3206 if (! replace)
3207 pop_obstacks ();
3208
3209 return val;
3210 }
3211
3212 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3213 replacement we currently believe is valid, mark it as not eliminable if X
3214 modifies DEST in any way other than by adding a constant integer to it.
3215
3216 If DEST is the frame pointer, we do nothing because we assume that
3217 all assignments to the frame pointer are nonlocal gotos and are being done
3218 at a time when they are valid and do not disturb anything else.
3219 Some machines want to eliminate a fake argument pointer with either the
3220 frame or stack pointer. Assignments to the frame pointer must not prevent
3221 this elimination.
3222
3223 Called via note_stores from reload before starting its passes to scan
3224 the insns of the function. */
3225
3226 static void
3227 mark_not_eliminable (dest, x)
3228 rtx dest;
3229 rtx x;
3230 {
3231 register int i;
3232
3233 /* A SUBREG of a hard register here is just changing its mode. We should
3234 not see a SUBREG of an eliminable hard register, but check just in
3235 case. */
3236 if (GET_CODE (dest) == SUBREG)
3237 dest = SUBREG_REG (dest);
3238
3239 if (dest == frame_pointer_rtx)
3240 return;
3241
3242 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3243 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3244 && (GET_CODE (x) != SET
3245 || GET_CODE (SET_SRC (x)) != PLUS
3246 || XEXP (SET_SRC (x), 0) != dest
3247 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3248 {
3249 reg_eliminate[i].can_eliminate_previous
3250 = reg_eliminate[i].can_eliminate = 0;
3251 num_eliminable--;
3252 }
3253 }
3254 \f
3255 /* Kick all pseudos out of hard register REGNO.
3256 If GLOBAL is nonzero, try to find someplace else to put them.
3257 If DUMPFILE is nonzero, log actions taken on that file.
3258
3259 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3260 because we found we can't eliminate some register. In that case, no pseudos
3261 are allowed to be in the register, even if they are only in a block that
3262 doesn't require spill registers, unlike the case when we are spilling this
3263 hard reg to produce another spill register.
3264
3265 Return nonzero if any pseudos needed to be kicked out. */
3266
3267 static int
3268 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3269 register int regno;
3270 int global;
3271 FILE *dumpfile;
3272 int cant_eliminate;
3273 {
3274 int something_changed = 0;
3275 register int i;
3276
3277 SET_HARD_REG_BIT (forbidden_regs, regno);
3278
3279 /* Spill every pseudo reg that was allocated to this reg
3280 or to something that overlaps this reg. */
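      /* For example (numbers purely illustrative): a pseudo whose mode needs
         two hard registers and that was allocated to hard reg 2 occupies
         regs 2 and 3, so it must be kicked out whether REGNO is 2 or 3;
         that is what the overlap test below checks.  */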
3281
3282 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3283 if (reg_renumber[i] >= 0
3284 && reg_renumber[i] <= regno
3285 && (reg_renumber[i]
3286 + HARD_REGNO_NREGS (reg_renumber[i],
3287 PSEUDO_REGNO_MODE (i))
3288 > regno))
3289 {
3290 enum reg_class class = REGNO_REG_CLASS (regno);
3291
3292 /* If this register belongs solely to a basic block which needed no
3293 spilling of any class that this register is contained in,
3294 leave it be, unless we are spilling this register because
3295 it was a hard register that can't be eliminated. */
3296
3297 if (! cant_eliminate
3298 && basic_block_needs[0]
3299 && reg_basic_block[i] >= 0
3300 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3301 {
3302 enum reg_class *p;
3303
3304 for (p = reg_class_superclasses[(int) class];
3305 *p != LIM_REG_CLASSES; p++)
3306 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3307 break;
3308
3309 if (*p == LIM_REG_CLASSES)
3310 continue;
3311 }
3312
3313 /* Mark it as no longer having a hard register home. */
3314 reg_renumber[i] = -1;
3315 /* We will need to scan everything again. */
3316 something_changed = 1;
3317 if (global)
3318 retry_global_alloc (i, forbidden_regs);
3319
3320 alter_reg (i, regno);
3321 if (dumpfile)
3322 {
3323 if (reg_renumber[i] == -1)
3324 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3325 else
3326 fprintf (dumpfile, " Register %d now in %d.\n\n",
3327 i, reg_renumber[i]);
3328 }
3329 }
3330
3331 return something_changed;
3332 }
3333 \f
3334 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
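/* (A SUBREG is "paradoxical" when its mode is wider than the mode of the
   register it contains, for example (subreg:DI (reg:SI N) 0); such a
   reference needs the full width of the SUBREG's mode, which is what
   reg_max_ref_width records.)  */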
3335
3336 static void
3337 scan_paradoxical_subregs (x)
3338 register rtx x;
3339 {
3340 register int i;
3341 register char *fmt;
3342 register enum rtx_code code = GET_CODE (x);
3343
3344 switch (code)
3345 {
3346 case CONST_INT:
3347 case CONST:
3348 case SYMBOL_REF:
3349 case LABEL_REF:
3350 case CONST_DOUBLE:
3351 case CC0:
3352 case PC:
3353 case REG:
3354 case USE:
3355 case CLOBBER:
3356 return;
3357
3358 case SUBREG:
3359 if (GET_CODE (SUBREG_REG (x)) == REG
3360 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3361 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3362 = GET_MODE_SIZE (GET_MODE (x));
3363 return;
3364 }
3365
3366 fmt = GET_RTX_FORMAT (code);
3367 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3368 {
3369 if (fmt[i] == 'e')
3370 scan_paradoxical_subregs (XEXP (x, i));
3371 else if (fmt[i] == 'E')
3372 {
3373 register int j;
3374 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3375 scan_paradoxical_subregs (XVECEXP (x, i, j));
3376 }
3377 }
3378 }
3379 \f
3380 static int
3381 hard_reg_use_compare (p1, p2)
3382 struct hard_reg_n_uses *p1, *p2;
3383 {
3384 int tem = p1->uses - p2->uses;
3385 if (tem != 0) return tem;
3386 /* If regs are equally good, sort by regno,
3387 so that the results of qsort leave nothing to chance. */
3388 return p1->regno - p2->regno;
3389 }
3390
3391 /* Choose the order to consider regs for use as reload registers
3392 based on how much trouble would be caused by spilling one.
3393 Store them in order of decreasing preference in potential_reload_regs. */
3394
3395 static void
3396 order_regs_for_reload ()
3397 {
3398 register int i;
3399 register int o = 0;
3400 int large = 0;
3401
3402 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3403
3404 CLEAR_HARD_REG_SET (bad_spill_regs);
3405
3406 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3407 potential_reload_regs[i] = -1;
3408
3409 /* Count number of uses of each hard reg by pseudo regs allocated to it
3410 and then order them by decreasing use. */
3411
3412 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3413 {
3414 hard_reg_n_uses[i].uses = 0;
3415 hard_reg_n_uses[i].regno = i;
3416 }
3417
3418 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3419 {
3420 int regno = reg_renumber[i];
3421 if (regno >= 0)
3422 {
3423 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3424 while (regno < lim)
3425 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3426 }
3427 large += reg_n_refs[i];
3428 }
3429
3430 /* Now fixed registers (which cannot safely be used for reloading)
3431 get a very high use count so they will be considered least desirable.
3432 Registers used explicitly in the rtl code are almost as bad. */
3433
3434 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3435 {
3436 if (fixed_regs[i])
3437 {
3438 hard_reg_n_uses[i].uses += 2 * large + 2;
3439 SET_HARD_REG_BIT (bad_spill_regs, i);
3440 }
3441 else if (regs_explicitly_used[i])
3442 {
3443 hard_reg_n_uses[i].uses += large + 1;
3444 #ifndef SMALL_REGISTER_CLASSES
3445 /* ??? We are doing this here because of the potential that
3446 bad code may be generated if a register explicitly used in
3447 an insn was used as a spill register for that insn. But
3448 not using these as spill registers may lose on some machines.
3449 We'll have to see how this works out. */
3450 SET_HARD_REG_BIT (bad_spill_regs, i);
3451 #endif
3452 }
3453 }
3454 hard_reg_n_uses[FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3455 SET_HARD_REG_BIT (bad_spill_regs, FRAME_POINTER_REGNUM);
3456
3457 #ifdef ELIMINABLE_REGS
3458 /* If registers other than the frame pointer are eliminable, mark them as
3459 poor choices. */
3460 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3461 {
3462 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3463 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3464 }
3465 #endif
3466
3467 /* Prefer registers not so far used, for use in temporary loading.
3468 Among them, if REG_ALLOC_ORDER is defined, use that order.
3469 Otherwise, prefer registers not preserved by calls. */
3470
3471 #ifdef REG_ALLOC_ORDER
3472 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3473 {
3474 int regno = reg_alloc_order[i];
3475
3476 if (hard_reg_n_uses[regno].uses == 0)
3477 potential_reload_regs[o++] = regno;
3478 }
3479 #else
3480 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3481 {
3482 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3483 potential_reload_regs[o++] = i;
3484 }
3485 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3486 {
3487 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3488 potential_reload_regs[o++] = i;
3489 }
3490 #endif
3491
3492 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3493 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3494
3495 /* Now add the regs that are already used,
3496 preferring those used less often. The fixed and otherwise forbidden
3497 registers will be at the end of this list. */
3498
3499 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3500 if (hard_reg_n_uses[i].uses != 0)
3501 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3502 }
3503 \f
3504 /* Reload pseudo-registers into hard regs around each insn as needed.
3505 Additional register load insns are output before the insn that needs it
3506 and perhaps store insns after insns that modify the reloaded pseudo reg.
3507
3508 reg_last_reload_reg and reg_reloaded_contents keep track of
3509 which registers are already available in reload registers.
3510 We update these for the reloads that we perform,
3511 as the insns are scanned. */
3512
3513 static void
3514 reload_as_needed (first, live_known)
3515 rtx first;
3516 int live_known;
3517 {
3518 register rtx insn;
3519 register int i;
3520 int this_block = 0;
3521 rtx x;
3522 rtx after_call = 0;
3523
3524 bzero (spill_reg_rtx, sizeof spill_reg_rtx);
3525 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3526 bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
3527 reg_has_output_reload = (char *) alloca (max_regno);
3528 for (i = 0; i < n_spills; i++)
3529 {
3530 reg_reloaded_contents[i] = -1;
3531 reg_reloaded_insn[i] = 0;
3532 }
3533
3534 /* Reset all offsets on eliminable registers to their initial values. */
3535 #ifdef ELIMINABLE_REGS
3536 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3537 {
3538 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3539 reg_eliminate[i].initial_offset);
3540 reg_eliminate[i].previous_offset
3541 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3542 }
3543 #else
3544 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3545 reg_eliminate[0].previous_offset
3546 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3547 #endif
3548
3549 num_not_at_initial_offset = 0;
3550
3551 for (insn = first; insn;)
3552 {
3553 register rtx next = NEXT_INSN (insn);
3554
3555 /* Notice when we move to a new basic block. */
3556 if (live_known && this_block + 1 < n_basic_blocks
3557 && insn == basic_block_head[this_block+1])
3558 ++this_block;
3559
3560 /* If we pass a label, copy the offsets from the label information
3561 into the current offsets of each elimination. */
3562 if (GET_CODE (insn) == CODE_LABEL)
3563 {
3564 num_not_at_initial_offset = 0;
3565 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3566 {
3567 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3568 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3569 if (reg_eliminate[i].can_eliminate
3570 && (reg_eliminate[i].offset
3571 != reg_eliminate[i].initial_offset))
3572 num_not_at_initial_offset++;
3573 }
3574 }
3575
3576 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3577 {
3578 rtx avoid_return_reg = 0;
3579
3580 #ifdef SMALL_REGISTER_CLASSES
3581 /* Set avoid_return_reg if this is an insn
3582 that might use the value of a function call. */
3583 if (GET_CODE (insn) == CALL_INSN)
3584 {
3585 if (GET_CODE (PATTERN (insn)) == SET)
3586 after_call = SET_DEST (PATTERN (insn));
3587 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3588 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3589 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3590 else
3591 after_call = 0;
3592 }
3593 else if (after_call != 0
3594 && !(GET_CODE (PATTERN (insn)) == SET
3595 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3596 {
3597 if (reg_mentioned_p (after_call, PATTERN (insn)))
3598 avoid_return_reg = after_call;
3599 after_call = 0;
3600 }
3601 #endif /* SMALL_REGISTER_CLASSES */
3602
3603 /* If this is a USE or CLOBBER of a MEM, ensure that any
3604 references to eliminable registers have been removed. */
3605
3606 if ((GET_CODE (PATTERN (insn)) == USE
3607 || GET_CODE (PATTERN (insn)) == CLOBBER)
3608 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3609 XEXP (XEXP (PATTERN (insn), 0), 0)
3610 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3611 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
3612
3613 /* If we need to do register elimination processing, do so.
3614 This might delete the insn, in which case we are done. */
3615 if (num_eliminable && GET_MODE (insn) == QImode)
3616 {
3617 eliminate_regs_in_insn (insn, 1);
3618 if (GET_CODE (insn) == NOTE)
3619 {
3620 insn = next;
3621 continue;
3622 }
3623 }
3624
3625 if (GET_MODE (insn) == VOIDmode)
3626 n_reloads = 0;
3627 /* First find the pseudo regs that must be reloaded for this insn.
3628 This info is returned in the tables reload_... (see reload.h).
3629 Also modify the body of INSN by substituting RELOAD
3630 rtx's for those pseudo regs. */
3631 else
3632 {
3633 bzero (reg_has_output_reload, max_regno);
3634 CLEAR_HARD_REG_SET (reg_is_output_reload);
3635
3636 find_reloads (insn, 1, spill_indirect_levels, live_known,
3637 spill_reg_order);
3638 }
3639
3640 if (n_reloads > 0)
3641 {
3642 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3643 rtx p;
3644 int class;
3645
3646 /* If this block has not had spilling done for a
3647 particular class and we have any non-optionals that need a
3648 spill reg in that class, abort. */
3649
3650 for (class = 0; class < N_REG_CLASSES; class++)
3651 if (basic_block_needs[class] != 0
3652 && basic_block_needs[class][this_block] == 0)
3653 for (i = 0; i < n_reloads; i++)
3654 if (class == (int) reload_reg_class[i]
3655 && reload_reg_rtx[i] == 0
3656 && ! reload_optional[i]
3657 && (reload_in[i] != 0 || reload_out[i] != 0
3658 || reload_secondary_p[i] != 0))
3659 abort ();
3660
3661 /* Now compute which reload regs to reload them into. Perhaps
3662 reusing reload regs from previous insns, or else output
3663 load insns to reload them. Maybe output store insns too.
3664 Record the choices of reload reg in reload_reg_rtx. */
3665 choose_reload_regs (insn, avoid_return_reg);
3666
3667 #ifdef SMALL_REGISTER_CLASSES
3668 /* Merge any reloads that we didn't combine for fear of
3669 increasing the number of spill registers needed but now
3670 discover can be safely merged. */
3671 merge_assigned_reloads (insn);
3672 #endif
3673
3674 /* Generate the insns to reload operands into or out of
3675 their reload regs. */
3676 emit_reload_insns (insn);
3677
3678 /* Substitute the chosen reload regs from reload_reg_rtx
3679 into the insn's body (or perhaps into the bodies of other
3680 load and store insns that we just made for reloading
3681 and that we moved the structure into). */
3682 subst_reloads ();
3683
3684 /* If this was an ASM, make sure that all the reload insns
3685 we have generated are valid. If not, give an error
3686 and delete them. */
3687
3688 if (asm_noperands (PATTERN (insn)) >= 0)
3689 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3690 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3691 && (recog_memoized (p) < 0
3692 || (insn_extract (p),
3693 ! constrain_operands (INSN_CODE (p), 1))))
3694 {
3695 error_for_asm (insn,
3696 "`asm' operand requires impossible reload");
3697 PUT_CODE (p, NOTE);
3698 NOTE_SOURCE_FILE (p) = 0;
3699 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3700 }
3701 }
3702 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3703 is no longer validly lying around to save a future reload.
3704 Note that this does not detect pseudos that were reloaded
3705 for this insn so that the insn could store into them
3706 (obeying register constraints). That is correct; such reload
3707 registers ARE still valid. */
3708 note_stores (PATTERN (insn), forget_old_reloads_1);
3709
3710 /* There may have been CLOBBER insns placed after INSN. So scan
3711 between INSN and NEXT and use them to forget old reloads. */
3712 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3713 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3714 note_stores (PATTERN (x), forget_old_reloads_1);
3715
3716 #ifdef AUTO_INC_DEC
3717 /* Likewise for regs altered by auto-increment in this insn.
3718 But note that the reg-notes are not changed by reloading:
3719 they still contain the pseudo-regs, not the spill regs. */
3720 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3721 if (REG_NOTE_KIND (x) == REG_INC)
3722 {
3723 /* See if this pseudo reg was reloaded in this insn.
3724 If so, its last-reload info is still valid
3725 because it is based on this insn's reload. */
3726 for (i = 0; i < n_reloads; i++)
3727 if (reload_out[i] == XEXP (x, 0))
3728 break;
3729
3730 if (i == n_reloads)
3731 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
3732 }
3733 #endif
3734 }
3735 /* A reload reg's contents are unknown after a label. */
3736 if (GET_CODE (insn) == CODE_LABEL)
3737 for (i = 0; i < n_spills; i++)
3738 {
3739 reg_reloaded_contents[i] = -1;
3740 reg_reloaded_insn[i] = 0;
3741 }
3742
3743 /* Don't assume a reload reg is still good after a call insn
3744 if it is a call-used reg. */
3745 else if (GET_CODE (insn) == CALL_INSN)
3746 for (i = 0; i < n_spills; i++)
3747 if (call_used_regs[spill_regs[i]])
3748 {
3749 reg_reloaded_contents[i] = -1;
3750 reg_reloaded_insn[i] = 0;
3751 }
3752
3753 /* In case registers overlap, allow certain insns to invalidate
3754 particular hard registers. */
3755
3756 #ifdef INSN_CLOBBERS_REGNO_P
3757 for (i = 0 ; i < n_spills ; i++)
3758 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3759 {
3760 reg_reloaded_contents[i] = -1;
3761 reg_reloaded_insn[i] = 0;
3762 }
3763 #endif
3764
3765 insn = next;
3766
3767 #ifdef USE_C_ALLOCA
3768 alloca (0);
3769 #endif
3770 }
3771 }
3772
3773 /* Discard all record of any value reloaded from X,
3774 or reloaded in X from someplace else;
3775 unless X is an output reload reg of the current insn.
3776
3777 X may be a hard reg (the reload reg)
3778 or it may be a pseudo reg that was reloaded from. */
3779
3780 static void
3781 forget_old_reloads_1 (x, ignored)
3782 rtx x;
3783 rtx ignored;
3784 {
3785 register int regno;
3786 int nr;
3787 int offset = 0;
3788
3789 /* note_stores does give us subregs of hard regs. */
3790 while (GET_CODE (x) == SUBREG)
3791 {
3792 offset += SUBREG_WORD (x);
3793 x = SUBREG_REG (x);
3794 }
3795
3796 if (GET_CODE (x) != REG)
3797 return;
3798
3799 regno = REGNO (x) + offset;
3800
3801 if (regno >= FIRST_PSEUDO_REGISTER)
3802 nr = 1;
3803 else
3804 {
3805 int i;
3806 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3807 /* Storing into a spilled-reg invalidates its contents.
3808 This can happen if a block-local pseudo is allocated to that reg
3809 and it wasn't spilled because this block's total need is 0.
3810 Then some insn might have an optional reload and use this reg. */
3811 for (i = 0; i < nr; i++)
3812 if (spill_reg_order[regno + i] >= 0
3813 /* But don't do this if the reg actually serves as an output
3814 reload reg in the current instruction. */
3815 && (n_reloads == 0
3816 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3817 {
3818 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3819 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3820 }
3821 }
3822
3823 /* Since value of X has changed,
3824 forget any value previously copied from it. */
3825
3826 while (nr-- > 0)
3827 /* But don't forget a copy if this is the output reload
3828 that establishes the copy's validity. */
3829 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3830 reg_last_reload_reg[regno + nr] = 0;
3831 }
3832 \f
3833 /* For each reload, the mode of the reload register. */
3834 static enum machine_mode reload_mode[MAX_RELOADS];
3835
3836 /* For each reload, the largest number of registers it will require. */
3837 static int reload_nregs[MAX_RELOADS];
3838
3839 /* Comparison function for qsort to decide which of two reloads
3840 should be handled first. *P1 and *P2 are the reload numbers. */
3841
3842 static int
3843 reload_reg_class_lower (p1, p2)
3844 short *p1, *p2;
3845 {
3846 register int r1 = *p1, r2 = *p2;
3847 register int t;
3848
3849 /* Consider required reloads before optional ones. */
3850 t = reload_optional[r1] - reload_optional[r2];
3851 if (t != 0)
3852 return t;
3853
3854 /* Count all solitary classes before non-solitary ones. */
3855 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3856 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3857 if (t != 0)
3858 return t;
3859
3860 /* Aside from solitaires, consider all multi-reg groups first. */
3861 t = reload_nregs[r2] - reload_nregs[r1];
3862 if (t != 0)
3863 return t;
3864
3865 /* Consider reloads in order of increasing reg-class number. */
3866 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3867 if (t != 0)
3868 return t;
3869
3870 /* If reloads are equally urgent, sort by reload number,
3871 so that the results of qsort leave nothing to chance. */
3872 return r1 - r2;
3873 }
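/* For example, with the ordering above a required reload whose class
   contains just one register is considered first, then required
   multi-register groups, then the remaining required reloads in order of
   increasing class number; optional reloads always come after all
   required ones.  */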
3874 \f
3875 /* The following HARD_REG_SETs indicate when each hard register is
3876 used for a reload of various parts of the current insn. */
3877
3878 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
3879 static HARD_REG_SET reload_reg_used;
3880 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
3881 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
3882 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
3883 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
3884 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
3885 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
3886 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
3887 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
3888 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
3889 static HARD_REG_SET reload_reg_used_in_op_addr;
3890 /* If reg is in use for a RELOAD_FOR_INSN reload. */
3891 static HARD_REG_SET reload_reg_used_in_insn;
3892 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
3893 static HARD_REG_SET reload_reg_used_in_other_addr;
3894
3895 /* If reg is in use as a reload reg for any sort of reload. */
3896 static HARD_REG_SET reload_reg_used_at_all;
3897
3898 /* If reg is in use as an inherited reload. We just mark the first register
3899 in the group. */
3900 static HARD_REG_SET reload_reg_used_for_inherit;
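/* The availability tests below (reload_reg_free_p, reload_reg_free_before_p
   and reload_reg_reaches_end_p) assume that, within one insn, reloads take
   effect roughly in this order (a RELOAD_OTHER reload claims its register
   for the whole insn): first any RELOAD_FOR_OTHER_ADDRESS reloads; then,
   for each operand in turn, its RELOAD_FOR_INPUT_ADDRESS reloads followed
   by its RELOAD_FOR_INPUT reload; then the RELOAD_FOR_OPERAND_ADDRESS and
   RELOAD_FOR_INSN reloads and the insn itself; finally, for each operand
   in turn, its RELOAD_FOR_OUTPUT_ADDRESS reloads followed by its
   RELOAD_FOR_OUTPUT reload.  This summary is deduced from those tests and
   is meant only as a reading aid.  */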
3901
3902 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
3903 TYPE. MODE is used to indicate how many consecutive regs are
3904 actually used. */
3905
3906 static void
3907 mark_reload_reg_in_use (regno, opnum, type, mode)
3908 int regno;
3909 int opnum;
3910 enum reload_type type;
3911 enum machine_mode mode;
3912 {
3913 int nregs = HARD_REGNO_NREGS (regno, mode);
3914 int i;
3915
3916 for (i = regno; i < nregs + regno; i++)
3917 {
3918 switch (type)
3919 {
3920 case RELOAD_OTHER:
3921 SET_HARD_REG_BIT (reload_reg_used, i);
3922 break;
3923
3924 case RELOAD_FOR_INPUT_ADDRESS:
3925 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
3926 break;
3927
3928 case RELOAD_FOR_OUTPUT_ADDRESS:
3929 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
3930 break;
3931
3932 case RELOAD_FOR_OPERAND_ADDRESS:
3933 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
3934 break;
3935
3936 case RELOAD_FOR_OTHER_ADDRESS:
3937 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
3938 break;
3939
3940 case RELOAD_FOR_INPUT:
3941 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
3942 break;
3943
3944 case RELOAD_FOR_OUTPUT:
3945 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
3946 break;
3947
3948 case RELOAD_FOR_INSN:
3949 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
3950 break;
3951 }
3952
3953 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
3954 }
3955 }
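/* For example (register number, operand number and mode purely
   illustrative): marking regno 4 for a RELOAD_FOR_INPUT reload of operand 1
   in a mode for which HARD_REGNO_NREGS yields 2 sets bits 4 and 5 both in
   reload_reg_used_in_input[1] and in reload_reg_used_at_all.  */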
3956
3957 /* Similarly, but show REGNO is no longer in use for a reload. */
3958
3959 static void
3960 clear_reload_reg_in_use (regno, opnum, type, mode)
3961 int regno;
3962 int opnum;
3963 enum reload_type type;
3964 enum machine_mode mode;
3965 {
3966 int nregs = HARD_REGNO_NREGS (regno, mode);
3967 int i;
3968
3969 for (i = regno; i < nregs + regno; i++)
3970 {
3971 switch (type)
3972 {
3973 case RELOAD_OTHER:
3974 CLEAR_HARD_REG_BIT (reload_reg_used, i);
3975 break;
3976
3977 case RELOAD_FOR_INPUT_ADDRESS:
3978 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
3979 break;
3980
3981 case RELOAD_FOR_OUTPUT_ADDRESS:
3982 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
3983 break;
3984
3985 case RELOAD_FOR_OPERAND_ADDRESS:
3986 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
3987 break;
3988
3989 case RELOAD_FOR_OTHER_ADDRESS:
3990 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
3991 break;
3992
3993 case RELOAD_FOR_INPUT:
3994 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
3995 break;
3996
3997 case RELOAD_FOR_OUTPUT:
3998 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
3999 break;
4000
4001 case RELOAD_FOR_INSN:
4002 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4003 break;
4004 }
4005 }
4006 }
4007
4008 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4009 specified by OPNUM and TYPE. */
4010
4011 static int
4012 reload_reg_free_p (regno, opnum, type)
4013 int regno;
4014 int opnum;
4015 enum reload_type type;
4016 {
4017 int i;
4018
4019 /* In use for a RELOAD_OTHER means it's not available for anything except
4020 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4021 to be used only for inputs. */
4022
4023 if (type != RELOAD_FOR_OTHER_ADDRESS
4024 && TEST_HARD_REG_BIT (reload_reg_used, regno))
4025 return 0;
4026
4027 switch (type)
4028 {
4029 case RELOAD_OTHER:
4030 /* In use for anything means not available for a RELOAD_OTHER. */
4031 return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);
4032
4033 /* The other kinds of use can sometimes share a register. */
4034 case RELOAD_FOR_INPUT:
4035 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4036 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4037 return 0;
4038
4039 /* If it is used for some other input, can't use it. */
4040 for (i = 0; i < reload_n_operands; i++)
4041 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4042 return 0;
4043
4044 /* If it is used in a later operand's address, can't use it. */
4045 for (i = opnum + 1; i < reload_n_operands; i++)
4046 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4047 return 0;
4048
4049 return 1;
4050
4051 case RELOAD_FOR_INPUT_ADDRESS:
4052 /* Can't use a register if it is used for an input address for this
4053 operand or used as an input in an earlier one. */
4054 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4055 return 0;
4056
4057 for (i = 0; i < opnum; i++)
4058 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4059 return 0;
4060
4061 return 1;
4062
4063 case RELOAD_FOR_OUTPUT_ADDRESS:
4064 /* Can't use a register if it is used for an output address for this
4065 operand or used as an output in this or a later operand. */
4066 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4067 return 0;
4068
4069 for (i = opnum; i < reload_n_operands; i++)
4070 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4071 return 0;
4072
4073 return 1;
4074
4075 case RELOAD_FOR_OPERAND_ADDRESS:
4076 for (i = 0; i < reload_n_operands; i++)
4077 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4078 return 0;
4079
4080 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4081 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4082
4083 case RELOAD_FOR_OUTPUT:
4084 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4085 outputs, or an operand address for this or an earlier output. */
4086 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4087 return 0;
4088
4089 for (i = 0; i < reload_n_operands; i++)
4090 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4091 return 0;
4092
4093 for (i = 0; i <= opnum; i++)
4094 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4095 return 0;
4096
4097 return 1;
4098
4099 case RELOAD_FOR_INSN:
4100 for (i = 0; i < reload_n_operands; i++)
4101 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4102 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4103 return 0;
4104
4105 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4106 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4107
4108 case RELOAD_FOR_OTHER_ADDRESS:
4109 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4110 }
4111 abort ();
4112 }
4113
4114 /* Return 1 if the value in reload reg REGNO, as used by a reload
4115 needed for the part of the insn specified by OPNUM and TYPE,
4116 is not in use for a reload in any prior part of the insn.
4117
4118 We can assume that the reload reg was already tested for availability
4119 at the time it is needed, and we should not check this again,
4120 in case the reg has already been marked in use. */
4121
4122 static int
4123 reload_reg_free_before_p (regno, opnum, type)
4124 int regno;
4125 int opnum;
4126 enum reload_type type;
4127 {
4128 int i;
4129
4130 switch (type)
4131 {
4132 case RELOAD_FOR_OTHER_ADDRESS:
4133 /* These always come first. */
4134 return 1;
4135
4136 case RELOAD_OTHER:
4137 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4138
4139 /* If this use is for part of the insn,
4140 check the reg is not in use for any prior part. It is tempting
4141 to try to do this by falling through from objects that occur
4142 later in the insn to ones that occur earlier, but that will not
4143 correctly take into account the fact that here we MUST ignore
4144 things that would prevent the register from being allocated in
4145 the first place, since we know that it was allocated. */
4146
4147 case RELOAD_FOR_OUTPUT_ADDRESS:
4148 /* Earlier reloads are for earlier outputs or their addresses,
4149 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4150 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4151 RELOAD_OTHER). */
4152 for (i = 0; i < opnum; i++)
4153 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4154 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4155 return 0;
4156
4157 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4158 return 0;
4159
4160 for (i = 0; i < reload_n_operands; i++)
4161 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4162 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4163 return 0;
4164
4165 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4166 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4167 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4168
4169 case RELOAD_FOR_OUTPUT:
4170 /* This can't be used in the output address for this operand and
4171 anything that can't be used for it, except that we've already
4172 tested for RELOAD_FOR_INSN objects. */
4173
4174 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4175 return 0;
4176
4177 for (i = 0; i < opnum; i++)
4178 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4179 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4180 return 0;
4181
4182 for (i = 0; i < reload_n_operands; i++)
4183 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4184 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4185 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4186 return 0;
4187
4188 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4189
4190 case RELOAD_FOR_OPERAND_ADDRESS:
4191 case RELOAD_FOR_INSN:
4192 /* These can't conflict with inputs, or each other, so all we have to
4193 test is input addresses and the addresses of OTHER items. */
4194
4195 for (i = 0; i < reload_n_operands; i++)
4196 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4197 return 0;
4198
4199 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4200
4201 case RELOAD_FOR_INPUT:
4202 /* The only things earlier are the address for this and
4203 earlier inputs, other inputs (which we know we don't conflict
4204 with), and addresses of RELOAD_OTHER objects. */
4205
4206 for (i = 0; i <= opnum; i++)
4207 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4208 return 0;
4209
4210 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4211
4212 case RELOAD_FOR_INPUT_ADDRESS:
4213 /* Similarly, all we have to check is for use in earlier inputs'
4214 addresses. */
4215 for (i = 0; i < opnum; i++)
4216 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4217 return 0;
4218
4219 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4220 }
4221 abort ();
4222 }
4223
4224 /* Return 1 if the value in reload reg REGNO, as used by a reload
4225 needed for the part of the insn specified by OPNUM and TYPE,
4226 is still available in REGNO at the end of the insn.
4227
4228 We can assume that the reload reg was already tested for availability
4229 at the time it is needed, and we should not check this again,
4230 in case the reg has already been marked in use. */
4231
4232 static int
4233 reload_reg_reaches_end_p (regno, opnum, type)
4234 int regno;
4235 int opnum;
4236 enum reload_type type;
4237 {
4238 int i;
4239
4240 switch (type)
4241 {
4242 case RELOAD_OTHER:
4243 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4244 its value must reach the end. */
4245 return 1;
4246
4247 /* If this use is for part of the insn,
4248 its value reaches the end if no subsequent part uses the same register.
4249 Just like the above function, don't try to do this with lots
4250 of fallthroughs. */
4251
4252 case RELOAD_FOR_OTHER_ADDRESS:
4253 /* Here we check for everything else, since these don't conflict
4254 with anything else and everything comes later. */
4255
4256 for (i = 0; i < reload_n_operands; i++)
4257 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4258 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4259 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4260 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4261 return 0;
4262
4263 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4264 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4265 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4266
4267 case RELOAD_FOR_INPUT_ADDRESS:
4268 /* Similar, except that we check only for this and subsequent inputs
4269 and the address of only subsequent inputs and we do not need
4270 to check for RELOAD_OTHER objects since they are known not to
4271 conflict. */
4272
4273 for (i = opnum; i < reload_n_operands; i++)
4274 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4275 return 0;
4276
4277 for (i = opnum + 1; i < reload_n_operands; i++)
4278 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4279 return 0;
4280
4281 for (i = 0; i < reload_n_operands; i++)
4282 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4283 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4284 return 0;
4285
4286 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4287 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4288
4289 case RELOAD_FOR_INPUT:
4290 /* Similar to input address, except we start at the next operand for
4291 both input and input address and we do not check for
4292 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4293 would conflict. */
4294
4295 for (i = opnum + 1; i < reload_n_operands; i++)
4296 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4297 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4298 return 0;
4299
4300 /* ... fall through ... */
4301
4302 case RELOAD_FOR_OPERAND_ADDRESS:
4303 /* Check outputs and their addresses. */
4304
4305 for (i = 0; i < reload_n_operands; i++)
4306 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4307 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4308 return 0;
4309
4310 return 1;
4311
4312 case RELOAD_FOR_INSN:
4313 /* These conflict with other outputs and with RELOAD_OTHER. So
4314 we need only check for output addresses. */
4315
4316 opnum = -1;
4317
4318 /* ... fall through ... */
4319
4320 case RELOAD_FOR_OUTPUT:
4321 case RELOAD_FOR_OUTPUT_ADDRESS:
4322 /* We already know these can't conflict with a later output. So the
4323 only things to check are later output addresses. */
4324 for (i = opnum + 1; i < reload_n_operands; i++)
4325 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4326 return 0;
4327
4328 return 1;
4329 }
4330
4331 abort ();
4332 }
4333 \f
4334 /* Vector of reload-numbers showing the order in which the reloads should
4335 be processed. */
4336 short reload_order[MAX_RELOADS];
4337
4338 /* Indexed by reload number, 1 if incoming value
4339 inherited from previous insns. */
4340 char reload_inherited[MAX_RELOADS];
4341
4342 /* For an inherited reload, this is the insn the reload was inherited from,
4343 if we know it. Otherwise, this is 0. */
4344 rtx reload_inheritance_insn[MAX_RELOADS];
4345
4346 /* If non-zero, this is a place to get the value of the reload,
4347 rather than using reload_in. */
4348 rtx reload_override_in[MAX_RELOADS];
4349
4350 /* For each reload, the index in spill_regs of the spill register used,
4351 or -1 if we did not need one of the spill registers for this reload. */
4352 int reload_spill_index[MAX_RELOADS];
4353
4354 /* Index of last register assigned as a spill register. We allocate in
4355 a round-robin fashion. */
4356
4357 static int last_spill_reg = 0;
4358
4359 /* Find a spill register to use as a reload register for reload R.
4360 LAST_RELOAD is non-zero if this is the last reload for the insn being
4361 processed.
4362
4363 Set reload_reg_rtx[R] to the register allocated.
4364
4365 If NOERROR is nonzero, we return 1 if successful,
4366 or 0 if we couldn't find a spill reg and we didn't change anything. */
4367
4368 static int
4369 allocate_reload_reg (r, insn, last_reload, noerror)
4370 int r;
4371 rtx insn;
4372 int last_reload;
4373 int noerror;
4374 {
4375 int i;
4376 int pass;
4377 int count;
4378 rtx new;
4379 int regno;
4380
4381 /* If we put this reload ahead, thinking it is a group,
4382 then insist on finding a group. Otherwise we can grab a
4383 reg that some other reload needs.
4384 (That can happen when we have a 68000 DATA_OR_FP_REG
4385 which is a group of data regs or one fp reg.)
4386 We need not be so restrictive if there are no more reloads
4387 for this insn.
4388
4389 ??? Really it would be nicer to have smarter handling
4390 for that kind of reg class, where a problem like this is normal.
4391 Perhaps those classes should be avoided for reloading
4392 by use of more alternatives. */
4393
4394 int force_group = reload_nregs[r] > 1 && ! last_reload;
4395
4396 /* If we want a single register and haven't yet found one,
4397 take any reg in the right class and not in use.
4398 If we want a consecutive group, here is where we look for it.
4399
4400 We use two passes so we can first look for reload regs to
4401 reuse, which are already in use for other reloads in this insn,
4402 and only then use additional registers.
4403 I think that maximizing reuse is needed to make sure we don't
4404 run out of reload regs. Suppose we have three reloads, and
4405 reloads A and B can share regs. These need two regs.
4406 Suppose A and B are given different regs.
4407 That leaves none for C. */
4408 for (pass = 0; pass < 2; pass++)
4409 {
4410 /* I is the index in spill_regs.
4411 We advance it round-robin between insns to use all spill regs
4412 equally, so that inherited reloads have a chance
4413 of leapfrogging each other. */
4414
4415 for (count = 0, i = last_spill_reg; count < n_spills; count++)
4416 {
4417 int class = (int) reload_reg_class[r];
4418
4419 i = (i + 1) % n_spills;
4420
4421 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4422 reload_when_needed[r])
4423 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4424 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4425 /* Look first for regs to share, then for unshared. But
4426 don't share regs used for inherited reloads; they are
4427 the ones we want to preserve. */
4428 && (pass
4429 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4430 spill_regs[i])
4431 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4432 spill_regs[i]))))
4433 {
4434 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4435 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4436 (on 68000) got us two FP regs. If NR is 1,
4437 we would reject both of them. */
4438 if (force_group)
4439 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4440 /* If we need only one reg, we have already won. */
4441 if (nr == 1)
4442 {
4443 /* But reject a single reg if we demand a group. */
4444 if (force_group)
4445 continue;
4446 break;
4447 }
4448 /* Otherwise check that as many consecutive regs as we need
4449 are available here.
4450 Also, don't use for a group registers that are
4451 needed for nongroups. */
4452 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4453 while (nr > 1)
4454 {
4455 regno = spill_regs[i] + nr - 1;
4456 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4457 && spill_reg_order[regno] >= 0
4458 && reload_reg_free_p (regno, reload_opnum[r],
4459 reload_when_needed[r])
4460 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4461 regno)))
4462 break;
4463 nr--;
4464 }
4465 if (nr == 1)
4466 break;
4467 }
4468 }
4469
4470 /* If we found something on pass 1, omit pass 2. */
4471 if (count < n_spills)
4472 break;
4473 }
4474
4475 /* We should have found a spill register by now. */
4476 if (count == n_spills)
4477 {
4478 if (noerror)
4479 return 0;
4480 goto failure;
4481 }
4482
4483 /* I is the index in SPILL_REG_RTX of the reload register we are to
4484 allocate. Get an rtx for it and find its register number. */
4485
4486 new = spill_reg_rtx[i];
4487
4488 if (new == 0 || GET_MODE (new) != reload_mode[r])
4489 spill_reg_rtx[i] = new
4490 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4491
4492 regno = true_regnum (new);
4493
4494 /* Detect when the reload reg can't hold the reload mode.
4495 This used to be one `if', but the Sequent compiler can't handle that. */
4496 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4497 {
4498 enum machine_mode test_mode = VOIDmode;
4499 if (reload_in[r])
4500 test_mode = GET_MODE (reload_in[r]);
4501 /* If reload_in[r] has VOIDmode, it means we will load it
4502 in whatever mode the reload reg has: to wit, reload_mode[r].
4503 We have already tested that for validity. */
4504 /* Aside from that, we need to test that the expressions
4505 to reload from or into have modes which are valid for this
4506 reload register. Otherwise the reload insns would be invalid. */
4507 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4508 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4509 if (! (reload_out[r] != 0
4510 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4511 {
4512 /* The reg is OK. */
4513 last_spill_reg = i;
4514
4515 /* Mark as in use for this insn the reload regs we use
4516 for this. */
4517 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4518 reload_when_needed[r], reload_mode[r]);
4519
4520 reload_reg_rtx[r] = new;
4521 reload_spill_index[r] = i;
4522 return 1;
4523 }
4524 }
4525
4526 /* The reg is not OK. */
4527 if (noerror)
4528 return 0;
4529
4530 failure:
4531 if (asm_noperands (PATTERN (insn)) < 0)
4532 /* It's the compiler's fault. */
4533 abort ();
4534
4535 /* It's the user's fault; the operand's mode and constraint
4536 don't match. Disable this reload so we don't crash in final. */
4537 error_for_asm (insn,
4538 "`asm' operand constraint incompatible with operand size");
4539 reload_in[r] = 0;
4540 reload_out[r] = 0;
4541 reload_reg_rtx[r] = 0;
4542 reload_optional[r] = 1;
4543 reload_secondary_p[r] = 1;
4544
4545 return 1;
4546 }
4547 \f
4548 /* Assign hard reg targets for the pseudo-registers we must reload
4549 into hard regs for this insn.
4550 Also output the instructions to copy them in and out of the hard regs.
4551
4552 For machines with register classes, we are responsible for
4553 finding a reload reg in the proper class. */
4554
4555 static void
4556 choose_reload_regs (insn, avoid_return_reg)
4557 rtx insn;
4558 rtx avoid_return_reg;
4559 {
4560 register int i, j;
4561 int max_group_size = 1;
4562 enum reg_class group_class = NO_REGS;
4563 int inheritance;
4564
4565 rtx save_reload_reg_rtx[MAX_RELOADS];
4566 char save_reload_inherited[MAX_RELOADS];
4567 rtx save_reload_inheritance_insn[MAX_RELOADS];
4568 rtx save_reload_override_in[MAX_RELOADS];
4569 int save_reload_spill_index[MAX_RELOADS];
4570 HARD_REG_SET save_reload_reg_used;
4571 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4572 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4573 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4574 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4575 HARD_REG_SET save_reload_reg_used_in_op_addr;
4576 HARD_REG_SET save_reload_reg_used_in_insn;
4577 HARD_REG_SET save_reload_reg_used_in_other_addr;
4578 HARD_REG_SET save_reload_reg_used_at_all;
4579
4580 bzero (reload_inherited, MAX_RELOADS);
4581 bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4582 bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));
4583
4584 CLEAR_HARD_REG_SET (reload_reg_used);
4585 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4586 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4587 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4588 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4589
4590 for (i = 0; i < reload_n_operands; i++)
4591 {
4592 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4593 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4594 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4595 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4596 }
4597
4598 #ifdef SMALL_REGISTER_CLASSES
4599 /* Don't bother with avoiding the return reg
4600 if we have no mandatory reload that could use it. */
4601 if (avoid_return_reg)
4602 {
4603 int do_avoid = 0;
4604 int regno = REGNO (avoid_return_reg);
4605 int nregs
4606 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4607 int r;
4608
4609 for (r = regno; r < regno + nregs; r++)
4610 if (spill_reg_order[r] >= 0)
4611 for (j = 0; j < n_reloads; j++)
4612 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4613 && (reload_in[j] != 0 || reload_out[j] != 0
4614 || reload_secondary_p[j])
4615 &&
4616 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4617 do_avoid = 1;
4618 if (!do_avoid)
4619 avoid_return_reg = 0;
4620 }
4621 #endif /* SMALL_REGISTER_CLASSES */
4622
4623 #if 0 /* Not needed, now that we can always retry without inheritance. */
4624 /* See if we have more mandatory reloads than spill regs.
4625 If so, then we cannot risk optimizations that could prevent
4626 reloads from sharing one spill register.
4627
4628 Since we will try finding a better register than reload_reg_rtx
4629 unless it is equal to reload_in or reload_out, count such reloads. */
4630
4631 {
4632 int tem = 0;
4633 #ifdef SMALL_REGISTER_CLASSES
4634 int tem = (avoid_return_reg != 0);
4635 #endif
4636 for (j = 0; j < n_reloads; j++)
4637 if (! reload_optional[j]
4638 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4639 && (reload_reg_rtx[j] == 0
4640 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4641 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4642 tem++;
4643 if (tem > n_spills)
4644 must_reuse = 1;
4645 }
4646 #endif
4647
4648 #ifdef SMALL_REGISTER_CLASSES
4649 /* Don't use the subroutine call return reg for a reload
4650 if we are supposed to avoid it. */
4651 if (avoid_return_reg)
4652 {
4653 int regno = REGNO (avoid_return_reg);
4654 int nregs
4655 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4656 int r;
4657
4658 for (r = regno; r < regno + nregs; r++)
4659 if (spill_reg_order[r] >= 0)
4660 SET_HARD_REG_BIT (reload_reg_used, r);
4661 }
4662 #endif /* SMALL_REGISTER_CLASSES */
4663
4664 /* In order to be certain of getting the registers we need,
4665 we must sort the reloads into order of increasing register class.
4666 Then our grabbing of reload registers will parallel the process
4667 that provided the reload registers.
4668
4669 Also note whether any of the reloads wants a consecutive group of regs.
4670 If so, record the maximum size of the group desired and what
4671 register class contains all the groups needed by this insn. */
4672
4673 for (j = 0; j < n_reloads; j++)
4674 {
4675 reload_order[j] = j;
4676 reload_spill_index[j] = -1;
4677
4678 reload_mode[j]
4679 = (reload_inmode[j] == VOIDmode
4680 || (GET_MODE_SIZE (reload_outmode[j])
4681 > GET_MODE_SIZE (reload_inmode[j])))
4682 ? reload_outmode[j] : reload_inmode[j];
4683
4684 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
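      /* So reload_mode[j] is the wider of the modes being reloaded in and
         out, and reload_nregs[j] is the largest number of hard registers a
         register of class reload_reg_class[j] might need to hold a value
         in that mode.  */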
4685
4686 if (reload_nregs[j] > 1)
4687 {
4688 max_group_size = MAX (reload_nregs[j], max_group_size);
4689 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4690 }
4691
4692 /* If we have already decided to use a certain register,
4693 don't use it in another way. */
4694 if (reload_reg_rtx[j])
4695 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
4696 reload_when_needed[j], reload_mode[j]);
4697 }
4698
4699 if (n_reloads > 1)
4700 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
4701
4702 bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
4703 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4704 bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
4705 sizeof reload_inheritance_insn);
4706 bcopy (reload_override_in, save_reload_override_in,
4707 sizeof reload_override_in);
4708 bcopy (reload_spill_index, save_reload_spill_index,
4709 sizeof reload_spill_index);
4710 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4711 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
4712 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4713 reload_reg_used_in_op_addr);
4714 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
4715 reload_reg_used_in_insn);
4716 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
4717 reload_reg_used_in_other_addr);
4718
4719 for (i = 0; i < reload_n_operands; i++)
4720 {
4721 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
4722 reload_reg_used_in_output[i]);
4723 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
4724 reload_reg_used_in_input[i]);
4725 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
4726 reload_reg_used_in_input_addr[i]);
4727 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
4728 reload_reg_used_in_output_addr[i]);
4729 }
4730
4731 /* If -O, try first with inheritance, then turning it off.
4732 If not -O, don't do inheritance.
4733 Using inheritance when not optimizing leads to paradoxes
4734 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
4735 because one side of the comparison might be inherited. */
4736
4737 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
4738 {
4739 /* Process the reloads in order of preference just found.
4740 Beyond this point, subregs can be found in reload_reg_rtx.
4741
4742 This used to look for an existing reloaded home for all
4743 of the reloads, and only then perform any new reloads.
4744 But that could lose if the reloads were done out of reg-class order
4745 because a later reload with a looser constraint might have an old
4746 home in a register needed by an earlier reload with a tighter constraint.
4747
4748 To solve this, we make two passes over the reloads, in the order
4749 described above. In the first pass we try to inherit a reload
4750 from a previous insn. If there is a later reload that needs a
4751 class that is a proper subset of the class being processed, we must
4752 also allocate a spill register during the first pass.
4753
4754 Then make a second pass over the reloads to allocate any reloads
4755 that haven't been given registers yet. */
4756
4757 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
4758
4759 for (j = 0; j < n_reloads; j++)
4760 {
4761 register int r = reload_order[j];
4762
4763 /* Ignore reloads that got marked inoperative. */
4764 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4765 continue;
4766
4767 /* If find_reloads chose to use reload_in or reload_out as a reload
4768 register, we don't need to choose one. Otherwise, try even if it found
4769 one since we might save an insn if we find the value lying around. */
4770 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
4771 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
4772 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
4773 continue;
4774
4775 #if 0 /* No longer needed for correct operation.
4776 It might give better code, or might not; worth an experiment? */
4777 /* If this is an optional reload, we can't inherit from earlier insns
4778 until we are sure that any non-optional reloads have been allocated.
4779 The following code takes advantage of the fact that optional reloads
4780 are at the end of reload_order. */
4781 if (reload_optional[r] != 0)
4782 for (i = 0; i < j; i++)
4783 if ((reload_out[reload_order[i]] != 0
4784 || reload_in[reload_order[i]] != 0
4785 || reload_secondary_p[reload_order[i]])
4786 && ! reload_optional[reload_order[i]]
4787 && reload_reg_rtx[reload_order[i]] == 0)
4788 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
4789 #endif
4790
4791 /* First see if this pseudo is already available as reloaded
4792 for a previous insn. We cannot try to inherit for reloads
4793 that are smaller than the maximum number of registers needed
4794 for groups unless the register we would allocate cannot be used
4795 for the groups.
4796
4797 We could check here to see if this is a secondary reload for
4798 an object that is already in a register of the desired class.
4799 This would avoid the need for the secondary reload register.
4800 But this is complex because we can't easily determine what
4801 objects might want to be loaded via this reload. So let a register
4802 be allocated here. In `emit_reload_insns' we suppress one of the
4803 loads in the case described above. */
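 /* As a concrete illustration (pseudo and hard reg numbers invented):
 if a previous insn reloaded pseudo 100 into spill reg (reg:SI 4) and
 nothing has clobbered reg 4 since, then reg_last_reload_reg[100] is
 (reg:SI 4) and the reg_reloaded_contents entry for reg 4 still says
 100, so the tests below can mark this reload as inherited and reuse
 reg 4 instead of emitting another load.  */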
4804
4805 if (inheritance)
4806 {
4807 register int regno = -1;
4808 enum machine_mode mode;
4809
4810 if (reload_in[r] == 0)
4811 ;
4812 else if (GET_CODE (reload_in[r]) == REG)
4813 {
4814 regno = REGNO (reload_in[r]);
4815 mode = GET_MODE (reload_in[r]);
4816 }
4817 else if (GET_CODE (reload_in_reg[r]) == REG)
4818 {
4819 regno = REGNO (reload_in_reg[r]);
4820 mode = GET_MODE (reload_in_reg[r]);
4821 }
4822 #if 0
4823 /* This won't work, since REGNO can be a pseudo reg number.
4824 Also, it takes much more hair to keep track of all the things
4825 that can invalidate an inherited reload of part of a pseudoreg. */
4826 else if (GET_CODE (reload_in[r]) == SUBREG
4827 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
4828 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
4829 #endif
4830
4831 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
4832 {
4833 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
4834
4835 if (reg_reloaded_contents[i] == regno
4836 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
4837 >= GET_MODE_SIZE (mode))
4838 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4839 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4840 spill_regs[i])
4841 && (reload_nregs[r] == max_group_size
4842 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
4843 spill_regs[i]))
4844 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
4845 reload_when_needed[r])
4846 && reload_reg_free_before_p (spill_regs[i],
4847 reload_opnum[r],
4848 reload_when_needed[r]))
4849 {
4850 /* If a group is needed, verify that all the subsequent
4851 registers still have their values intact. */
4852 int nr
4853 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4854 int k;
4855
4856 for (k = 1; k < nr; k++)
4857 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
4858 != regno)
4859 break;
4860
4861 if (k == nr)
4862 {
4863 int i1;
4864
4865 /* We found a register that contains the
4866 value we need. If this register is the
4867 same as an `earlyclobber' operand of the
4868 current insn, just mark it as a place to
4869 reload from since we can't use it as the
4870 reload register itself. */
4871
4872 for (i1 = 0; i1 < n_earlyclobbers; i1++)
4873 if (reg_overlap_mentioned_for_reload_p
4874 (reg_last_reload_reg[regno],
4875 reload_earlyclobbers[i1]))
4876 break;
4877
4878 if (i1 != n_earlyclobbers
4879 /* Don't really use the inherited spill reg
4880 if we need it wider than we've got it. */
4881 || (GET_MODE_SIZE (reload_mode[r])
4882 > GET_MODE_SIZE (mode)))
4883 reload_override_in[r] = reg_last_reload_reg[regno];
4884 else
4885 {
4886 /* We can use this as a reload reg. */
4887 /* Mark the register as in use for this part of
4888 the insn. */
4889 mark_reload_reg_in_use (spill_regs[i],
4890 reload_opnum[r],
4891 reload_when_needed[r],
4892 reload_mode[r]);
4893 reload_reg_rtx[r] = reg_last_reload_reg[regno];
4894 reload_inherited[r] = 1;
4895 reload_inheritance_insn[r]
4896 = reg_reloaded_insn[i];
4897 reload_spill_index[r] = i;
4898 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
4899 spill_regs[i]);
4900 }
4901 }
4902 }
4903 }
4904 }
4905
4906 /* Here's another way to see if the value is already lying around. */
4907 if (inheritance
4908 && reload_in[r] != 0
4909 && ! reload_inherited[r]
4910 && reload_out[r] == 0
4911 && (CONSTANT_P (reload_in[r])
4912 || GET_CODE (reload_in[r]) == PLUS
4913 || GET_CODE (reload_in[r]) == REG
4914 || GET_CODE (reload_in[r]) == MEM)
4915 && (reload_nregs[r] == max_group_size
4916 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
4917 {
4918 register rtx equiv
4919 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
4920 -1, NULL_PTR, 0, reload_mode[r]);
4921 int regno;
4922
4923 if (equiv != 0)
4924 {
4925 if (GET_CODE (equiv) == REG)
4926 regno = REGNO (equiv);
4927 else if (GET_CODE (equiv) == SUBREG)
4928 {
4929 regno = REGNO (SUBREG_REG (equiv));
4930 if (regno < FIRST_PSEUDO_REGISTER)
4931 regno += SUBREG_WORD (equiv);
4932 }
4933 else
4934 abort ();
4935 }
4936
4937 /* If we found a spill reg, reject it unless it is free
4938 and of the desired class. */
4939 if (equiv != 0
4940 && ((spill_reg_order[regno] >= 0
4941 && ! reload_reg_free_before_p (regno, reload_opnum[r],
4942 reload_when_needed[r]))
4943 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4944 regno)))
4945 equiv = 0;
4946
4947 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
4948 equiv = 0;
4949
4950 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4951 equiv = 0;
4952
4953 /* We found a register that contains the value we need.
4954 If this register is the same as an `earlyclobber' operand
4955 of the current insn, just mark it as a place to reload from
4956 since we can't use it as the reload register itself. */
4957
4958 if (equiv != 0)
4959 for (i = 0; i < n_earlyclobbers; i++)
4960 if (reg_overlap_mentioned_for_reload_p (equiv,
4961 reload_earlyclobbers[i]))
4962 {
4963 reload_override_in[r] = equiv;
4964 equiv = 0;
4965 break;
4966 }
4967
4968 /* JRV: If the equiv register we have found is explicitly
4969 clobbered in the current insn, mark but don't use, as above. */
4970
4971 if (equiv != 0 && regno_clobbered_p (regno, insn))
4972 {
4973 reload_override_in[r] = equiv;
4974 equiv = 0;
4975 }
4976
4977 /* If we found an equivalent reg, say no code need be generated
4978 to load it, and use it as our reload reg. */
4979 if (equiv != 0 && regno != FRAME_POINTER_REGNUM)
4980 {
4981 reload_reg_rtx[r] = equiv;
4982 reload_inherited[r] = 1;
4983 /* If it is a spill reg,
4984 mark the spill reg as in use for this insn. */
4985 i = spill_reg_order[regno];
4986 if (i >= 0)
4987 {
4988 mark_reload_reg_in_use (regno, reload_opnum[r],
4989 reload_when_needed[r],
4990 reload_mode[r]);
4991 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno);
4992 }
4993 }
4994 }
4995
4996 /* If we found a register to use already, or if this is an optional
4997 reload, we are done. */
4998 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
4999 continue;
5000
5001 #if 0 /* No longer needed for correct operation. Might or might not
5002 give better code on the average. Want to experiment? */
5003
5004 /* See if there is a later reload that has a class different from our
5005 class that intersects our class or that requires fewer registers
5006 than our reload. If so, we must allocate a register to this
5007 reload now, since that reload might inherit a previous reload
5008 and take the only available register in our class. Don't do this
5009 for optional reloads since they will force all previous reloads
5010 to be allocated. Also don't do this for reloads that have been
5011 turned off. */
5012
5013 for (i = j + 1; i < n_reloads; i++)
5014 {
5015 int s = reload_order[i];
5016
5017 if ((reload_in[s] == 0 && reload_out[s] == 0
5018 && ! reload_secondary_p[s])
5019 || reload_optional[s])
5020 continue;
5021
5022 if ((reload_reg_class[s] != reload_reg_class[r]
5023 && reg_classes_intersect_p (reload_reg_class[r],
5024 reload_reg_class[s]))
5025 || reload_nregs[s] < reload_nregs[r])
5026 break;
5027 }
5028
5029 if (i == n_reloads)
5030 continue;
5031
5032 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5033 #endif
5034 }
5035
5036 /* Now allocate reload registers for anything non-optional that
5037 didn't get one yet. */
5038 for (j = 0; j < n_reloads; j++)
5039 {
5040 register int r = reload_order[j];
5041
5042 /* Ignore reloads that got marked inoperative. */
5043 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5044 continue;
5045
5046 /* Skip reloads that already have a register allocated or are
5047 optional. */
5048 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5049 continue;
5050
5051 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5052 break;
5053 }
5054
5055 /* If that loop got all the way, we have won. */
5056 if (j == n_reloads)
5057 break;
5058
5059 fail:
5060 /* Loop around and try without any inheritance. */
5061 /* First undo everything done by the failed attempt
5062 to allocate with inheritance. */
5063 bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
5064 bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
5065 bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
5066 sizeof reload_inheritance_insn);
5067 bcopy (save_reload_override_in, reload_override_in,
5068 sizeof reload_override_in);
5069 bcopy (save_reload_spill_index, reload_spill_index,
5070 sizeof reload_spill_index);
5071 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5072 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5073 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5074 save_reload_reg_used_in_op_addr);
5075 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5076 save_reload_reg_used_in_insn);
5077 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5078 save_reload_reg_used_in_other_addr);
5079
5080 for (i = 0; i < reload_n_operands; i++)
5081 {
5082 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5083 save_reload_reg_used_in_input[i]);
5084 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5085 save_reload_reg_used_in_output[i]);
5086 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5087 save_reload_reg_used_in_input_addr[i]);
5088 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5089 save_reload_reg_used_in_output_addr[i]);
5090 }
5091 }
5092
5093 /* If we thought we could inherit a reload, because it seemed that
5094 nothing else wanted the same reload register earlier in the insn,
5095 verify that assumption, now that all reloads have been assigned. */
5096
5097 for (j = 0; j < n_reloads; j++)
5098 {
5099 register int r = reload_order[j];
5100
5101 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5102 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5103 reload_opnum[r],
5104 reload_when_needed[r]))
5105 reload_inherited[r] = 0;
5106
5107 /* If we found a better place to reload from,
5108 validate it in the same fashion, if it is a reload reg. */
5109 if (reload_override_in[r]
5110 && (GET_CODE (reload_override_in[r]) == REG
5111 || GET_CODE (reload_override_in[r]) == SUBREG))
5112 {
5113 int regno = true_regnum (reload_override_in[r]);
5114 if (spill_reg_order[regno] >= 0
5115 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5116 reload_when_needed[r]))
5117 reload_override_in[r] = 0;
5118 }
5119 }
5120
5121 /* Now that reload_override_in is known valid,
5122 actually override reload_in. */
5123 for (j = 0; j < n_reloads; j++)
5124 if (reload_override_in[j])
5125 reload_in[j] = reload_override_in[j];
5126
5127 /* If this reload won't be done because it has been cancelled or is
5128 optional and not inherited, clear reload_reg_rtx so other
5129 routines (such as subst_reloads) don't get confused. */
5130 for (j = 0; j < n_reloads; j++)
5131 if (reload_reg_rtx[j] != 0
5132 && ((reload_optional[j] && ! reload_inherited[j])
5133 || (reload_in[j] == 0 && reload_out[j] == 0
5134 && ! reload_secondary_p[j])))
5135 {
5136 int regno = true_regnum (reload_reg_rtx[j]);
5137
5138 if (spill_reg_order[regno] >= 0)
5139 clear_reload_reg_in_use (regno, reload_opnum[j],
5140 reload_when_needed[j], reload_mode[j]);
5141 reload_reg_rtx[j] = 0;
5142 }
5143
5144 /* Record which pseudos and which spill regs have output reloads. */
5145 for (j = 0; j < n_reloads; j++)
5146 {
5147 register int r = reload_order[j];
5148
5149 i = reload_spill_index[r];
5150
5151 /* I is nonneg if this reload used one of the spill regs.
5152 If reload_reg_rtx[r] is 0, this is an optional reload
5153 that we opted to ignore. */
5154 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5155 && reload_reg_rtx[r] != 0)
5156 {
5157 register int nregno = REGNO (reload_out[r]);
5158 int nr = 1;
5159
5160 if (nregno < FIRST_PSEUDO_REGISTER)
5161 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5162
5163 while (--nr >= 0)
5164 reg_has_output_reload[nregno + nr] = 1;
5165
5166 if (i >= 0)
5167 {
5168 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5169 while (--nr >= 0)
5170 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5171 }
5172
5173 if (reload_when_needed[r] != RELOAD_OTHER
5174 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5175 && reload_when_needed[r] != RELOAD_FOR_INSN)
5176 abort ();
5177 }
5178 }
5179 }
5180 \f
5181 /* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5182 reloads of the same item for fear that we might not have enough reload
5183 registers. However, normally they will get the same reload register
5184 and hence actually need not be loaded twice.
5185
5186 Here we check for the most common case of this phenomenon: when we have
5187 a number of reloads for the same object, each of which were allocated
5188 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5189 reload, and is not modified in the insn itself. If we find such,
5190 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5191 This will not increase the number of spill registers needed and will
5192 prevent redundant code. */
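 /* A hypothetical illustration (register and pseudo numbers made up):
 suppose an insn uses the address in (reg:SI 100) in two different
 memory operands, so find_reloads created a RELOAD_FOR_INPUT_ADDRESS
 reload of (reg:SI 100) for operand 0 and another for operand 1, and
 both happened to be assigned the same hard reg (reg:SI 3).  The
 function below notices that (reg:SI 3) is used only to load that one
 value, merges the reloads into a single RELOAD_OTHER reload, and
 clears reload_in of the duplicate so only one load is emitted.  */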
5193
5194 #ifdef SMALL_REGISTER_CLASSES
5195
5196 static void
5197 merge_assigned_reloads (insn)
5198 rtx insn;
5199 {
5200 int i, j;
5201
5202 /* Scan all the reloads looking for ones that only load values, are
5203 not already RELOAD_OTHER, and whose reload_reg_rtx is assigned
5204 and not modified by INSN. */
5205
5206 for (i = 0; i < n_reloads; i++)
5207 {
5208 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5209 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5210 || reg_set_p (reload_reg_rtx[i], insn))
5211 continue;
5212
5213 /* Look at all other reloads. Ensure that the only use of this
5214 reload_reg_rtx is in a reload that just loads the same value
5215 as we do. Note that any secondary reloads must be of the identical
5216 class since the values, modes, and result registers are the
5217 same, so we need not do anything with any secondary reloads. */
5218
5219 for (j = 0; j < n_reloads; j++)
5220 {
5221 if (i == j || reload_reg_rtx[j] == 0
5222 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5223 reload_reg_rtx[i]))
5224 continue;
5225
5226 /* If the reload regs aren't exactly the same (e.g., different modes)
5227 or if the values are different, we can't merge anything with this
5228 reload register. */
5229
5230 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5231 || reload_out[j] != 0 || reload_in[j] == 0
5232 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5233 break;
5234 }
5235
5236 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5237 we, in fact, found any matching reloads. */
5238
5239 if (j == n_reloads)
5240 {
5241 for (j = 0; j < n_reloads; j++)
5242 if (i != j && reload_reg_rtx[j] != 0
5243 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5244 {
5245 reload_when_needed[i] = RELOAD_OTHER;
5246 reload_in[j] = 0;
5247 transfer_replacements (i, j);
5248 }
5249
5250 /* If this is now RELOAD_OTHER, look for any reloads that load
5251 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5252 if they were for inputs, RELOAD_OTHER for outputs. Note that
5253 this test is equivalent to looking for reloads for this operand
5254 number. */
5255
5256 if (reload_when_needed[i] == RELOAD_OTHER)
5257 for (j = 0; j < n_reloads; j++)
5258 if (reload_in[j] != 0
5259 && reload_when_needed[j] != RELOAD_OTHER
5260 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5261 reload_in[i]))
5262 reload_when_needed[j]
5263 = reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5264 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5265 }
5266 }
5267 }
5268 #endif /* SMALL_REGISTER_CLASSES */
5269 \f
5270 /* Output insns to reload values in and out of the chosen reload regs. */
5271
5272 static void
5273 emit_reload_insns (insn)
5274 rtx insn;
5275 {
5276 register int j;
5277 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5278 rtx other_input_address_reload_insns = 0;
5279 rtx other_input_reload_insns = 0;
5280 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5281 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5282 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5283 rtx operand_reload_insns = 0;
5284 rtx following_insn = NEXT_INSN (insn);
5285 rtx before_insn = insn;
5286 int special;
5287 /* Values to be put in spill_reg_store are put here first. */
5288 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5289
5290 for (j = 0; j < reload_n_operands; j++)
5291 input_reload_insns[j] = input_address_reload_insns[j]
5292 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5293
5294 /* If this is a CALL_INSN preceded by USE insns, any reload insns
5295 must go in front of the first USE insn, not in front of INSN. */
5296
5297 if (GET_CODE (insn) == CALL_INSN && GET_CODE (PREV_INSN (insn)) == INSN
5298 && GET_CODE (PATTERN (PREV_INSN (insn))) == USE)
5299 while (GET_CODE (PREV_INSN (before_insn)) == INSN
5300 && GET_CODE (PATTERN (PREV_INSN (before_insn))) == USE)
5301 before_insn = PREV_INSN (before_insn);
5302
5303 /* If INSN is followed by any CLOBBER insns made by find_reloads,
5304 put our reloads after them since they may otherwise be
5305 misinterpreted. */
5306
5307 while (GET_CODE (following_insn) == INSN
5308 && GET_MODE (following_insn) == DImode
5309 && GET_CODE (PATTERN (following_insn)) == CLOBBER
5310 && NEXT_INSN (following_insn) != 0)
5311 following_insn = NEXT_INSN (following_insn);
5312
5313 /* Now output the instructions to copy the data into and out of the
5314 reload registers. Do these in the order that the reloads were reported,
5315 since reloads of base and index registers precede reloads of operands
5316 and the operands may need the base and index registers reloaded. */
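 /* For instance (pseudo numbers invented): if operand 0 is
 (mem:SI (reg:SI 100)) and pseudo 100 itself lives on the stack, the
 RELOAD_FOR_INPUT_ADDRESS reload that brings 100 into an address
 register has to be emitted before the RELOAD_FOR_INPUT reload that
 then loads the MEM through it.  */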
5317
5318 for (j = 0; j < n_reloads; j++)
5319 {
5320 register rtx old;
5321 rtx oldequiv_reg = 0;
5322 rtx store_insn = 0;
5323
5324 old = reload_in[j];
5325 if (old != 0 && ! reload_inherited[j]
5326 && ! rtx_equal_p (reload_reg_rtx[j], old)
5327 && reload_reg_rtx[j] != 0)
5328 {
5329 register rtx reloadreg = reload_reg_rtx[j];
5330 rtx oldequiv = 0;
5331 enum machine_mode mode;
5332 rtx *where;
5333
5334 /* Determine the mode to reload in.
5335 This is very tricky because we have three to choose from.
5336 There is the mode the insn operand wants (reload_inmode[J]).
5337 There is the mode of the reload register RELOADREG.
5338 There is the intrinsic mode of the operand, which we could find
5339 by stripping some SUBREGs.
5340 It turns out that RELOADREG's mode is irrelevant:
5341 we can change that arbitrarily.
5342
5343 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5344 then the reload reg may not support QImode moves, so use SImode.
5345 If foo is in memory due to spilling a pseudo reg, this is safe,
5346 because the QImode value is in the least significant part of a
5347 slot big enough for a SImode. If foo is some other sort of
5348 memory reference, then it is impossible to reload this case,
5349 so previous passes had better make sure this never happens.
5350
5351 Then consider a one-word union which has SImode and one of its
5352 members is a float, being fetched as (SUBREG:SF union:SI).
5353 We must fetch that as SFmode because we could be loading into
5354 a float-only register. In this case OLD's mode is correct.
5355
5356 Consider an immediate integer: it has VOIDmode. Here we need
5357 to get a mode from something else.
5358
5359 In some cases, there is a fourth mode, the operand's
5360 containing mode. If the insn specifies a containing mode for
5361 this operand, it overrides all others.
5362
5363 I am not sure whether the algorithm here is always right,
5364 but it does the right things in those cases. */
5365
5366 mode = GET_MODE (old);
5367 if (mode == VOIDmode)
5368 mode = reload_inmode[j];
5369
5370 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5371 /* If we need a secondary register for this operation, see if
5372 the value is already in a register in that class. Don't
5373 do this if the secondary register will be used as a scratch
5374 register. */
5375
5376 if (reload_secondary_reload[j] >= 0
5377 && reload_secondary_icode[j] == CODE_FOR_nothing
5378 && optimize)
5379 oldequiv
5380 = find_equiv_reg (old, insn,
5381 reload_reg_class[reload_secondary_reload[j]],
5382 -1, NULL_PTR, 0, mode);
5383 #endif
5384
5385 /* If reloading from memory, see if there is a register
5386 that already holds the same value. If so, reload from there.
5387 We can pass 0 as the reload_reg_p argument because
5388 any other reload has either already been emitted,
5389 in which case find_equiv_reg will see the reload-insn,
5390 or has yet to be emitted, in which case it doesn't matter
5391 because we will use this equiv reg right away. */
5392
5393 if (oldequiv == 0 && optimize
5394 && (GET_CODE (old) == MEM
5395 || (GET_CODE (old) == REG
5396 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5397 && reg_renumber[REGNO (old)] < 0)))
5398 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5399 -1, NULL_PTR, 0, mode);
5400
5401 if (oldequiv)
5402 {
5403 int regno = true_regnum (oldequiv);
5404
5405 /* If OLDEQUIV is a spill register, don't use it for this
5406 if any other reload needs it at an earlier stage of this insn
5407 or at this stage. */
5408 if (spill_reg_order[regno] >= 0
5409 && (! reload_reg_free_p (regno, reload_opnum[j],
5410 reload_when_needed[j])
5411 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5412 reload_when_needed[j])))
5413 oldequiv = 0;
5414
5415 /* If OLDEQUIV is not a spill register,
5416 don't use it if any other reload wants it. */
5417 if (spill_reg_order[regno] < 0)
5418 {
5419 int k;
5420 for (k = 0; k < n_reloads; k++)
5421 if (reload_reg_rtx[k] != 0 && k != j
5422 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5423 oldequiv))
5424 {
5425 oldequiv = 0;
5426 break;
5427 }
5428 }
5429
5430 /* If it is no cheaper to copy from OLDEQUIV into the
5431 reload register than it would be to move from memory,
5432 don't use it. Likewise, if we need a secondary register
5433 or memory. */
5434
5435 if (oldequiv != 0
5436 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5437 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5438 reload_reg_class[j])
5439 >= MEMORY_MOVE_COST (mode)))
5440 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5441 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5442 mode, oldequiv)
5443 != NO_REGS)
5444 #endif
5445 #ifdef SECONDARY_MEMORY_NEEDED
5446 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5447 REGNO_REG_CLASS (regno),
5448 mode)
5449 #endif
5450 ))
5451 oldequiv = 0;
5452 }
5453
5454 if (oldequiv == 0)
5455 oldequiv = old;
5456 else if (GET_CODE (oldequiv) == REG)
5457 oldequiv_reg = oldequiv;
5458 else if (GET_CODE (oldequiv) == SUBREG)
5459 oldequiv_reg = SUBREG_REG (oldequiv);
5460
5461 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5462 then load RELOADREG from OLDEQUIV. */
5463
5464 if (GET_MODE (reloadreg) != mode)
5465 reloadreg = gen_lowpart_common (mode, reloadreg);
5466 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5467 oldequiv = SUBREG_REG (oldequiv);
5468 if (GET_MODE (oldequiv) != VOIDmode
5469 && mode != GET_MODE (oldequiv))
5470 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5471
5472 /* Switch to the right place to emit the reload insns. */
5473 switch (reload_when_needed[j])
5474 {
5475 case RELOAD_OTHER:
5476 where = &other_input_reload_insns;
5477 break;
5478 case RELOAD_FOR_INPUT:
5479 where = &input_reload_insns[reload_opnum[j]];
5480 break;
5481 case RELOAD_FOR_INPUT_ADDRESS:
5482 where = &input_address_reload_insns[reload_opnum[j]];
5483 break;
5484 case RELOAD_FOR_OUTPUT_ADDRESS:
5485 where = &output_address_reload_insns[reload_opnum[j]];
5486 break;
5487 case RELOAD_FOR_OPERAND_ADDRESS:
5488 where = &operand_reload_insns;
5489 break;
5490 case RELOAD_FOR_OTHER_ADDRESS:
5491 where = &other_input_address_reload_insns;
5492 break;
5493 default:
5494 abort ();
5495 }
5496
5497 push_to_sequence (*where);
5498 special = 0;
5499
5500 /* Auto-increment addresses must be reloaded in a special way. */
5501 if (GET_CODE (oldequiv) == POST_INC
5502 || GET_CODE (oldequiv) == POST_DEC
5503 || GET_CODE (oldequiv) == PRE_INC
5504 || GET_CODE (oldequiv) == PRE_DEC)
5505 {
5506 /* We are not going to bother supporting the case where an
5507 incremented register can't be copied directly from
5508 OLDEQUIV since this seems highly unlikely. */
5509 if (reload_secondary_reload[j] >= 0)
5510 abort ();
5511 /* Prevent normal processing of this reload. */
5512 special = 1;
5513 /* Output a special code sequence for this case. */
5514 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5515 }
5516
5517 /* If we are reloading a pseudo-register that was set by the previous
5518 insn, see if we can get rid of that pseudo-register entirely
5519 by redirecting the previous insn into our reload register. */
5520
5521 else if (optimize && GET_CODE (old) == REG
5522 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5523 && dead_or_set_p (insn, old)
5524 /* This is unsafe if some other reload
5525 uses the same reg first. */
5526 && reload_reg_free_before_p (REGNO (reloadreg),
5527 reload_opnum[j],
5528 reload_when_needed[j]))
5529 {
5530 rtx temp = PREV_INSN (insn);
5531 while (temp && GET_CODE (temp) == NOTE)
5532 temp = PREV_INSN (temp);
5533 if (temp
5534 && GET_CODE (temp) == INSN
5535 && GET_CODE (PATTERN (temp)) == SET
5536 && SET_DEST (PATTERN (temp)) == old
5537 /* Make sure we can access insn_operand_constraint. */
5538 && asm_noperands (PATTERN (temp)) < 0
5539 /* This is unsafe if prev insn rejects our reload reg. */
5540 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5541 reloadreg)
5542 /* This is unsafe if operand occurs more than once in current
5543 insn. Perhaps some occurrences aren't reloaded. */
5544 && count_occurrences (PATTERN (insn), old) == 1
5545 /* Don't risk splitting a matching pair of operands. */
5546 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5547 {
5548 /* Store into the reload register instead of the pseudo. */
5549 SET_DEST (PATTERN (temp)) = reloadreg;
5550 /* If these are the only uses of the pseudo reg,
5551 pretend for GDB it lives in the reload reg we used. */
5552 if (reg_n_deaths[REGNO (old)] == 1
5553 && reg_n_sets[REGNO (old)] == 1)
5554 {
5555 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5556 alter_reg (REGNO (old), -1);
5557 }
5558 special = 1;
5559 }
5560 }
5561
5562 /* We can't do that, so output an insn to load RELOADREG. */
5563
5564 if (! special)
5565 {
5566 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5567 rtx second_reload_reg = 0;
5568 enum insn_code icode;
5569
5570 /* If we have a secondary reload, pick up the secondary register
5571 and icode, if any. If OLDEQUIV and OLD are different or
5572 if this is an in-out reload, recompute whether or not we
5573 still need a secondary register and what the icode should
5574 be. If we still need a secondary register and the class or
5575 icode is different, go back to reloading from OLD if using
5576 OLDEQUIV means that we got the wrong type of register. We
5577 cannot have different class or icode due to an in-out reload
5578 because we don't make such reloads when both the input and
5579 output need secondary reload registers. */
5580
5581 if (reload_secondary_reload[j] >= 0)
5582 {
5583 int secondary_reload = reload_secondary_reload[j];
5584 rtx real_oldequiv = oldequiv;
5585 rtx real_old = old;
5586
5587 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5588 and similarly for OLD.
5589 See comments in find_secondary_reload in reload.c. */
5590 if (GET_CODE (oldequiv) == REG
5591 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5592 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5593 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5594
5595 if (GET_CODE (old) == REG
5596 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5597 && reg_equiv_mem[REGNO (old)] != 0)
5598 real_old = reg_equiv_mem[REGNO (old)];
5599
5600 second_reload_reg = reload_reg_rtx[secondary_reload];
5601 icode = reload_secondary_icode[j];
5602
5603 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5604 || (reload_in[j] != 0 && reload_out[j] != 0))
5605 {
5606 enum reg_class new_class
5607 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5608 mode, real_oldequiv);
5609
5610 if (new_class == NO_REGS)
5611 second_reload_reg = 0;
5612 else
5613 {
5614 enum insn_code new_icode;
5615 enum machine_mode new_mode;
5616
5617 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5618 REGNO (second_reload_reg)))
5619 oldequiv = old, real_oldequiv = real_old;
5620 else
5621 {
5622 new_icode = reload_in_optab[(int) mode];
5623 if (new_icode != CODE_FOR_nothing
5624 && ((insn_operand_predicate[(int) new_icode][0]
5625 && ! ((*insn_operand_predicate[(int) new_icode][0])
5626 (reloadreg, mode)))
5627 || (insn_operand_predicate[(int) new_icode][1]
5628 && ! ((*insn_operand_predicate[(int) new_icode][1])
5629 (real_oldequiv, mode)))))
5630 new_icode = CODE_FOR_nothing;
5631
5632 if (new_icode == CODE_FOR_nothing)
5633 new_mode = mode;
5634 else
5635 new_mode = insn_operand_mode[(int) new_icode][2];
5636
5637 if (GET_MODE (second_reload_reg) != new_mode)
5638 {
5639 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5640 new_mode))
5641 oldequiv = old, real_oldequiv = real_old;
5642 else
5643 second_reload_reg
5644 = gen_rtx (REG, new_mode,
5645 REGNO (second_reload_reg));
5646 }
5647 }
5648 }
5649 }
5650
5651 /* If we still need a secondary reload register, check
5652 to see if it is being used as a scratch or intermediate
5653 register and generate code appropriately. If we need
5654 a scratch register, use REAL_OLDEQUIV since the form of
5655 the insn may depend on the actual address if it is
5656 a MEM. */
5657
5658 if (second_reload_reg)
5659 {
5660 if (icode != CODE_FOR_nothing)
5661 {
5662 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
5663 second_reload_reg));
5664 special = 1;
5665 }
5666 else
5667 {
5668 /* See if we need a scratch register to load the
5669 intermediate register (a tertiary reload). */
5670 enum insn_code tertiary_icode
5671 = reload_secondary_icode[secondary_reload];
5672
5673 if (tertiary_icode != CODE_FOR_nothing)
5674 {
5675 rtx third_reload_reg
5676 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5677
5678 emit_insn ((GEN_FCN (tertiary_icode)
5679 (second_reload_reg, real_oldequiv,
5680 third_reload_reg)));
5681 }
5682 else
5683 gen_input_reload (second_reload_reg, oldequiv,
5684 reload_opnum[j],
5685 reload_when_needed[j]);
5686
5687 oldequiv = second_reload_reg;
5688 }
5689 }
5690 }
5691 #endif
5692
5693 if (! special)
5694 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
5695 reload_when_needed[j]);
5696
5697 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5698 /* We may have to make a REG_DEAD note for the secondary reload
5699 register in the insns we just made. Find the last insn that
5700 mentioned the register. */
5701 if (! special && second_reload_reg
5702 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5703 {
5704 rtx prev;
5705
5706 for (prev = get_last_insn (); prev;
5707 prev = PREV_INSN (prev))
5708 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5709 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5710 PATTERN (prev)))
5711 {
5712 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5713 second_reload_reg,
5714 REG_NOTES (prev));
5715 break;
5716 }
5717 }
5718 #endif
5719 }
5720
5721 /* End this sequence. */
5722 *where = get_insns ();
5723 end_sequence ();
5724 }
5725
5726 /* Add a note saying the input reload reg
5727 dies in this insn, if anyone cares. */
5728 #ifdef PRESERVE_DEATH_INFO_REGNO_P
5729 if (old != 0
5730 && reload_reg_rtx[j] != old
5731 && reload_reg_rtx[j] != 0
5732 && reload_out[j] == 0
5733 && ! reload_inherited[j]
5734 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
5735 {
5736 register rtx reloadreg = reload_reg_rtx[j];
5737
5738 #if 0
5739 /* We can't abort here because we need to support this for sched.c.
5740 It's not terrible to miss a REG_DEAD note, but we should try
5741 to figure out how to do this correctly. */
5742 /* The code below is incorrect for address-only reloads. */
5743 if (reload_when_needed[j] != RELOAD_OTHER
5744 && reload_when_needed[j] != RELOAD_FOR_INPUT)
5745 abort ();
5746 #endif
5747
5748 /* Add a death note to this insn, for an input reload. */
5749
5750 if ((reload_when_needed[j] == RELOAD_OTHER
5751 || reload_when_needed[j] == RELOAD_FOR_INPUT)
5752 && ! dead_or_set_p (insn, reloadreg))
5753 REG_NOTES (insn)
5754 = gen_rtx (EXPR_LIST, REG_DEAD,
5755 reloadreg, REG_NOTES (insn));
5756 }
5757
5758 /* When we inherit a reload, the last marked death of the reload reg
5759 may no longer really be a death. */
5760 if (reload_reg_rtx[j] != 0
5761 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
5762 && reload_inherited[j])
5763 {
5764 /* Handle inheriting an output reload.
5765 Remove the death note from the output reload insn. */
5766 if (reload_spill_index[j] >= 0
5767 && GET_CODE (reload_in[j]) == REG
5768 && spill_reg_store[reload_spill_index[j]] != 0
5769 && find_regno_note (spill_reg_store[reload_spill_index[j]],
5770 REG_DEAD, REGNO (reload_reg_rtx[j])))
5771 remove_death (REGNO (reload_reg_rtx[j]),
5772 spill_reg_store[reload_spill_index[j]]);
5773 /* Likewise for input reloads that were inherited. */
5774 else if (reload_spill_index[j] >= 0
5775 && GET_CODE (reload_in[j]) == REG
5776 && spill_reg_store[reload_spill_index[j]] == 0
5777 && reload_inheritance_insn[j] != 0
5778 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
5779 REGNO (reload_reg_rtx[j])))
5780 remove_death (REGNO (reload_reg_rtx[j]),
5781 reload_inheritance_insn[j]);
5782 else
5783 {
5784 rtx prev;
5785
5786 /* We got this register from find_equiv_reg.
5787 Search back for its last death note and get rid of it.
5788 But don't search back too far.
5789 Don't go past a place where this reg is set,
5790 since a death note before that remains valid. */
5791 for (prev = PREV_INSN (insn);
5792 prev && GET_CODE (prev) != CODE_LABEL;
5793 prev = PREV_INSN (prev))
5794 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5795 && dead_or_set_p (prev, reload_reg_rtx[j]))
5796 {
5797 if (find_regno_note (prev, REG_DEAD,
5798 REGNO (reload_reg_rtx[j])))
5799 remove_death (REGNO (reload_reg_rtx[j]), prev);
5800 break;
5801 }
5802 }
5803 }
5804
5805 /* We might have used find_equiv_reg above to choose an alternate
5806 place from which to reload. If so, and it died, we need to remove
5807 that death and move it to one of the insns we just made. */
5808
5809 if (oldequiv_reg != 0
5810 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
5811 {
5812 rtx prev, prev1;
5813
5814 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
5815 prev = PREV_INSN (prev))
5816 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5817 && dead_or_set_p (prev, oldequiv_reg))
5818 {
5819 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
5820 {
5821 for (prev1 = this_reload_insn;
5822 prev1; prev1 = PREV_INSN (prev1))
5823 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
5824 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
5825 PATTERN (prev1)))
5826 {
5827 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
5828 oldequiv_reg,
5829 REG_NOTES (prev1));
5830 break;
5831 }
5832 remove_death (REGNO (oldequiv_reg), prev);
5833 }
5834 break;
5835 }
5836 }
5837 #endif
5838
5839 /* If we are reloading a register that was recently stored into by an
5840 output-reload, see if we can prove there was
5841 actually no need to store the old value in it. */
5842
5843 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
5844 && reload_in[j] != 0
5845 && GET_CODE (reload_in[j]) == REG
5846 #if 0
5847 /* There doesn't seem to be any reason to restrict this to pseudos
5848 and doing so loses in the case where we are copying from a
5849 register of the wrong class. */
5850 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
5851 #endif
5852 && spill_reg_store[reload_spill_index[j]] != 0
5853 /* This is unsafe if some other reload uses the same reg first. */
5854 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
5855 reload_opnum[j], reload_when_needed[j])
5856 && dead_or_set_p (insn, reload_in[j])
5857 /* This is unsafe if operand occurs more than once in current
5858 insn. Perhaps some occurrences weren't reloaded. */
5859 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5860 delete_output_reload (insn, j,
5861 spill_reg_store[reload_spill_index[j]]);
5862
5863 /* Input-reloading is done. Now do output-reloading,
5864 storing the value from the reload-register after the main insn
5865 if reload_out[j] is nonzero.
5866
5867 ??? At some point we need to support handling output reloads of
5868 JUMP_INSNs or insns that set cc0. */
5869 old = reload_out[j];
5870 if (old != 0
5871 && reload_reg_rtx[j] != old
5872 && reload_reg_rtx[j] != 0)
5873 {
5874 register rtx reloadreg = reload_reg_rtx[j];
5875 register rtx second_reloadreg = 0;
5876 rtx note, p;
5877 enum machine_mode mode;
5878 int special = 0;
5879
5880 /* An output operand that dies right away does need a reload,
5881 but need not be copied from it. Show the new location in the
5882 REG_UNUSED note. */
5883 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
5884 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
5885 {
5886 XEXP (note, 0) = reload_reg_rtx[j];
5887 continue;
5888 }
5889 else if (GET_CODE (old) == SCRATCH)
5890 /* If we aren't optimizing, there won't be a REG_UNUSED note,
5891 but we don't want to make an output reload. */
5892 continue;
5893
5894 #if 0
5895 /* Strip off of OLD any size-increasing SUBREGs such as
5896 (SUBREG:SI foo:QI 0). */
5897
5898 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
5899 && (GET_MODE_SIZE (GET_MODE (old))
5900 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
5901 old = SUBREG_REG (old);
5902 #endif
5903
5904 /* If this is a JUMP_INSN, we can't support output reloads yet. */
5905 if (GET_CODE (insn) == JUMP_INSN)
5906 abort ();
5907
5908 push_to_sequence (output_reload_insns[reload_opnum[j]]);
5909
5910 /* Determine the mode to reload in.
5911 See comments above (for input reloading). */
5912
5913 mode = GET_MODE (old);
5914 if (mode == VOIDmode)
5915 {
5916 /* VOIDmode should never happen for an output. */
5917 if (asm_noperands (PATTERN (insn)) < 0)
5918 /* It's the compiler's fault. */
5919 abort ();
5920 error_for_asm (insn, "output operand is constant in `asm'");
5921 /* Prevent crash--use something we know is valid. */
5922 mode = word_mode;
5923 old = gen_rtx (REG, mode, REGNO (reloadreg));
5924 }
5925
5926 if (GET_MODE (reloadreg) != mode)
5927 reloadreg = gen_lowpart_common (mode, reloadreg);
5928
5929 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5930
5931 /* If we need two reload regs, set RELOADREG to the intermediate
5932 one, since it will be stored into OUT. We might need a secondary
5933 register only for an input reload, so check again here. */
5934
5935 if (reload_secondary_reload[j] >= 0)
5936 {
5937 rtx real_old = old;
5938
5939 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
5940 && reg_equiv_mem[REGNO (old)] != 0)
5941 real_old = reg_equiv_mem[REGNO (old)];
5942
5943 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
5944 mode, real_old)
5945 != NO_REGS))
5946 {
5947 second_reloadreg = reloadreg;
5948 reloadreg = reload_reg_rtx[reload_secondary_reload[j]];
5949
5950 /* See if RELOADREG is to be used as a scratch register
5951 or as an intermediate register. */
5952 if (reload_secondary_icode[j] != CODE_FOR_nothing)
5953 {
5954 emit_insn ((GEN_FCN (reload_secondary_icode[j])
5955 (real_old, second_reloadreg, reloadreg)));
5956 special = 1;
5957 }
5958 else
5959 {
5960 /* See if we need both a scratch and intermediate reload
5961 register. */
5962 int secondary_reload = reload_secondary_reload[j];
5963 enum insn_code tertiary_icode
5964 = reload_secondary_icode[secondary_reload];
5965 rtx pat;
5966
5967 if (GET_MODE (reloadreg) != mode)
5968 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5969
5970 if (tertiary_icode != CODE_FOR_nothing)
5971 {
5972 rtx third_reloadreg
5973 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5974 pat = (GEN_FCN (tertiary_icode)
5975 (reloadreg, second_reloadreg, third_reloadreg));
5976 }
5977 #ifdef SECONDARY_MEMORY_NEEDED
5978 /* If we need a memory location to do the move, do it that way. */
5979 else if (GET_CODE (reloadreg) == REG
5980 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
5981 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
5982 REGNO_REG_CLASS (REGNO (second_reloadreg)),
5983 GET_MODE (second_reloadreg)))
5984 {
5985 /* Get the memory to use and rewrite both registers
5986 to its mode. */
5987 rtx loc
5988 = get_secondary_mem (reloadreg,
5989 GET_MODE (second_reloadreg),
5990 reload_opnum[j],
5991 reload_when_needed[j]);
5992 rtx tmp_reloadreg;
5993
5994 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
5995 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
5996 REGNO (second_reloadreg));
5997
5998 if (GET_MODE (loc) != GET_MODE (reloadreg))
5999 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6000 REGNO (reloadreg));
6001 else
6002 tmp_reloadreg = reloadreg;
6003
6004 emit_move_insn (loc, second_reloadreg);
6005 pat = gen_move_insn (tmp_reloadreg, loc);
6006 }
6007 #endif
6008 else
6009 pat = gen_move_insn (reloadreg, second_reloadreg);
6010
6011 emit_insn (pat);
6012 }
6013 }
6014 }
6015 #endif
6016
6017 /* Output the last reload insn. */
6018 if (! special)
6019 {
6020 #ifdef SECONDARY_MEMORY_NEEDED
6021 /* If we need a memory location to do the move, do it that way. */
6022 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6023 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6024 REGNO_REG_CLASS (REGNO (reloadreg)),
6025 GET_MODE (reloadreg)))
6026 {
6027 /* Get the memory to use and rewrite both registers to
6028 its mode. */
6029 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6030 reload_opnum[j],
6031 reload_when_needed[j]);
6032
6033 if (GET_MODE (loc) != GET_MODE (reloadreg))
6034 reloadreg = gen_rtx (REG, GET_MODE (loc),
6035 REGNO (reloadreg));
6036
6037 if (GET_MODE (loc) != GET_MODE (old))
6038 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6039
6040 emit_insn (gen_move_insn (loc, reloadreg));
6041 emit_insn (gen_move_insn (old, loc));
6042 }
6043 else
6044 #endif
6045 emit_insn (gen_move_insn (old, reloadreg));
6046 }
6047
6048 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6049 /* If final will look at death notes for this reg,
6050 put one on the last output-reload insn to use it. Similarly
6051 for any secondary register. */
6052 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6053 for (p = get_last_insn (); p; p = PREV_INSN (p))
6054 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6055 && reg_overlap_mentioned_for_reload_p (reloadreg,
6056 PATTERN (p)))
6057 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6058 reloadreg, REG_NOTES (p));
6059
6060 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6061 if (! special
6062 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6063 for (p = get_last_insn (); p; p = PREV_INSN (p))
6064 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6065 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6066 PATTERN (p)))
6067 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6068 second_reloadreg, REG_NOTES (p));
6069 #endif
6070 #endif
6071 /* Look at all insns we emitted, just to be safe. */
6072 for (p = get_insns (); p; p = NEXT_INSN (p))
6073 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6074 {
6075 /* If this output reload doesn't come from a spill reg,
6076 clear any memory of reloaded copies of the pseudo reg.
6077 If this output reload comes from a spill reg,
6078 reg_has_output_reload will make this do nothing. */
6079 note_stores (PATTERN (p), forget_old_reloads_1);
6080
6081 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6082 store_insn = p;
6083 }
6084
6085 output_reload_insns[reload_opnum[j]] = get_insns ();
6086 end_sequence ();
6087
6088 }
6089
6090 if (reload_spill_index[j] >= 0)
6091 new_spill_reg_store[reload_spill_index[j]] = store_insn;
6092 }
6093
6094 /* Now write all the insns we made for reloads in the order expected by
6095 the allocation functions. Prior to the insn being reloaded, we write
6096 the following reloads:
6097
6098 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6099
6100 RELOAD_OTHER reloads.
6101
6102 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6103 the RELOAD_FOR_INPUT reload for the operand.
6104
6105 RELOAD_FOR_OPERAND_ADDRESS reloads.
6106
6107 After the insn being reloaded, we write the following:
6108
6109 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6110 the RELOAD_FOR_OUTPUT reload for that operand. */
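 /* As a sketch, for an insn with operands 0 .. N-1 the emitted stream
 around INSN ends up looking like this (each group possibly empty):

	RELOAD_FOR_OTHER_ADDRESS reloads
	RELOAD_OTHER reloads
	input address reloads for operand 0, then its input reload
	  ... likewise for operands 1 .. N-1 ...
	RELOAD_FOR_OPERAND_ADDRESS reloads
	INSN
	output address reloads for operand 0, then its output reload
	  ... likewise for operands 1 .. N-1 ...  */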
6111
6112 emit_insns_before (other_input_address_reload_insns, before_insn);
6113 emit_insns_before (other_input_reload_insns, before_insn);
6114
6115 for (j = 0; j < reload_n_operands; j++)
6116 {
6117 emit_insns_before (input_address_reload_insns[j], before_insn);
6118 emit_insns_before (input_reload_insns[j], before_insn);
6119 }
6120
6121 emit_insns_before (operand_reload_insns, before_insn);
6122
6123 for (j = 0; j < reload_n_operands; j++)
6124 {
6125 emit_insns_before (output_address_reload_insns[j], following_insn);
6126 emit_insns_before (output_reload_insns[j], following_insn);
6127 }
6128
6129 /* Move death notes from INSN
6130 to output-operand-address and output reload insns. */
6131 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6132 {
6133 rtx insn1;
6134 /* Loop over those insns, last ones first. */
6135 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6136 insn1 = PREV_INSN (insn1))
6137 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6138 {
6139 rtx source = SET_SRC (PATTERN (insn1));
6140 rtx dest = SET_DEST (PATTERN (insn1));
6141
6142 /* The note we will examine next. */
6143 rtx reg_notes = REG_NOTES (insn);
6144 /* The place that pointed to this note. */
6145 rtx *prev_reg_note = &REG_NOTES (insn);
6146
6147 /* If the note is for something used in the source of this
6148 reload insn, or in the output address, move the note. */
6149 while (reg_notes)
6150 {
6151 rtx next_reg_notes = XEXP (reg_notes, 1);
6152 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6153 && GET_CODE (XEXP (reg_notes, 0)) == REG
6154 && ((GET_CODE (dest) != REG
6155 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6156 dest))
6157 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6158 source)))
6159 {
6160 *prev_reg_note = next_reg_notes;
6161 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6162 REG_NOTES (insn1) = reg_notes;
6163 }
6164 else
6165 prev_reg_note = &XEXP (reg_notes, 1);
6166
6167 reg_notes = next_reg_notes;
6168 }
6169 }
6170 }
6171 #endif
6172
6173 /* For all the spill regs newly reloaded in this instruction,
6174 record what they were reloaded from, so subsequent instructions
6175 can inherit the reloads.
6176
6177 Update spill_reg_store for the reloads of this insn.
6178 Copy the elements that were updated in the loop above. */
6179
6180 for (j = 0; j < n_reloads; j++)
6181 {
6182 register int r = reload_order[j];
6183 register int i = reload_spill_index[r];
6184
6185 /* I is nonneg if this reload used one of the spill regs.
6186 If reload_reg_rtx[r] is 0, this is an optional reload
6187 that we opted to ignore.
6188
6189 Also ignore reloads that don't reach the end of the insn,
6190 since we will eventually see the one that does. */
6191
6192 if (i >= 0 && reload_reg_rtx[r] != 0
6193 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6194 reload_when_needed[r]))
6195 {
6196 /* First, clear out memory of what used to be in this spill reg.
6197 If consecutive registers are used, clear them all. */
6198 int nr
6199 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6200 int k;
6201
6202 for (k = 0; k < nr; k++)
6203 {
6204 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6205 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6206 }
6207
6208 /* Maybe the spill reg contains a copy of reload_out. */
6209 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6210 {
6211 register int nregno = REGNO (reload_out[r]);
6212 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6213 : HARD_REGNO_NREGS (nregno,
6214 GET_MODE (reload_reg_rtx[r])));
6215
6216 spill_reg_store[i] = new_spill_reg_store[i];
6217 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6218
6219 /* If NREGNO is a hard register, it may occupy more than
6220 one register. If it does, say what is in the
6221 rest of the registers assuming that both registers
6222 agree on how many words the object takes. If not,
6223 invalidate the subsequent registers. */
6224
6225 if (nregno < FIRST_PSEUDO_REGISTER)
6226 for (k = 1; k < nnr; k++)
6227 reg_last_reload_reg[nregno + k]
6228 = (nr == nnr ? gen_rtx (REG, word_mode,
6229 REGNO (reload_reg_rtx[r]) + k)
6230 : 0);
6231
6232 /* Now do the inverse operation. */
6233 for (k = 0; k < nr; k++)
6234 {
6235 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6236 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6237 : nregno + k);
6238 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6239 }
6240 }
6241
6242 /* Maybe the spill reg contains a copy of reload_in. Only do
6243 something if there will not be an output reload for
6244 the register being reloaded. */
6245 else if (reload_out[r] == 0
6246 && reload_in[r] != 0
6247 && ((GET_CODE (reload_in[r]) == REG
6248 && ! reg_has_output_reload[REGNO (reload_in[r])]
6249 || (GET_CODE (reload_in_reg[r]) == REG
6250 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
6251 {
6252 register int nregno;
6253 int nnr;
6254
6255 if (GET_CODE (reload_in[r]) == REG)
6256 nregno = REGNO (reload_in[r]);
6257 else
6258 nregno = REGNO (reload_in_reg[r]);
6259
6260 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6261 : HARD_REGNO_NREGS (nregno,
6262 GET_MODE (reload_reg_rtx[r])));
6263
6264 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6265
6266 if (nregno < FIRST_PSEUDO_REGISTER)
6267 for (k = 1; k < nnr; k++)
6268 reg_last_reload_reg[nregno + k]
6269 = (nr == nnr ? gen_rtx (REG, word_mode,
6270 REGNO (reload_reg_rtx[r]) + k)
6271 : 0);
6272
6273 /* Unless we inherited this reload, show we haven't
6274 recently done a store. */
6275 if (! reload_inherited[r])
6276 spill_reg_store[i] = 0;
6277
6278 for (k = 0; k < nr; k++)
6279 {
6280 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6281 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6282 : nregno + k);
6283 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6284 = insn;
6285 }
6286 }
6287 }
6288
6289 /* The following if-statement was #if 0'd in 1.34 (or before...).
6290 It's reenabled in 1.35 because supposedly nothing else
6291 deals with this problem. */
6292
6293 /* If a register gets output-reloaded from a non-spill register,
6294 that invalidates any previous reloaded copy of it.
6295 But forget_old_reloads_1 won't get to see it, because
6296 it thinks only about the original insn. So invalidate it here. */
6297 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6298 {
6299 register int nregno = REGNO (reload_out[r]);
6300 reg_last_reload_reg[nregno] = 0;
6301 }
6302 }
6303 }
6304 \f
6305 /* Emit code to perform an input reload of IN to RELOADREG. IN is from
6306 operand OPNUM with reload type TYPE.
6307
6308 Returns first insn emitted. */
6309
6310 rtx
6311 gen_input_reload (reloadreg, in, opnum, type)
6312 rtx reloadreg;
6313 rtx in;
6314 int opnum;
6315 enum reload_type type;
6316 {
6317 rtx last = get_last_insn ();
6318
6319 /* How to do this reload can get quite tricky. Normally, we are being
6320 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6321 register that didn't get a hard register. In that case we can just
6322 call emit_move_insn.
6323
6324 We can also be asked to reload a PLUS that adds either two registers, or
6325 a register and a constant or MEM, or a MEM and a constant. This can
6326 occur during frame pointer elimination and while reloading addresses.
6327 This case is handled by trying to emit a single insn
6328 to perform the add. If it is not valid, we use a two insn sequence.
6329
6330 Finally, we could be called to handle an 'o' constraint by putting
6331 an address into a register. In that case, we first try to do this
6332 with a named pattern of "reload_load_address". If no such pattern
6333 exists, we just emit a SET insn and hope for the best (it will normally
6334 be valid on machines that use 'o').
6335
6336 This entire process is made complex because reload will never
6337 process the insns we generate here, so we must ensure that
6338 they will fit their constraints, and also by the fact that parts of
6339 IN might be being reloaded separately and replaced with spill registers.
6340 Because of this, we are, in some sense, just guessing the right approach
6341 here. The one listed above seems to work.
6342
6343 ??? At some point, this whole thing needs to be rethought. */
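 /* A made-up example of the PLUS case (hard reg numbers invented):
 to reload IN = (plus:SI (reg:SI fp) (const_int 8)) into (reg:SI 2),
 we first emit (set (reg:SI 2) (plus:SI (reg:SI fp) (const_int 8)))
 and keep it if recog and constrain_operands accept it.  Otherwise
 that insn is deleted and we fall back to moving the constant into
 reg 2 and then emitting an add2 insn that adds the frame pointer
 into it.  */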
6344
6345 if (GET_CODE (in) == PLUS
6346 && ((GET_CODE (XEXP (in, 0)) == REG
6347 && (GET_CODE (XEXP (in, 1)) == REG
6348 || CONSTANT_P (XEXP (in, 1))
6349 || GET_CODE (XEXP (in, 1)) == MEM))
6350 || (GET_CODE (XEXP (in, 0)) == MEM
6351 && CONSTANT_P (XEXP (in, 1)))))
6352 {
6353 /* We need to compute the sum of what is either a register and a
6354 constant, a register and memory, a hard register and a pseudo
6355 register, or memory and a constant and put it into the reload
6356 register. The best possible way of doing this is if the machine
6357 has a three-operand ADD insn that accepts the required operands.
6358
6359 The simplest approach is to try to generate such an insn and see if it
6360 is recognized and matches its constraints. If so, it can be used.
6361
6362 It might be better not to actually emit the insn unless it is valid,
6363 but we need to pass the insn as an operand to `recog' and
6364 `insn_extract' and it is simpler to emit and then delete the insn if
6365 not valid than to dummy things up. */
6366
6367 rtx op0, op1, tem, insn;
6368 int code;
6369
6370 op0 = find_replacement (&XEXP (in, 0));
6371 op1 = find_replacement (&XEXP (in, 1));
6372
6373 /* Since constraint checking is strict, commutativity won't be
6374 checked, so we need to do that here to avoid spurious failure
6375 if the add instruction is two-address and the second operand
6376 of the add is the same as the reload reg, which is frequently
6377 the case. If the insn would be A = B + A, rearrange it so
6378 it will be A = A + B as constrain_operands expects. */
6379
6380 if (GET_CODE (XEXP (in, 1)) == REG
6381 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
6382 tem = op0, op0 = op1, op1 = tem;
6383
6384 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6385 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
6386
6387 insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6388 code = recog_memoized (insn);
6389
6390 if (code >= 0)
6391 {
6392 insn_extract (insn);
6393 /* We want constrain operands to treat this insn strictly in
6394 its validity determination, i.e., the way it would after reload
6395 has completed. */
6396 if (constrain_operands (code, 1))
6397 return insn;
6398 }
6399
6400 delete_insns_since (last);
6401
6402 /* If that failed, we must use a conservative two-insn sequence.
6403 Use move to copy the constant, MEM, or pseudo register to the reload
6404 register since "move" will be able to handle an arbitrary operand,
6405 unlike add which can't, in general. Then add the registers.
6406
6407 If there is another way to do this for a specific machine, a
6408 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6409 we emit below. */
6410
6411 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6412 || (GET_CODE (op1) == REG
6413 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6414 tem = op0, op0 = op1, op1 = tem;
6415
6416 emit_insn (gen_move_insn (reloadreg, op0));
6417
6418 /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
6419 This fixes a problem on the 32K where the stack pointer cannot
6420 be used as an operand of an add insn. */
6421
6422 if (rtx_equal_p (op0, op1))
6423 op1 = reloadreg;
6424
6425 emit_insn (gen_add2_insn (reloadreg, op1));
6426 }
6427
6428 #ifdef SECONDARY_MEMORY_NEEDED
6429 /* If we need a memory location to do the move, do it that way. */
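/* Illustrative example only: on a target where values cannot be moved
directly between, say, floating-point and general registers,
SECONDARY_MEMORY_NEEDED is nonzero for that pair of classes, and the
two gen_move_insn calls below become a store to the secondary memory
location followed by a load from it. */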
6430 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6431 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6432 REGNO_REG_CLASS (REGNO (reloadreg)),
6433 GET_MODE (reloadreg)))
6434 {
6435 /* Get the memory to use and rewrite both registers to its mode. */
6436 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);
6437
6438 if (GET_MODE (loc) != GET_MODE (reloadreg))
6439 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
6440
6441 if (GET_MODE (loc) != GET_MODE (in))
6442 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6443
6444 emit_insn (gen_move_insn (loc, in));
6445 emit_insn (gen_move_insn (reloadreg, loc));
6446 }
6447 #endif
6448
6449 /* If IN is a simple operand, use gen_move_insn. */
6450 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
6451 emit_insn (gen_move_insn (reloadreg, in));
6452
6453 #ifdef HAVE_reload_load_address
6454 else if (HAVE_reload_load_address)
6455 emit_insn (gen_reload_load_address (reloadreg, in));
6456 #endif
6457
6458 /* Otherwise, just write (set RELOADREG IN) and hope for the best. */
6459 else
6460 emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6461
6462 /* Return the first insn emitted.
6463 We cannot just return get_last_insn, because there may have
6464 been multiple instructions emitted. Also note that gen_move_insn may
6465 emit more than one insn itself, so we cannot assume that there is one
6466 insn emitted per call to emit_insn. */
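/* For instance (illustrative only): if LAST was the final insn already
in the current sequence and we then emitted insns I1, I2 and I3,
NEXT_INSN (last) is I1; if the sequence was empty, LAST is zero and
get_insns () likewise yields the first insn we emitted. */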
6467
6468 return last ? NEXT_INSN (last) : get_insns ();
6469 }
6470 \f
6471 /* Delete a previously made output-reload
6472 whose result we now believe is not needed.
6473 First we double-check.
6474
6475 INSN is the insn now being processed.
6476 OUTPUT_RELOAD_INSN is the insn of the output reload.
6477 J is the reload-number for this insn. */
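/* Illustrative example (hypothetical register numbers): if the output
reload stored the reload register back into pseudo 70, pseudo 70 dies
at INSN, and nothing between OUTPUT_RELOAD_INSN and INSN mentions
pseudo 70, then the value reaches INSN through the reload register
alone and the store is a candidate for deletion. */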
6478
6479 static void
6480 delete_output_reload (insn, j, output_reload_insn)
6481 rtx insn;
6482 int j;
6483 rtx output_reload_insn;
6484 {
6485 register rtx i1;
6486
6487 /* Get the raw pseudo-register referred to. */
6488
6489 rtx reg = reload_in[j];
6490 while (GET_CODE (reg) == SUBREG)
6491 reg = SUBREG_REG (reg);
6492
6493 /* If the pseudo-reg we are reloading is no longer referenced
6494 anywhere between the store into it and here,
6495 and no jumps or labels intervene, then the value can get
6496 here through the reload reg alone.
6497 Otherwise, give up--return. */
6498 for (i1 = NEXT_INSN (output_reload_insn);
6499 i1 != insn; i1 = NEXT_INSN (i1))
6500 {
6501 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6502 return;
6503 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6504 && reg_mentioned_p (reg, PATTERN (i1)))
6505 return;
6506 }
6507
6508 if (cannot_omit_stores[REGNO (reg)])
6509 return;
6510
6511 /* If this insn will store in the pseudo again,
6512 the previous store can be removed. */
6513 if (reload_out[j] == reload_in[j])
6514 delete_insn (output_reload_insn);
6515
6516 /* See if the pseudo reg has been completely replaced
6517 with reload regs. If so, delete the store insn
6518 and forget we had a stack slot for the pseudo. */
6519 else if (reg_n_deaths[REGNO (reg)] == 1
6520 && reg_basic_block[REGNO (reg)] >= 0
6521 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6522 {
6523 rtx i2;
6524
6525 /* We know that it was used only between here
6526 and the beginning of the current basic block.
6527 (We also know that the last use before INSN was
6528 the output reload we are thinking of deleting, but never mind that.)
6529 Search that range; see if any ref remains. */
6530 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6531 {
6532 rtx set = single_set (i2);
6533
6534 /* Uses which just store in the pseudo don't count,
6535 since if they are the only uses, they are dead. */
6536 if (set != 0 && SET_DEST (set) == reg)
6537 continue;
6538 if (GET_CODE (i2) == CODE_LABEL
6539 || GET_CODE (i2) == JUMP_INSN)
6540 break;
6541 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6542 && reg_mentioned_p (reg, PATTERN (i2)))
6543 /* Some other ref remains;
6544 we can't do anything. */
6545 return;
6546 }
6547
6548 /* Delete the now-dead stores into this pseudo. */
6549 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6550 {
6551 rtx set = single_set (i2);
6552
6553 if (set != 0 && SET_DEST (set) == reg)
6554 delete_insn (i2);
6555 if (GET_CODE (i2) == CODE_LABEL
6556 || GET_CODE (i2) == JUMP_INSN)
6557 break;
6558 }
6559
6560 /* For the debugging info,
6561 say the pseudo lives in this reload reg. */
6562 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6563 alter_reg (REGNO (reg), -1);
6564 }
6565 }
6566 \f
6567 /* Output reload-insns to reload VALUE into RELOADREG.
6568 VALUE is an autoincrement or autodecrement RTX whose operand
6569 is a register or memory location,
6570 so reloading involves incrementing that location.
6571
6572 INC_AMOUNT is the number to increment or decrement by (always positive).
6573 This cannot be deduced from VALUE. */
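/* Illustrative example only: for VALUE = (post_inc:SI (reg:SI 5)) with
INC_AMOUNT 4, we copy (reg:SI 5) into RELOADREG and arrange for
(reg:SI 5) to be incremented by 4; for (pre_dec:SI (reg:SI 5)) the
location is decremented by 4 first and the decremented value ends up
in RELOADREG. */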
6574
6575 static void
6576 inc_for_reload (reloadreg, value, inc_amount)
6577 rtx reloadreg;
6578 rtx value;
6579 int inc_amount;
6580 {
6581 /* REG or MEM to be copied and incremented. */
6582 rtx incloc = XEXP (value, 0);
6583 /* Nonzero if increment after copying. */
6584 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
6585 rtx last;
6586 rtx inc;
6587 rtx add_insn;
6588 int code;
6589
6590 /* No hard register is equivalent to this register after
6591 the inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
6592 we could inc/dec that register as well (maybe even using it for
6593 the source), but I'm not sure it's worth worrying about. */
6594 if (GET_CODE (incloc) == REG)
6595 reg_last_reload_reg[REGNO (incloc)] = 0;
6596
6597 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6598 inc_amount = - inc_amount;
6599
6600 inc = GEN_INT (inc_amount);
6601
6602 /* If this is post-increment, first copy the location to the reload reg. */
6603 if (post)
6604 emit_insn (gen_move_insn (reloadreg, incloc));
6605
6606 /* See if we can directly increment INCLOC. Use a method similar to that
6607 in gen_input_reload. */
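/* A sketch of the insn tried here, for INCLOC = (reg:SI 5) and
INC_AMOUNT 4 (illustrative only):

(set (reg:SI 5) (plus:SI (reg:SI 5) (const_int 4)))

As in gen_input_reload, it is kept only if recog_memoized and
constrain_operands accept it; otherwise it is deleted below. */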
6608
6609 last = get_last_insn ();
6610 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
6611 gen_rtx (PLUS, GET_MODE (incloc),
6612 incloc, inc)));
6613
6614 code = recog_memoized (add_insn);
6615 if (code >= 0)
6616 {
6617 insn_extract (add_insn);
6618 if (constrain_operands (code, 1))
6619 {
6620 /* If this is a pre-increment and we have incremented the value
6621 where it lives, copy the incremented value to RELOADREG to
6622 be used as an address. */
6623
6624 if (! post)
6625 emit_insn (gen_move_insn (reloadreg, incloc));
6626
6627 return;
6628 }
6629 }
6630
6631 delete_insns_since (last);
6632
6633 /* If we couldn't do the increment directly, we must increment in RELOADREG.
6634 The way we do this depends on whether this is pre- or post-increment.
6635 For pre-increment, copy INCLOC to the reload register, increment it
6636 there, then save it back. */
6637
6638 if (! post)
6639 {
6640 emit_insn (gen_move_insn (reloadreg, incloc));
6641 emit_insn (gen_add2_insn (reloadreg, inc));
6642 emit_insn (gen_move_insn (incloc, reloadreg));
6643 }
6644 else
6645 {
6646 /* Post-increment.
6647 Because this might be a jump insn or a compare, and because RELOADREG
6648 may not be available after the insn in an input reload, we must do
6649 the incrementation before the insn we are reloading for.
6650
6651 We have already copied INCLOC to RELOADREG. Increment the copy in
6652 RELOADREG, save that back, then decrement RELOADREG so it has
6653 the original value. */
6654
6655 emit_insn (gen_add2_insn (reloadreg, inc));
6656 emit_insn (gen_move_insn (incloc, reloadreg));
6657 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
6658 }
6659
6660 return;
6661 }
6662 \f
6663 /* Return 1 if we are certain that the constraint-string STRING allows
6664 the hard register REG. Return 0 if we can't be sure of this. */
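/* Example (illustrative only): for STRING "r,g" and a general hard
register, every alternative contains `r' or `g', so the result is 1.
For "r,m", the second alternative names no register class that is
certain to include REG, so the result is 0. */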
6665
6666 static int
6667 constraint_accepts_reg_p (string, reg)
6668 char *string;
6669 rtx reg;
6670 {
6671 int value = 0;
6672 int regno = true_regnum (reg);
6673 int c;
6674
6675 /* Initialize for first alternative. */
6676 value = 0;
6677 /* Check that each alternative contains `g' or `r'. */
6678 while (1)
6679 switch (c = *string++)
6680 {
6681 case 0:
6682 /* End of string: return whether the final alternative allows REG. */
6683 return value;
6684 case ',':
6685 /* If an alternative lacks `g' or `r', we lose. */
6686 if (value == 0)
6687 return 0;
6688 /* Initialize for next alternative. */
6689 value = 0;
6690 break;
6691 case 'g':
6692 case 'r':
6693 /* Any general reg wins for this alternative. */
6694 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
6695 value = 1;
6696 break;
6697 default:
6698 /* Any reg in specified class wins for this alternative. */
6699 {
6700 enum reg_class class = REG_CLASS_FROM_LETTER (c);
6701
6702 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
6703 value = 1;
6704 }
6705 }
6706 }
6707 \f
6708 /* Return the number of places FIND appears within X, but don't count
6709 an occurrence if some SET_DEST is FIND. */
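/* Example (illustrative only): if X is
(set (reg:SI 65) (plus:SI (reg:SI 65) (reg:SI 66)))
and FIND is the very rtx object (reg:SI 65) shared by both places,
the result is 1: the SET_DEST occurrence is ignored and only the use
inside the PLUS is counted. Note that sharing matters, since the
comparison here is pointer equality, not rtx_equal_p. */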
6710
6711 static int
6712 count_occurrences (x, find)
6713 register rtx x, find;
6714 {
6715 register int i, j;
6716 register enum rtx_code code;
6717 register char *format_ptr;
6718 int count;
6719
6720 if (x == find)
6721 return 1;
6722 if (x == 0)
6723 return 0;
6724
6725 code = GET_CODE (x);
6726
6727 switch (code)
6728 {
6729 case REG:
6730 case QUEUED:
6731 case CONST_INT:
6732 case CONST_DOUBLE:
6733 case SYMBOL_REF:
6734 case CODE_LABEL:
6735 case PC:
6736 case CC0:
6737 return 0;
6738
6739 case SET:
6740 if (SET_DEST (x) == find)
6741 return count_occurrences (SET_SRC (x), find);
6742 break;
6743 }
6744
6745 format_ptr = GET_RTX_FORMAT (code);
6746 count = 0;
6747
6748 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6749 {
6750 switch (*format_ptr++)
6751 {
6752 case 'e':
6753 count += count_occurrences (XEXP (x, i), find);
6754 break;
6755
6756 case 'E':
6757 if (XVEC (x, i) != NULL)
6758 {
6759 for (j = 0; j < XVECLEN (x, i); j++)
6760 count += count_occurrences (XVECEXP (x, i, j), find);
6761 }
6762 break;
6763 }
6764 }
6765 return count;
6766 }